after_merge
stringlengths 28
79.6k
| before_merge
stringlengths 20
79.6k
| url
stringlengths 38
71
| full_traceback
stringlengths 43
922k
| traceback_type
stringclasses 555
values |
|---|---|---|---|---|
def get_tileable_nsplits(self, tileable, chunk_result=None):
chunk_idx_to_shape = OrderedDict()
tiled = get_tiled(tileable, mapping=tileable_optimized)
chunk_result = chunk_result if chunk_result is not None else self._chunk_result
for chunk in tiled.chunks:
chunk_idx_to_shape[chunk.index] = self._get_chunk_shape(chunk.key, chunk_result)
return calc_nsplits(chunk_idx_to_shape)
|
def get_tileable_nsplits(self, tileable, chunk_result=None):
chunk_idx_to_shape = OrderedDict()
tiled = get_tiled(tileable, mapping=tileable_optimized)
chunk_result = chunk_result if chunk_result is not None else self._chunk_result
for chunk in tiled.chunks:
chunk_idx_to_shape[chunk.index] = chunk_result[chunk.key].shape
return calc_nsplits(chunk_idx_to_shape)
|
https://github.com/mars-project/mars/issues/1542
|
In [1]: from mars.session import new_session
In [2]: import mars.dataframe as md
In [3]: new_session(backend='ray').as_default()
2020-09-01 20:05:51,291 INFO resource_spec.py:231 -- Starting Ray with 5.08 GiB memory available for workers and up to 2.56 GiB for objects. You can adjust these settings with ray.init(memory=<bytes>, object_store_memory=<bytes>).
2020-09-01 20:05:51,883 INFO services.py:1193 -- View the Ray dashboard at localhost:8265
Out[3]: <mars.session.Session at 0x7fc51364fb50>
In [4]: df = md.read_csv('Downloads/ratings.csv')
In [5]: df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).exec
...: ute()
---------------------------------------------------------------------------
RayTaskError(TypeError) Traceback (most recent call last)
<ipython-input-5-180cc92d1395> in <module>
----> 1 df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).execute()
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
576
577 def execute(self, session=None, **kw):
--> 578 self._data.execute(session, **kw)
579 return self
580
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
364
365 # no more fetch, thus just fire run
--> 366 session.run(self, **kw)
367 # return Tileable or ExecutableTuple itself
368 return self
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
478 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
479 for t in tileables)
--> 480 result = self._sess.run(*tileables, **kw)
481
482 for t in tileables:
~/Workspace/mars/mars/ray/core.py in run(self, *tileables, **kw)
188 if 'n_parallel' not in kw: # pragma: no cover
189 kw['n_parallel'] = ray.cluster_resources()['CPU']
--> 190 return self._executor.execute_tileables(tileables, **kw)
191
192 def __enter__(self):
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
879 n_parallel=n_parallel or n_thread,
880 print_progress=print_progress, mock=mock,
--> 881 chunk_result=chunk_result)
882
883 # update shape of tileable and its chunks whatever it's successful or not
~/Workspace/mars/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
691 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
692 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 693 res = graph_execution.execute(retval)
694 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
695 if mock:
~/Workspace/mars/mars/executor.py in execute(self, retval)
572 # wait until all the futures completed
573 for future in executed_futures:
--> 574 future.result()
575
576 if retval:
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in result(self, timeout)
426 raise CancelledError()
427 elif self._state == FINISHED:
--> 428 return self.__get_result()
429
430 self._condition.wait(timeout)
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
~/miniconda3/lib/python3.7/concurrent/futures/thread.py in run(self)
55
56 try:
---> 57 result = self.fn(*self.args, **self.kwargs)
58 except BaseException as exc:
59 self.future.set_exception(exc)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in _execute_operand(self, op)
444 # so we pass the first operand's first output to Executor.handle
445 first_op = ops[0]
--> 446 self.handle_op(first_op, results, self._mock)
447
448 # update maximal memory usage during execution
~/Workspace/mars/mars/ray/core.py in handle_op(self, *args, **kw)
66 class GraphExecutionForRay(GraphExecution):
67 def handle_op(self, *args, **kw):
---> 68 return RayExecutor.handle(*args, **kw)
69
70
~/Workspace/mars/mars/ray/core.py in handle(cls, op, results, mock)
147
148 try:
--> 149 return ray.get(build_remote_funtion(runner).remote(results, op))
150 except NotImplementedError:
151 for op_cls in mapper.keys():
~/miniconda3/lib/python3.7/site-packages/ray/worker.py in get(object_refs, timeout)
1536 worker.core_worker.dump_object_store_memory_usage()
1537 if isinstance(value, RayTaskError):
-> 1538 raise value.as_instanceof_cause()
1539 else:
1540 raise value
RayTaskError(TypeError): ray::mars.ray.core.remote_runner() (pid=31351, ip=30.225.12.80)
File "python/ray/_raylet.pyx", line 479, in ray._raylet.execute_task
File "/Users/qinxuye/Workspace/mars/mars/ray/core.py", line 144, in remote_runner
return func(results, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 322, in execute
df = cls._cudf_read_csv(op) if op.gpu else cls._pandas_read_csv(f, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 273, in _pandas_read_csv
dtype=dtypes.to_dict(), nrows=op.nrows, **csv_kwargs)
TypeError: parser_f() got an unexpected keyword argument 'outputs_ref'
|
TypeError
|
def operand_deserializer(value):
graph = DAG.from_json(value)
if len(graph) == 1:
chunks = [list(graph)[0]]
else:
chunks = [c for c in graph if not isinstance(c.op, Fetch)]
op = chunks[0].op
return _OperandWrapper(op, chunks)
|
def operand_deserializer(value):
graph = DAG.from_json(value)
if len(graph) == 1:
chunks = [list(graph)[0]]
else:
chunks = [c for c in graph if not isinstance(c.op, Fetch)]
op = chunks[0].op
op._extra_params["outputs_ref"] = chunks
return op
|
https://github.com/mars-project/mars/issues/1542
|
In [1]: from mars.session import new_session
In [2]: import mars.dataframe as md
In [3]: new_session(backend='ray').as_default()
2020-09-01 20:05:51,291 INFO resource_spec.py:231 -- Starting Ray with 5.08 GiB memory available for workers and up to 2.56 GiB for objects. You can adjust these settings with ray.init(memory=<bytes>, object_store_memory=<bytes>).
2020-09-01 20:05:51,883 INFO services.py:1193 -- View the Ray dashboard at localhost:8265
Out[3]: <mars.session.Session at 0x7fc51364fb50>
In [4]: df = md.read_csv('Downloads/ratings.csv')
In [5]: df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).exec
...: ute()
---------------------------------------------------------------------------
RayTaskError(TypeError) Traceback (most recent call last)
<ipython-input-5-180cc92d1395> in <module>
----> 1 df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).execute()
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
576
577 def execute(self, session=None, **kw):
--> 578 self._data.execute(session, **kw)
579 return self
580
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
364
365 # no more fetch, thus just fire run
--> 366 session.run(self, **kw)
367 # return Tileable or ExecutableTuple itself
368 return self
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
478 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
479 for t in tileables)
--> 480 result = self._sess.run(*tileables, **kw)
481
482 for t in tileables:
~/Workspace/mars/mars/ray/core.py in run(self, *tileables, **kw)
188 if 'n_parallel' not in kw: # pragma: no cover
189 kw['n_parallel'] = ray.cluster_resources()['CPU']
--> 190 return self._executor.execute_tileables(tileables, **kw)
191
192 def __enter__(self):
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
879 n_parallel=n_parallel or n_thread,
880 print_progress=print_progress, mock=mock,
--> 881 chunk_result=chunk_result)
882
883 # update shape of tileable and its chunks whatever it's successful or not
~/Workspace/mars/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
691 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
692 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 693 res = graph_execution.execute(retval)
694 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
695 if mock:
~/Workspace/mars/mars/executor.py in execute(self, retval)
572 # wait until all the futures completed
573 for future in executed_futures:
--> 574 future.result()
575
576 if retval:
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in result(self, timeout)
426 raise CancelledError()
427 elif self._state == FINISHED:
--> 428 return self.__get_result()
429
430 self._condition.wait(timeout)
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
~/miniconda3/lib/python3.7/concurrent/futures/thread.py in run(self)
55
56 try:
---> 57 result = self.fn(*self.args, **self.kwargs)
58 except BaseException as exc:
59 self.future.set_exception(exc)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in _execute_operand(self, op)
444 # so we pass the first operand's first output to Executor.handle
445 first_op = ops[0]
--> 446 self.handle_op(first_op, results, self._mock)
447
448 # update maximal memory usage during execution
~/Workspace/mars/mars/ray/core.py in handle_op(self, *args, **kw)
66 class GraphExecutionForRay(GraphExecution):
67 def handle_op(self, *args, **kw):
---> 68 return RayExecutor.handle(*args, **kw)
69
70
~/Workspace/mars/mars/ray/core.py in handle(cls, op, results, mock)
147
148 try:
--> 149 return ray.get(build_remote_funtion(runner).remote(results, op))
150 except NotImplementedError:
151 for op_cls in mapper.keys():
~/miniconda3/lib/python3.7/site-packages/ray/worker.py in get(object_refs, timeout)
1536 worker.core_worker.dump_object_store_memory_usage()
1537 if isinstance(value, RayTaskError):
-> 1538 raise value.as_instanceof_cause()
1539 else:
1540 raise value
RayTaskError(TypeError): ray::mars.ray.core.remote_runner() (pid=31351, ip=30.225.12.80)
File "python/ray/_raylet.pyx", line 479, in ray._raylet.execute_task
File "/Users/qinxuye/Workspace/mars/mars/ray/core.py", line 144, in remote_runner
return func(results, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 322, in execute
df = cls._cudf_read_csv(op) if op.gpu else cls._pandas_read_csv(f, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 273, in _pandas_read_csv
dtype=dtypes.to_dict(), nrows=op.nrows, **csv_kwargs)
TypeError: parser_f() got an unexpected keyword argument 'outputs_ref'
|
TypeError
|
def __init__(self):
self._store = dict()
|
def __init__(self):
self._dict = dict()
|
https://github.com/mars-project/mars/issues/1542
|
In [1]: from mars.session import new_session
In [2]: import mars.dataframe as md
In [3]: new_session(backend='ray').as_default()
2020-09-01 20:05:51,291 INFO resource_spec.py:231 -- Starting Ray with 5.08 GiB memory available for workers and up to 2.56 GiB for objects. You can adjust these settings with ray.init(memory=<bytes>, object_store_memory=<bytes>).
2020-09-01 20:05:51,883 INFO services.py:1193 -- View the Ray dashboard at localhost:8265
Out[3]: <mars.session.Session at 0x7fc51364fb50>
In [4]: df = md.read_csv('Downloads/ratings.csv')
In [5]: df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).exec
...: ute()
---------------------------------------------------------------------------
RayTaskError(TypeError) Traceback (most recent call last)
<ipython-input-5-180cc92d1395> in <module>
----> 1 df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).execute()
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
576
577 def execute(self, session=None, **kw):
--> 578 self._data.execute(session, **kw)
579 return self
580
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
364
365 # no more fetch, thus just fire run
--> 366 session.run(self, **kw)
367 # return Tileable or ExecutableTuple itself
368 return self
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
478 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
479 for t in tileables)
--> 480 result = self._sess.run(*tileables, **kw)
481
482 for t in tileables:
~/Workspace/mars/mars/ray/core.py in run(self, *tileables, **kw)
188 if 'n_parallel' not in kw: # pragma: no cover
189 kw['n_parallel'] = ray.cluster_resources()['CPU']
--> 190 return self._executor.execute_tileables(tileables, **kw)
191
192 def __enter__(self):
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
879 n_parallel=n_parallel or n_thread,
880 print_progress=print_progress, mock=mock,
--> 881 chunk_result=chunk_result)
882
883 # update shape of tileable and its chunks whatever it's successful or not
~/Workspace/mars/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
691 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
692 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 693 res = graph_execution.execute(retval)
694 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
695 if mock:
~/Workspace/mars/mars/executor.py in execute(self, retval)
572 # wait until all the futures completed
573 for future in executed_futures:
--> 574 future.result()
575
576 if retval:
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in result(self, timeout)
426 raise CancelledError()
427 elif self._state == FINISHED:
--> 428 return self.__get_result()
429
430 self._condition.wait(timeout)
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
~/miniconda3/lib/python3.7/concurrent/futures/thread.py in run(self)
55
56 try:
---> 57 result = self.fn(*self.args, **self.kwargs)
58 except BaseException as exc:
59 self.future.set_exception(exc)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in _execute_operand(self, op)
444 # so we pass the first operand's first output to Executor.handle
445 first_op = ops[0]
--> 446 self.handle_op(first_op, results, self._mock)
447
448 # update maximal memory usage during execution
~/Workspace/mars/mars/ray/core.py in handle_op(self, *args, **kw)
66 class GraphExecutionForRay(GraphExecution):
67 def handle_op(self, *args, **kw):
---> 68 return RayExecutor.handle(*args, **kw)
69
70
~/Workspace/mars/mars/ray/core.py in handle(cls, op, results, mock)
147
148 try:
--> 149 return ray.get(build_remote_funtion(runner).remote(results, op))
150 except NotImplementedError:
151 for op_cls in mapper.keys():
~/miniconda3/lib/python3.7/site-packages/ray/worker.py in get(object_refs, timeout)
1536 worker.core_worker.dump_object_store_memory_usage()
1537 if isinstance(value, RayTaskError):
-> 1538 raise value.as_instanceof_cause()
1539 else:
1540 raise value
RayTaskError(TypeError): ray::mars.ray.core.remote_runner() (pid=31351, ip=30.225.12.80)
File "python/ray/_raylet.pyx", line 479, in ray._raylet.execute_task
File "/Users/qinxuye/Workspace/mars/mars/ray/core.py", line 144, in remote_runner
return func(results, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 322, in execute
df = cls._cudf_read_csv(op) if op.gpu else cls._pandas_read_csv(f, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 273, in _pandas_read_csv
dtype=dtypes.to_dict(), nrows=op.nrows, **csv_kwargs)
TypeError: parser_f() got an unexpected keyword argument 'outputs_ref'
|
TypeError
|
def __getitem__(self, item):
meta: ChunkMeta = ray.get(self.meta_store.get_meta.remote(item))
return ray.get(meta.object_id)
|
def __getitem__(self, item):
return ray.get(self.ray_dict_ref.getitem.remote(item))
|
https://github.com/mars-project/mars/issues/1542
|
In [1]: from mars.session import new_session
In [2]: import mars.dataframe as md
In [3]: new_session(backend='ray').as_default()
2020-09-01 20:05:51,291 INFO resource_spec.py:231 -- Starting Ray with 5.08 GiB memory available for workers and up to 2.56 GiB for objects. You can adjust these settings with ray.init(memory=<bytes>, object_store_memory=<bytes>).
2020-09-01 20:05:51,883 INFO services.py:1193 -- View the Ray dashboard at localhost:8265
Out[3]: <mars.session.Session at 0x7fc51364fb50>
In [4]: df = md.read_csv('Downloads/ratings.csv')
In [5]: df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).exec
...: ute()
---------------------------------------------------------------------------
RayTaskError(TypeError) Traceback (most recent call last)
<ipython-input-5-180cc92d1395> in <module>
----> 1 df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).execute()
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
576
577 def execute(self, session=None, **kw):
--> 578 self._data.execute(session, **kw)
579 return self
580
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
364
365 # no more fetch, thus just fire run
--> 366 session.run(self, **kw)
367 # return Tileable or ExecutableTuple itself
368 return self
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
478 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
479 for t in tileables)
--> 480 result = self._sess.run(*tileables, **kw)
481
482 for t in tileables:
~/Workspace/mars/mars/ray/core.py in run(self, *tileables, **kw)
188 if 'n_parallel' not in kw: # pragma: no cover
189 kw['n_parallel'] = ray.cluster_resources()['CPU']
--> 190 return self._executor.execute_tileables(tileables, **kw)
191
192 def __enter__(self):
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
879 n_parallel=n_parallel or n_thread,
880 print_progress=print_progress, mock=mock,
--> 881 chunk_result=chunk_result)
882
883 # update shape of tileable and its chunks whatever it's successful or not
~/Workspace/mars/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
691 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
692 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 693 res = graph_execution.execute(retval)
694 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
695 if mock:
~/Workspace/mars/mars/executor.py in execute(self, retval)
572 # wait until all the futures completed
573 for future in executed_futures:
--> 574 future.result()
575
576 if retval:
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in result(self, timeout)
426 raise CancelledError()
427 elif self._state == FINISHED:
--> 428 return self.__get_result()
429
430 self._condition.wait(timeout)
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
~/miniconda3/lib/python3.7/concurrent/futures/thread.py in run(self)
55
56 try:
---> 57 result = self.fn(*self.args, **self.kwargs)
58 except BaseException as exc:
59 self.future.set_exception(exc)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in _execute_operand(self, op)
444 # so we pass the first operand's first output to Executor.handle
445 first_op = ops[0]
--> 446 self.handle_op(first_op, results, self._mock)
447
448 # update maximal memory usage during execution
~/Workspace/mars/mars/ray/core.py in handle_op(self, *args, **kw)
66 class GraphExecutionForRay(GraphExecution):
67 def handle_op(self, *args, **kw):
---> 68 return RayExecutor.handle(*args, **kw)
69
70
~/Workspace/mars/mars/ray/core.py in handle(cls, op, results, mock)
147
148 try:
--> 149 return ray.get(build_remote_funtion(runner).remote(results, op))
150 except NotImplementedError:
151 for op_cls in mapper.keys():
~/miniconda3/lib/python3.7/site-packages/ray/worker.py in get(object_refs, timeout)
1536 worker.core_worker.dump_object_store_memory_usage()
1537 if isinstance(value, RayTaskError):
-> 1538 raise value.as_instanceof_cause()
1539 else:
1540 raise value
RayTaskError(TypeError): ray::mars.ray.core.remote_runner() (pid=31351, ip=30.225.12.80)
File "python/ray/_raylet.pyx", line 479, in ray._raylet.execute_task
File "/Users/qinxuye/Workspace/mars/mars/ray/core.py", line 144, in remote_runner
return func(results, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 322, in execute
df = cls._cudf_read_csv(op) if op.gpu else cls._pandas_read_csv(f, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 273, in _pandas_read_csv
dtype=dtypes.to_dict(), nrows=op.nrows, **csv_kwargs)
TypeError: parser_f() got an unexpected keyword argument 'outputs_ref'
|
TypeError
|
def __setitem__(self, key, value):
object_id = ray.put(value)
shape = getattr(value, "shape", None)
meta = ChunkMeta(shape=shape, object_id=object_id)
set_meta = self.meta_store.set_meta.remote(key, meta)
ray.wait([object_id, set_meta])
|
def __setitem__(self, key, value):
ray.get(self.ray_dict_ref.setitem.remote(key, value))
|
https://github.com/mars-project/mars/issues/1542
|
In [1]: from mars.session import new_session
In [2]: import mars.dataframe as md
In [3]: new_session(backend='ray').as_default()
2020-09-01 20:05:51,291 INFO resource_spec.py:231 -- Starting Ray with 5.08 GiB memory available for workers and up to 2.56 GiB for objects. You can adjust these settings with ray.init(memory=<bytes>, object_store_memory=<bytes>).
2020-09-01 20:05:51,883 INFO services.py:1193 -- View the Ray dashboard at localhost:8265
Out[3]: <mars.session.Session at 0x7fc51364fb50>
In [4]: df = md.read_csv('Downloads/ratings.csv')
In [5]: df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).exec
...: ute()
---------------------------------------------------------------------------
RayTaskError(TypeError) Traceback (most recent call last)
<ipython-input-5-180cc92d1395> in <module>
----> 1 df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).execute()
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
576
577 def execute(self, session=None, **kw):
--> 578 self._data.execute(session, **kw)
579 return self
580
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
364
365 # no more fetch, thus just fire run
--> 366 session.run(self, **kw)
367 # return Tileable or ExecutableTuple itself
368 return self
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
478 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
479 for t in tileables)
--> 480 result = self._sess.run(*tileables, **kw)
481
482 for t in tileables:
~/Workspace/mars/mars/ray/core.py in run(self, *tileables, **kw)
188 if 'n_parallel' not in kw: # pragma: no cover
189 kw['n_parallel'] = ray.cluster_resources()['CPU']
--> 190 return self._executor.execute_tileables(tileables, **kw)
191
192 def __enter__(self):
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
879 n_parallel=n_parallel or n_thread,
880 print_progress=print_progress, mock=mock,
--> 881 chunk_result=chunk_result)
882
883 # update shape of tileable and its chunks whatever it's successful or not
~/Workspace/mars/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
691 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
692 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 693 res = graph_execution.execute(retval)
694 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
695 if mock:
~/Workspace/mars/mars/executor.py in execute(self, retval)
572 # wait until all the futures completed
573 for future in executed_futures:
--> 574 future.result()
575
576 if retval:
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in result(self, timeout)
426 raise CancelledError()
427 elif self._state == FINISHED:
--> 428 return self.__get_result()
429
430 self._condition.wait(timeout)
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
~/miniconda3/lib/python3.7/concurrent/futures/thread.py in run(self)
55
56 try:
---> 57 result = self.fn(*self.args, **self.kwargs)
58 except BaseException as exc:
59 self.future.set_exception(exc)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in _execute_operand(self, op)
444 # so we pass the first operand's first output to Executor.handle
445 first_op = ops[0]
--> 446 self.handle_op(first_op, results, self._mock)
447
448 # update maximal memory usage during execution
~/Workspace/mars/mars/ray/core.py in handle_op(self, *args, **kw)
66 class GraphExecutionForRay(GraphExecution):
67 def handle_op(self, *args, **kw):
---> 68 return RayExecutor.handle(*args, **kw)
69
70
~/Workspace/mars/mars/ray/core.py in handle(cls, op, results, mock)
147
148 try:
--> 149 return ray.get(build_remote_funtion(runner).remote(results, op))
150 except NotImplementedError:
151 for op_cls in mapper.keys():
~/miniconda3/lib/python3.7/site-packages/ray/worker.py in get(object_refs, timeout)
1536 worker.core_worker.dump_object_store_memory_usage()
1537 if isinstance(value, RayTaskError):
-> 1538 raise value.as_instanceof_cause()
1539 else:
1540 raise value
RayTaskError(TypeError): ray::mars.ray.core.remote_runner() (pid=31351, ip=30.225.12.80)
File "python/ray/_raylet.pyx", line 479, in ray._raylet.execute_task
File "/Users/qinxuye/Workspace/mars/mars/ray/core.py", line 144, in remote_runner
return func(results, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 322, in execute
df = cls._cudf_read_csv(op) if op.gpu else cls._pandas_read_csv(f, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 273, in _pandas_read_csv
dtype=dtypes.to_dict(), nrows=op.nrows, **csv_kwargs)
TypeError: parser_f() got an unexpected keyword argument 'outputs_ref'
|
TypeError
|
def copy(self):
return RayStorage(meta_store=self.meta_store)
|
def copy(self):
return RayStorage(ray_dict_ref=self.ray_dict_ref)
|
https://github.com/mars-project/mars/issues/1542
|
In [1]: from mars.session import new_session
In [2]: import mars.dataframe as md
In [3]: new_session(backend='ray').as_default()
2020-09-01 20:05:51,291 INFO resource_spec.py:231 -- Starting Ray with 5.08 GiB memory available for workers and up to 2.56 GiB for objects. You can adjust these settings with ray.init(memory=<bytes>, object_store_memory=<bytes>).
2020-09-01 20:05:51,883 INFO services.py:1193 -- View the Ray dashboard at localhost:8265
Out[3]: <mars.session.Session at 0x7fc51364fb50>
In [4]: df = md.read_csv('Downloads/ratings.csv')
In [5]: df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).exec
...: ute()
---------------------------------------------------------------------------
RayTaskError(TypeError) Traceback (most recent call last)
<ipython-input-5-180cc92d1395> in <module>
----> 1 df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).execute()
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
576
577 def execute(self, session=None, **kw):
--> 578 self._data.execute(session, **kw)
579 return self
580
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
364
365 # no more fetch, thus just fire run
--> 366 session.run(self, **kw)
367 # return Tileable or ExecutableTuple itself
368 return self
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
478 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
479 for t in tileables)
--> 480 result = self._sess.run(*tileables, **kw)
481
482 for t in tileables:
~/Workspace/mars/mars/ray/core.py in run(self, *tileables, **kw)
188 if 'n_parallel' not in kw: # pragma: no cover
189 kw['n_parallel'] = ray.cluster_resources()['CPU']
--> 190 return self._executor.execute_tileables(tileables, **kw)
191
192 def __enter__(self):
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
879 n_parallel=n_parallel or n_thread,
880 print_progress=print_progress, mock=mock,
--> 881 chunk_result=chunk_result)
882
883 # update shape of tileable and its chunks whatever it's successful or not
~/Workspace/mars/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
691 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
692 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 693 res = graph_execution.execute(retval)
694 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
695 if mock:
~/Workspace/mars/mars/executor.py in execute(self, retval)
572 # wait until all the futures completed
573 for future in executed_futures:
--> 574 future.result()
575
576 if retval:
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in result(self, timeout)
426 raise CancelledError()
427 elif self._state == FINISHED:
--> 428 return self.__get_result()
429
430 self._condition.wait(timeout)
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
~/miniconda3/lib/python3.7/concurrent/futures/thread.py in run(self)
55
56 try:
---> 57 result = self.fn(*self.args, **self.kwargs)
58 except BaseException as exc:
59 self.future.set_exception(exc)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in _execute_operand(self, op)
444 # so we pass the first operand's first output to Executor.handle
445 first_op = ops[0]
--> 446 self.handle_op(first_op, results, self._mock)
447
448 # update maximal memory usage during execution
~/Workspace/mars/mars/ray/core.py in handle_op(self, *args, **kw)
66 class GraphExecutionForRay(GraphExecution):
67 def handle_op(self, *args, **kw):
---> 68 return RayExecutor.handle(*args, **kw)
69
70
~/Workspace/mars/mars/ray/core.py in handle(cls, op, results, mock)
147
148 try:
--> 149 return ray.get(build_remote_funtion(runner).remote(results, op))
150 except NotImplementedError:
151 for op_cls in mapper.keys():
~/miniconda3/lib/python3.7/site-packages/ray/worker.py in get(object_refs, timeout)
1536 worker.core_worker.dump_object_store_memory_usage()
1537 if isinstance(value, RayTaskError):
-> 1538 raise value.as_instanceof_cause()
1539 else:
1540 raise value
RayTaskError(TypeError): ray::mars.ray.core.remote_runner() (pid=31351, ip=30.225.12.80)
File "python/ray/_raylet.pyx", line 479, in ray._raylet.execute_task
File "/Users/qinxuye/Workspace/mars/mars/ray/core.py", line 144, in remote_runner
return func(results, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 322, in execute
df = cls._cudf_read_csv(op) if op.gpu else cls._pandas_read_csv(f, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 273, in _pandas_read_csv
dtype=dtypes.to_dict(), nrows=op.nrows, **csv_kwargs)
TypeError: parser_f() got an unexpected keyword argument 'outputs_ref'
|
TypeError
|
def update(self, mapping: Dict):
tasks = []
for k, v in mapping.items():
object_id = ray.put(v)
tasks.append(object_id)
shape = getattr(v, "shape", None)
meta = ChunkMeta(shape=shape, object_id=object_id)
set_meta = self.meta_store.set_meta.remote(k, meta)
tasks.append(set_meta)
ray.wait(tasks)
|
def update(self, mapping):
self._dict.update(mapping)
|
https://github.com/mars-project/mars/issues/1542
|
In [1]: from mars.session import new_session
In [2]: import mars.dataframe as md
In [3]: new_session(backend='ray').as_default()
2020-09-01 20:05:51,291 INFO resource_spec.py:231 -- Starting Ray with 5.08 GiB memory available for workers and up to 2.56 GiB for objects. You can adjust these settings with ray.init(memory=<bytes>, object_store_memory=<bytes>).
2020-09-01 20:05:51,883 INFO services.py:1193 -- View the Ray dashboard at localhost:8265
Out[3]: <mars.session.Session at 0x7fc51364fb50>
In [4]: df = md.read_csv('Downloads/ratings.csv')
In [5]: df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).exec
...: ute()
---------------------------------------------------------------------------
RayTaskError(TypeError) Traceback (most recent call last)
<ipython-input-5-180cc92d1395> in <module>
----> 1 df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).execute()
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
576
577 def execute(self, session=None, **kw):
--> 578 self._data.execute(session, **kw)
579 return self
580
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
364
365 # no more fetch, thus just fire run
--> 366 session.run(self, **kw)
367 # return Tileable or ExecutableTuple itself
368 return self
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
478 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
479 for t in tileables)
--> 480 result = self._sess.run(*tileables, **kw)
481
482 for t in tileables:
~/Workspace/mars/mars/ray/core.py in run(self, *tileables, **kw)
188 if 'n_parallel' not in kw: # pragma: no cover
189 kw['n_parallel'] = ray.cluster_resources()['CPU']
--> 190 return self._executor.execute_tileables(tileables, **kw)
191
192 def __enter__(self):
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
879 n_parallel=n_parallel or n_thread,
880 print_progress=print_progress, mock=mock,
--> 881 chunk_result=chunk_result)
882
883 # update shape of tileable and its chunks whatever it's successful or not
~/Workspace/mars/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
691 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
692 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 693 res = graph_execution.execute(retval)
694 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
695 if mock:
~/Workspace/mars/mars/executor.py in execute(self, retval)
572 # wait until all the futures completed
573 for future in executed_futures:
--> 574 future.result()
575
576 if retval:
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in result(self, timeout)
426 raise CancelledError()
427 elif self._state == FINISHED:
--> 428 return self.__get_result()
429
430 self._condition.wait(timeout)
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
~/miniconda3/lib/python3.7/concurrent/futures/thread.py in run(self)
55
56 try:
---> 57 result = self.fn(*self.args, **self.kwargs)
58 except BaseException as exc:
59 self.future.set_exception(exc)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in _execute_operand(self, op)
444 # so we pass the first operand's first output to Executor.handle
445 first_op = ops[0]
--> 446 self.handle_op(first_op, results, self._mock)
447
448 # update maximal memory usage during execution
~/Workspace/mars/mars/ray/core.py in handle_op(self, *args, **kw)
66 class GraphExecutionForRay(GraphExecution):
67 def handle_op(self, *args, **kw):
---> 68 return RayExecutor.handle(*args, **kw)
69
70
~/Workspace/mars/mars/ray/core.py in handle(cls, op, results, mock)
147
148 try:
--> 149 return ray.get(build_remote_funtion(runner).remote(results, op))
150 except NotImplementedError:
151 for op_cls in mapper.keys():
~/miniconda3/lib/python3.7/site-packages/ray/worker.py in get(object_refs, timeout)
1536 worker.core_worker.dump_object_store_memory_usage()
1537 if isinstance(value, RayTaskError):
-> 1538 raise value.as_instanceof_cause()
1539 else:
1540 raise value
RayTaskError(TypeError): ray::mars.ray.core.remote_runner() (pid=31351, ip=30.225.12.80)
File "python/ray/_raylet.pyx", line 479, in ray._raylet.execute_task
File "/Users/qinxuye/Workspace/mars/mars/ray/core.py", line 144, in remote_runner
return func(results, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 322, in execute
df = cls._cudf_read_csv(op) if op.gpu else cls._pandas_read_csv(f, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 273, in _pandas_read_csv
dtype=dtypes.to_dict(), nrows=op.nrows, **csv_kwargs)
TypeError: parser_f() got an unexpected keyword argument 'outputs_ref'
|
TypeError
|
def __iter__(self):
return iter(ray.get(self.meta_store.chunk_keys.remote()))
|
def __iter__(self):
return iter(ray.get(self.ray_dict_ref.keys.remote()))
|
https://github.com/mars-project/mars/issues/1542
|
In [1]: from mars.session import new_session
In [2]: import mars.dataframe as md
In [3]: new_session(backend='ray').as_default()
2020-09-01 20:05:51,291 INFO resource_spec.py:231 -- Starting Ray with 5.08 GiB memory available for workers and up to 2.56 GiB for objects. You can adjust these settings with ray.init(memory=<bytes>, object_store_memory=<bytes>).
2020-09-01 20:05:51,883 INFO services.py:1193 -- View the Ray dashboard at localhost:8265
Out[3]: <mars.session.Session at 0x7fc51364fb50>
In [4]: df = md.read_csv('Downloads/ratings.csv')
In [5]: df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).exec
...: ute()
---------------------------------------------------------------------------
RayTaskError(TypeError) Traceback (most recent call last)
<ipython-input-5-180cc92d1395> in <module>
----> 1 df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).execute()
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
576
577 def execute(self, session=None, **kw):
--> 578 self._data.execute(session, **kw)
579 return self
580
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
364
365 # no more fetch, thus just fire run
--> 366 session.run(self, **kw)
367 # return Tileable or ExecutableTuple itself
368 return self
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
478 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
479 for t in tileables)
--> 480 result = self._sess.run(*tileables, **kw)
481
482 for t in tileables:
~/Workspace/mars/mars/ray/core.py in run(self, *tileables, **kw)
188 if 'n_parallel' not in kw: # pragma: no cover
189 kw['n_parallel'] = ray.cluster_resources()['CPU']
--> 190 return self._executor.execute_tileables(tileables, **kw)
191
192 def __enter__(self):
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
879 n_parallel=n_parallel or n_thread,
880 print_progress=print_progress, mock=mock,
--> 881 chunk_result=chunk_result)
882
883 # update shape of tileable and its chunks whatever it's successful or not
~/Workspace/mars/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
691 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
692 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 693 res = graph_execution.execute(retval)
694 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
695 if mock:
~/Workspace/mars/mars/executor.py in execute(self, retval)
572 # wait until all the futures completed
573 for future in executed_futures:
--> 574 future.result()
575
576 if retval:
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in result(self, timeout)
426 raise CancelledError()
427 elif self._state == FINISHED:
--> 428 return self.__get_result()
429
430 self._condition.wait(timeout)
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
~/miniconda3/lib/python3.7/concurrent/futures/thread.py in run(self)
55
56 try:
---> 57 result = self.fn(*self.args, **self.kwargs)
58 except BaseException as exc:
59 self.future.set_exception(exc)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in _execute_operand(self, op)
444 # so we pass the first operand's first output to Executor.handle
445 first_op = ops[0]
--> 446 self.handle_op(first_op, results, self._mock)
447
448 # update maximal memory usage during execution
~/Workspace/mars/mars/ray/core.py in handle_op(self, *args, **kw)
66 class GraphExecutionForRay(GraphExecution):
67 def handle_op(self, *args, **kw):
---> 68 return RayExecutor.handle(*args, **kw)
69
70
~/Workspace/mars/mars/ray/core.py in handle(cls, op, results, mock)
147
148 try:
--> 149 return ray.get(build_remote_funtion(runner).remote(results, op))
150 except NotImplementedError:
151 for op_cls in mapper.keys():
~/miniconda3/lib/python3.7/site-packages/ray/worker.py in get(object_refs, timeout)
1536 worker.core_worker.dump_object_store_memory_usage()
1537 if isinstance(value, RayTaskError):
-> 1538 raise value.as_instanceof_cause()
1539 else:
1540 raise value
RayTaskError(TypeError): ray::mars.ray.core.remote_runner() (pid=31351, ip=30.225.12.80)
File "python/ray/_raylet.pyx", line 479, in ray._raylet.execute_task
File "/Users/qinxuye/Workspace/mars/mars/ray/core.py", line 144, in remote_runner
return func(results, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 322, in execute
df = cls._cudf_read_csv(op) if op.gpu else cls._pandas_read_csv(f, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 273, in _pandas_read_csv
dtype=dtypes.to_dict(), nrows=op.nrows, **csv_kwargs)
TypeError: parser_f() got an unexpected keyword argument 'outputs_ref'
|
TypeError
|
def __delitem__(self, key):
ray.wait([self.meta_store.delete_keys.remote(key)])
|
def __delitem__(self, key):
ray.get(self.ray_dict_ref.delitem.remote(key))
|
https://github.com/mars-project/mars/issues/1542
|
In [1]: from mars.session import new_session
In [2]: import mars.dataframe as md
In [3]: new_session(backend='ray').as_default()
2020-09-01 20:05:51,291 INFO resource_spec.py:231 -- Starting Ray with 5.08 GiB memory available for workers and up to 2.56 GiB for objects. You can adjust these settings with ray.init(memory=<bytes>, object_store_memory=<bytes>).
2020-09-01 20:05:51,883 INFO services.py:1193 -- View the Ray dashboard at localhost:8265
Out[3]: <mars.session.Session at 0x7fc51364fb50>
In [4]: df = md.read_csv('Downloads/ratings.csv')
In [5]: df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).exec
...: ute()
---------------------------------------------------------------------------
RayTaskError(TypeError) Traceback (most recent call last)
<ipython-input-5-180cc92d1395> in <module>
----> 1 df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).execute()
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
576
577 def execute(self, session=None, **kw):
--> 578 self._data.execute(session, **kw)
579 return self
580
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
364
365 # no more fetch, thus just fire run
--> 366 session.run(self, **kw)
367 # return Tileable or ExecutableTuple itself
368 return self
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
478 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
479 for t in tileables)
--> 480 result = self._sess.run(*tileables, **kw)
481
482 for t in tileables:
~/Workspace/mars/mars/ray/core.py in run(self, *tileables, **kw)
188 if 'n_parallel' not in kw: # pragma: no cover
189 kw['n_parallel'] = ray.cluster_resources()['CPU']
--> 190 return self._executor.execute_tileables(tileables, **kw)
191
192 def __enter__(self):
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
879 n_parallel=n_parallel or n_thread,
880 print_progress=print_progress, mock=mock,
--> 881 chunk_result=chunk_result)
882
883 # update shape of tileable and its chunks whatever it's successful or not
~/Workspace/mars/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
691 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
692 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 693 res = graph_execution.execute(retval)
694 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
695 if mock:
~/Workspace/mars/mars/executor.py in execute(self, retval)
572 # wait until all the futures completed
573 for future in executed_futures:
--> 574 future.result()
575
576 if retval:
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in result(self, timeout)
426 raise CancelledError()
427 elif self._state == FINISHED:
--> 428 return self.__get_result()
429
430 self._condition.wait(timeout)
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
~/miniconda3/lib/python3.7/concurrent/futures/thread.py in run(self)
55
56 try:
---> 57 result = self.fn(*self.args, **self.kwargs)
58 except BaseException as exc:
59 self.future.set_exception(exc)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in _execute_operand(self, op)
444 # so we pass the first operand's first output to Executor.handle
445 first_op = ops[0]
--> 446 self.handle_op(first_op, results, self._mock)
447
448 # update maximal memory usage during execution
~/Workspace/mars/mars/ray/core.py in handle_op(self, *args, **kw)
66 class GraphExecutionForRay(GraphExecution):
67 def handle_op(self, *args, **kw):
---> 68 return RayExecutor.handle(*args, **kw)
69
70
~/Workspace/mars/mars/ray/core.py in handle(cls, op, results, mock)
147
148 try:
--> 149 return ray.get(build_remote_funtion(runner).remote(results, op))
150 except NotImplementedError:
151 for op_cls in mapper.keys():
~/miniconda3/lib/python3.7/site-packages/ray/worker.py in get(object_refs, timeout)
1536 worker.core_worker.dump_object_store_memory_usage()
1537 if isinstance(value, RayTaskError):
-> 1538 raise value.as_instanceof_cause()
1539 else:
1540 raise value
RayTaskError(TypeError): ray::mars.ray.core.remote_runner() (pid=31351, ip=30.225.12.80)
File "python/ray/_raylet.pyx", line 479, in ray._raylet.execute_task
File "/Users/qinxuye/Workspace/mars/mars/ray/core.py", line 144, in remote_runner
return func(results, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 322, in execute
df = cls._cudf_read_csv(op) if op.gpu else cls._pandas_read_csv(f, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 273, in _pandas_read_csv
dtype=dtypes.to_dict(), nrows=op.nrows, **csv_kwargs)
TypeError: parser_f() got an unexpected keyword argument 'outputs_ref'
|
TypeError
|
def handle(cls, op, results, mock=False):
method_name, mapper = (
("execute", cls._op_runners)
if not mock
else ("estimate_size", cls._op_size_estimators)
)
try:
runner = mapper[type(op)]
except KeyError:
runner = getattr(op, method_name)
# register a custom serializer for Mars operand
_register_ray_serializer(op)
try:
ray.wait([execute_on_ray.remote(runner, results, op)])
except NotImplementedError:
for op_cls in mapper.keys():
if isinstance(op, op_cls):
mapper[type(op)] = mapper[op_cls]
runner = mapper[op_cls]
ray.wait([execute_on_ray.remote(runner, results, op)])
raise KeyError(f"No handler found for op: {op}")
|
def handle(cls, op, results, mock=False):
method_name, mapper = (
("execute", cls._op_runners)
if not mock
else ("estimate_size", cls._op_size_estimators)
)
try:
runner = mapper[type(op)]
except KeyError:
runner = getattr(op, method_name)
# register a custom serializer for Mars operand
_register_ray_serializer(op)
@lru_cache(500)
def build_remote_funtion(func):
@ray.remote
def remote_runner(results, op):
return func(results, op)
return remote_runner
try:
return ray.get(build_remote_funtion(runner).remote(results, op))
except NotImplementedError:
for op_cls in mapper.keys():
if isinstance(op, op_cls):
mapper[type(op)] = mapper[op_cls]
runner = mapper[op_cls]
return ray.get(build_remote_funtion(runner).remote(results, op))
raise KeyError(f"No handler found for op: {op}")
|
https://github.com/mars-project/mars/issues/1542
|
In [1]: from mars.session import new_session
In [2]: import mars.dataframe as md
In [3]: new_session(backend='ray').as_default()
2020-09-01 20:05:51,291 INFO resource_spec.py:231 -- Starting Ray with 5.08 GiB memory available for workers and up to 2.56 GiB for objects. You can adjust these settings with ray.init(memory=<bytes>, object_store_memory=<bytes>).
2020-09-01 20:05:51,883 INFO services.py:1193 -- View the Ray dashboard at localhost:8265
Out[3]: <mars.session.Session at 0x7fc51364fb50>
In [4]: df = md.read_csv('Downloads/ratings.csv')
In [5]: df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).exec
...: ute()
---------------------------------------------------------------------------
RayTaskError(TypeError) Traceback (most recent call last)
<ipython-input-5-180cc92d1395> in <module>
----> 1 df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).execute()
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
576
577 def execute(self, session=None, **kw):
--> 578 self._data.execute(session, **kw)
579 return self
580
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
364
365 # no more fetch, thus just fire run
--> 366 session.run(self, **kw)
367 # return Tileable or ExecutableTuple itself
368 return self
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
478 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
479 for t in tileables)
--> 480 result = self._sess.run(*tileables, **kw)
481
482 for t in tileables:
~/Workspace/mars/mars/ray/core.py in run(self, *tileables, **kw)
188 if 'n_parallel' not in kw: # pragma: no cover
189 kw['n_parallel'] = ray.cluster_resources()['CPU']
--> 190 return self._executor.execute_tileables(tileables, **kw)
191
192 def __enter__(self):
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
879 n_parallel=n_parallel or n_thread,
880 print_progress=print_progress, mock=mock,
--> 881 chunk_result=chunk_result)
882
883 # update shape of tileable and its chunks whatever it's successful or not
~/Workspace/mars/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
691 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
692 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 693 res = graph_execution.execute(retval)
694 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
695 if mock:
~/Workspace/mars/mars/executor.py in execute(self, retval)
572 # wait until all the futures completed
573 for future in executed_futures:
--> 574 future.result()
575
576 if retval:
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in result(self, timeout)
426 raise CancelledError()
427 elif self._state == FINISHED:
--> 428 return self.__get_result()
429
430 self._condition.wait(timeout)
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
~/miniconda3/lib/python3.7/concurrent/futures/thread.py in run(self)
55
56 try:
---> 57 result = self.fn(*self.args, **self.kwargs)
58 except BaseException as exc:
59 self.future.set_exception(exc)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in _execute_operand(self, op)
444 # so we pass the first operand's first output to Executor.handle
445 first_op = ops[0]
--> 446 self.handle_op(first_op, results, self._mock)
447
448 # update maximal memory usage during execution
~/Workspace/mars/mars/ray/core.py in handle_op(self, *args, **kw)
66 class GraphExecutionForRay(GraphExecution):
67 def handle_op(self, *args, **kw):
---> 68 return RayExecutor.handle(*args, **kw)
69
70
~/Workspace/mars/mars/ray/core.py in handle(cls, op, results, mock)
147
148 try:
--> 149 return ray.get(build_remote_funtion(runner).remote(results, op))
150 except NotImplementedError:
151 for op_cls in mapper.keys():
~/miniconda3/lib/python3.7/site-packages/ray/worker.py in get(object_refs, timeout)
1536 worker.core_worker.dump_object_store_memory_usage()
1537 if isinstance(value, RayTaskError):
-> 1538 raise value.as_instanceof_cause()
1539 else:
1540 raise value
RayTaskError(TypeError): ray::mars.ray.core.remote_runner() (pid=31351, ip=30.225.12.80)
File "python/ray/_raylet.pyx", line 479, in ray._raylet.execute_task
File "/Users/qinxuye/Workspace/mars/mars/ray/core.py", line 144, in remote_runner
return func(results, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 322, in execute
df = cls._cudf_read_csv(op) if op.gpu else cls._pandas_read_csv(f, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 273, in _pandas_read_csv
dtype=dtypes.to_dict(), nrows=op.nrows, **csv_kwargs)
TypeError: parser_f() got an unexpected keyword argument 'outputs_ref'
|
TypeError
|
def __init__(self, **kwargs):
# as we cannot serialize fuse chunk for now,
# we just disable numexpr for ray executor
engine = kwargs.pop("engine", ["numpy", "dataframe"])
if not ray.is_initialized():
ray.init(**kwargs)
self._session_id = uuid.uuid4()
self._executor = RayExecutor(engine=engine, storage=RayStorage())
|
def __init__(self, **kwargs):
if not ray.is_initialized():
ray.init(**kwargs)
self._session_id = uuid.uuid4()
self._executor = RayExecutor(storage=RayStorage())
|
https://github.com/mars-project/mars/issues/1542
|
In [1]: from mars.session import new_session
In [2]: import mars.dataframe as md
In [3]: new_session(backend='ray').as_default()
2020-09-01 20:05:51,291 INFO resource_spec.py:231 -- Starting Ray with 5.08 GiB memory available for workers and up to 2.56 GiB for objects. You can adjust these settings with ray.init(memory=<bytes>, object_store_memory=<bytes>).
2020-09-01 20:05:51,883 INFO services.py:1193 -- View the Ray dashboard at localhost:8265
Out[3]: <mars.session.Session at 0x7fc51364fb50>
In [4]: df = md.read_csv('Downloads/ratings.csv')
In [5]: df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).exec
...: ute()
---------------------------------------------------------------------------
RayTaskError(TypeError) Traceback (most recent call last)
<ipython-input-5-180cc92d1395> in <module>
----> 1 df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).execute()
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
576
577 def execute(self, session=None, **kw):
--> 578 self._data.execute(session, **kw)
579 return self
580
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
364
365 # no more fetch, thus just fire run
--> 366 session.run(self, **kw)
367 # return Tileable or ExecutableTuple itself
368 return self
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
478 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
479 for t in tileables)
--> 480 result = self._sess.run(*tileables, **kw)
481
482 for t in tileables:
~/Workspace/mars/mars/ray/core.py in run(self, *tileables, **kw)
188 if 'n_parallel' not in kw: # pragma: no cover
189 kw['n_parallel'] = ray.cluster_resources()['CPU']
--> 190 return self._executor.execute_tileables(tileables, **kw)
191
192 def __enter__(self):
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
879 n_parallel=n_parallel or n_thread,
880 print_progress=print_progress, mock=mock,
--> 881 chunk_result=chunk_result)
882
883 # update shape of tileable and its chunks whatever it's successful or not
~/Workspace/mars/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
691 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
692 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 693 res = graph_execution.execute(retval)
694 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
695 if mock:
~/Workspace/mars/mars/executor.py in execute(self, retval)
572 # wait until all the futures completed
573 for future in executed_futures:
--> 574 future.result()
575
576 if retval:
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in result(self, timeout)
426 raise CancelledError()
427 elif self._state == FINISHED:
--> 428 return self.__get_result()
429
430 self._condition.wait(timeout)
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
~/miniconda3/lib/python3.7/concurrent/futures/thread.py in run(self)
55
56 try:
---> 57 result = self.fn(*self.args, **self.kwargs)
58 except BaseException as exc:
59 self.future.set_exception(exc)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in _execute_operand(self, op)
444 # so we pass the first operand's first output to Executor.handle
445 first_op = ops[0]
--> 446 self.handle_op(first_op, results, self._mock)
447
448 # update maximal memory usage during execution
~/Workspace/mars/mars/ray/core.py in handle_op(self, *args, **kw)
66 class GraphExecutionForRay(GraphExecution):
67 def handle_op(self, *args, **kw):
---> 68 return RayExecutor.handle(*args, **kw)
69
70
~/Workspace/mars/mars/ray/core.py in handle(cls, op, results, mock)
147
148 try:
--> 149 return ray.get(build_remote_funtion(runner).remote(results, op))
150 except NotImplementedError:
151 for op_cls in mapper.keys():
~/miniconda3/lib/python3.7/site-packages/ray/worker.py in get(object_refs, timeout)
1536 worker.core_worker.dump_object_store_memory_usage()
1537 if isinstance(value, RayTaskError):
-> 1538 raise value.as_instanceof_cause()
1539 else:
1540 raise value
RayTaskError(TypeError): ray::mars.ray.core.remote_runner() (pid=31351, ip=30.225.12.80)
File "python/ray/_raylet.pyx", line 479, in ray._raylet.execute_task
File "/Users/qinxuye/Workspace/mars/mars/ray/core.py", line 144, in remote_runner
return func(results, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 322, in execute
df = cls._cudf_read_csv(op) if op.gpu else cls._pandas_read_csv(f, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 273, in _pandas_read_csv
dtype=dtypes.to_dict(), nrows=op.nrows, **csv_kwargs)
TypeError: parser_f() got an unexpected keyword argument 'outputs_ref'
|
TypeError
|
def __init__(self, **kwargs):
engine = kwargs.pop("engine", None)
self._endpoint = None
self._session_id = uuid.uuid4()
self._context = LocalContext(self)
self._executor = Executor(engine=engine, storage=self._context)
self._mut_tensor = dict()
self._mut_tensor_data = dict()
if kwargs:
unexpected_keys = ", ".join(list(kwargs.keys()))
raise TypeError(f"Local session got unexpected arguments: {unexpected_keys}")
|
def __init__(self, **kwargs):
self._endpoint = None
self._session_id = uuid.uuid4()
self._context = LocalContext(self)
self._executor = Executor(storage=self._context)
self._mut_tensor = dict()
self._mut_tensor_data = dict()
if kwargs:
unexpected_keys = ", ".join(list(kwargs.keys()))
raise TypeError(f"Local session got unexpected arguments: {unexpected_keys}")
|
https://github.com/mars-project/mars/issues/1542
|
In [1]: from mars.session import new_session
In [2]: import mars.dataframe as md
In [3]: new_session(backend='ray').as_default()
2020-09-01 20:05:51,291 INFO resource_spec.py:231 -- Starting Ray with 5.08 GiB memory available for workers and up to 2.56 GiB for objects. You can adjust these settings with ray.init(memory=<bytes>, object_store_memory=<bytes>).
2020-09-01 20:05:51,883 INFO services.py:1193 -- View the Ray dashboard at localhost:8265
Out[3]: <mars.session.Session at 0x7fc51364fb50>
In [4]: df = md.read_csv('Downloads/ratings.csv')
In [5]: df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).exec
...: ute()
---------------------------------------------------------------------------
RayTaskError(TypeError) Traceback (most recent call last)
<ipython-input-5-180cc92d1395> in <module>
----> 1 df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).execute()
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
576
577 def execute(self, session=None, **kw):
--> 578 self._data.execute(session, **kw)
579 return self
580
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
364
365 # no more fetch, thus just fire run
--> 366 session.run(self, **kw)
367 # return Tileable or ExecutableTuple itself
368 return self
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
478 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
479 for t in tileables)
--> 480 result = self._sess.run(*tileables, **kw)
481
482 for t in tileables:
~/Workspace/mars/mars/ray/core.py in run(self, *tileables, **kw)
188 if 'n_parallel' not in kw: # pragma: no cover
189 kw['n_parallel'] = ray.cluster_resources()['CPU']
--> 190 return self._executor.execute_tileables(tileables, **kw)
191
192 def __enter__(self):
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
879 n_parallel=n_parallel or n_thread,
880 print_progress=print_progress, mock=mock,
--> 881 chunk_result=chunk_result)
882
883 # update shape of tileable and its chunks whatever it's successful or not
~/Workspace/mars/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
691 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
692 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 693 res = graph_execution.execute(retval)
694 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
695 if mock:
~/Workspace/mars/mars/executor.py in execute(self, retval)
572 # wait until all the futures completed
573 for future in executed_futures:
--> 574 future.result()
575
576 if retval:
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in result(self, timeout)
426 raise CancelledError()
427 elif self._state == FINISHED:
--> 428 return self.__get_result()
429
430 self._condition.wait(timeout)
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
~/miniconda3/lib/python3.7/concurrent/futures/thread.py in run(self)
55
56 try:
---> 57 result = self.fn(*self.args, **self.kwargs)
58 except BaseException as exc:
59 self.future.set_exception(exc)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in _execute_operand(self, op)
444 # so we pass the first operand's first output to Executor.handle
445 first_op = ops[0]
--> 446 self.handle_op(first_op, results, self._mock)
447
448 # update maximal memory usage during execution
~/Workspace/mars/mars/ray/core.py in handle_op(self, *args, **kw)
66 class GraphExecutionForRay(GraphExecution):
67 def handle_op(self, *args, **kw):
---> 68 return RayExecutor.handle(*args, **kw)
69
70
~/Workspace/mars/mars/ray/core.py in handle(cls, op, results, mock)
147
148 try:
--> 149 return ray.get(build_remote_funtion(runner).remote(results, op))
150 except NotImplementedError:
151 for op_cls in mapper.keys():
~/miniconda3/lib/python3.7/site-packages/ray/worker.py in get(object_refs, timeout)
1536 worker.core_worker.dump_object_store_memory_usage()
1537 if isinstance(value, RayTaskError):
-> 1538 raise value.as_instanceof_cause()
1539 else:
1540 raise value
RayTaskError(TypeError): ray::mars.ray.core.remote_runner() (pid=31351, ip=30.225.12.80)
File "python/ray/_raylet.pyx", line 479, in ray._raylet.execute_task
File "/Users/qinxuye/Workspace/mars/mars/ray/core.py", line 144, in remote_runner
return func(results, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 322, in execute
df = cls._cudf_read_csv(op) if op.gpu else cls._pandas_read_csv(f, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 273, in _pandas_read_csv
dtype=dtypes.to_dict(), nrows=op.nrows, **csv_kwargs)
TypeError: parser_f() got an unexpected keyword argument 'outputs_ref'
|
TypeError
|
def _init(self):
endpoint, kwargs = self._endpoint, self._kws
if self._backend is None:
if endpoint is not None:
if "http" in endpoint:
# connect to web
self._init_web_session(endpoint, **kwargs)
else:
# connect to local cluster
self._init_cluster_session(endpoint, **kwargs)
else:
try:
endpoint = os.environ["MARS_SCHEDULER_ADDRESS"]
session_id = os.environ.get("MARS_SESSION_ID", None)
kwargs["session_id"] = session_id
self._init_cluster_session(endpoint, **kwargs)
except KeyError:
self._init_local_session(**kwargs)
elif self._backend == "ray":
self._init_ray_session(**kwargs)
else: # pragma: no cover
raise ValueError(
"Either endpoint or backend should be provided to create a session"
)
|
def _init(self):
endpoint, kwargs = self._endpoint, self._kws
if self._backend is None:
if endpoint is not None:
if "http" in endpoint:
# connect to web
self._init_web_session(endpoint, **kwargs)
else:
# connect to local cluster
self._init_cluster_session(endpoint, **kwargs)
else:
try:
endpoint = os.environ["MARS_SCHEDULER_ADDRESS"]
session_id = os.environ.get("MARS_SESSION_ID", None)
kwargs["session_id"] = session_id
self._init_cluster_session(endpoint, **kwargs)
except KeyError:
self._init_local_session(**kwargs)
elif self._backend == "ray":
self._init_ray_session(**kwargs)
|
https://github.com/mars-project/mars/issues/1542
|
In [1]: from mars.session import new_session
In [2]: import mars.dataframe as md
In [3]: new_session(backend='ray').as_default()
2020-09-01 20:05:51,291 INFO resource_spec.py:231 -- Starting Ray with 5.08 GiB memory available for workers and up to 2.56 GiB for objects. You can adjust these settings with ray.init(memory=<bytes>, object_store_memory=<bytes>).
2020-09-01 20:05:51,883 INFO services.py:1193 -- View the Ray dashboard at localhost:8265
Out[3]: <mars.session.Session at 0x7fc51364fb50>
In [4]: df = md.read_csv('Downloads/ratings.csv')
In [5]: df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).exec
...: ute()
---------------------------------------------------------------------------
RayTaskError(TypeError) Traceback (most recent call last)
<ipython-input-5-180cc92d1395> in <module>
----> 1 df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).execute()
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
576
577 def execute(self, session=None, **kw):
--> 578 self._data.execute(session, **kw)
579 return self
580
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
364
365 # no more fetch, thus just fire run
--> 366 session.run(self, **kw)
367 # return Tileable or ExecutableTuple itself
368 return self
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
478 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
479 for t in tileables)
--> 480 result = self._sess.run(*tileables, **kw)
481
482 for t in tileables:
~/Workspace/mars/mars/ray/core.py in run(self, *tileables, **kw)
188 if 'n_parallel' not in kw: # pragma: no cover
189 kw['n_parallel'] = ray.cluster_resources()['CPU']
--> 190 return self._executor.execute_tileables(tileables, **kw)
191
192 def __enter__(self):
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
879 n_parallel=n_parallel or n_thread,
880 print_progress=print_progress, mock=mock,
--> 881 chunk_result=chunk_result)
882
883 # update shape of tileable and its chunks whatever it's successful or not
~/Workspace/mars/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
691 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
692 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 693 res = graph_execution.execute(retval)
694 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
695 if mock:
~/Workspace/mars/mars/executor.py in execute(self, retval)
572 # wait until all the futures completed
573 for future in executed_futures:
--> 574 future.result()
575
576 if retval:
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in result(self, timeout)
426 raise CancelledError()
427 elif self._state == FINISHED:
--> 428 return self.__get_result()
429
430 self._condition.wait(timeout)
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
~/miniconda3/lib/python3.7/concurrent/futures/thread.py in run(self)
55
56 try:
---> 57 result = self.fn(*self.args, **self.kwargs)
58 except BaseException as exc:
59 self.future.set_exception(exc)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in _execute_operand(self, op)
444 # so we pass the first operand's first output to Executor.handle
445 first_op = ops[0]
--> 446 self.handle_op(first_op, results, self._mock)
447
448 # update maximal memory usage during execution
~/Workspace/mars/mars/ray/core.py in handle_op(self, *args, **kw)
66 class GraphExecutionForRay(GraphExecution):
67 def handle_op(self, *args, **kw):
---> 68 return RayExecutor.handle(*args, **kw)
69
70
~/Workspace/mars/mars/ray/core.py in handle(cls, op, results, mock)
147
148 try:
--> 149 return ray.get(build_remote_funtion(runner).remote(results, op))
150 except NotImplementedError:
151 for op_cls in mapper.keys():
~/miniconda3/lib/python3.7/site-packages/ray/worker.py in get(object_refs, timeout)
1536 worker.core_worker.dump_object_store_memory_usage()
1537 if isinstance(value, RayTaskError):
-> 1538 raise value.as_instanceof_cause()
1539 else:
1540 raise value
RayTaskError(TypeError): ray::mars.ray.core.remote_runner() (pid=31351, ip=30.225.12.80)
File "python/ray/_raylet.pyx", line 479, in ray._raylet.execute_task
File "/Users/qinxuye/Workspace/mars/mars/ray/core.py", line 144, in remote_runner
return func(results, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 322, in execute
df = cls._cudf_read_csv(op) if op.gpu else cls._pandas_read_csv(f, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 273, in _pandas_read_csv
dtype=dtypes.to_dict(), nrows=op.nrows, **csv_kwargs)
TypeError: parser_f() got an unexpected keyword argument 'outputs_ref'
|
TypeError
|
def estimate_fuse_size(ctx, op):
from ...graph import DAG
from ...executor import Executor
from ...utils import build_fetch_chunk
chunk = op.outputs[0]
dag = DAG()
size_ctx = dict()
keys = set(c.key for c in chunk.composed)
for c in chunk.composed:
dag.add_node(c)
for inp in c.inputs:
if inp.key not in keys:
size_ctx[inp.key] = ctx[inp.key]
inp = build_fetch_chunk(inp).data
if inp not in dag:
dag.add_node(inp)
dag.add_edge(inp, c)
executor = Executor(storage=size_ctx)
output_keys = [o.key for o in op.outputs]
results = executor.execute_graph(dag, output_keys, mock=True, no_intermediate=True)
ctx.update(zip(output_keys, results))
# update with the maximal memory cost during the whole execution
total_mem = sum(ctx[key][1] for key in output_keys)
if total_mem:
for key in output_keys:
r = ctx[key]
ctx[key] = (r[0], max(r[1], r[1] * executor.mock_max_memory // total_mem))
|
def estimate_fuse_size(ctx, op):
from ...graph import DAG
from ...executor import Executor
chunk = op.outputs[0]
dag = DAG()
size_ctx = dict()
keys = set(c.key for c in chunk.composed)
for c in chunk.composed:
dag.add_node(c)
for inp in c.inputs:
if inp.key not in keys:
size_ctx[inp.key] = ctx[inp.key]
if inp not in dag:
dag.add_node(inp)
dag.add_edge(inp, c)
executor = Executor(storage=size_ctx)
output_keys = [o.key for o in op.outputs]
results = executor.execute_graph(dag, output_keys, mock=True, no_intermediate=True)
ctx.update(zip(output_keys, results))
# update with the maximal memory cost during the whole execution
total_mem = sum(ctx[key][1] for key in output_keys)
if total_mem:
for key in output_keys:
r = ctx[key]
ctx[key] = (r[0], max(r[1], r[1] * executor.mock_max_memory // total_mem))
|
https://github.com/mars-project/mars/issues/1542
|
In [1]: from mars.session import new_session
In [2]: import mars.dataframe as md
In [3]: new_session(backend='ray').as_default()
2020-09-01 20:05:51,291 INFO resource_spec.py:231 -- Starting Ray with 5.08 GiB memory available for workers and up to 2.56 GiB for objects. You can adjust these settings with ray.init(memory=<bytes>, object_store_memory=<bytes>).
2020-09-01 20:05:51,883 INFO services.py:1193 -- View the Ray dashboard at localhost:8265
Out[3]: <mars.session.Session at 0x7fc51364fb50>
In [4]: df = md.read_csv('Downloads/ratings.csv')
In [5]: df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).exec
...: ute()
---------------------------------------------------------------------------
RayTaskError(TypeError) Traceback (most recent call last)
<ipython-input-5-180cc92d1395> in <module>
----> 1 df.groupby('userId').agg({'rating': ['min', 'max', 'mean', 'std']}).execute()
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
576
577 def execute(self, session=None, **kw):
--> 578 self._data.execute(session, **kw)
579 return self
580
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
364
365 # no more fetch, thus just fire run
--> 366 session.run(self, **kw)
367 # return Tileable or ExecutableTuple itself
368 return self
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
478 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
479 for t in tileables)
--> 480 result = self._sess.run(*tileables, **kw)
481
482 for t in tileables:
~/Workspace/mars/mars/ray/core.py in run(self, *tileables, **kw)
188 if 'n_parallel' not in kw: # pragma: no cover
189 kw['n_parallel'] = ray.cluster_resources()['CPU']
--> 190 return self._executor.execute_tileables(tileables, **kw)
191
192 def __enter__(self):
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
879 n_parallel=n_parallel or n_thread,
880 print_progress=print_progress, mock=mock,
--> 881 chunk_result=chunk_result)
882
883 # update shape of tileable and its chunks whatever it's successful or not
~/Workspace/mars/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
691 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
692 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 693 res = graph_execution.execute(retval)
694 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
695 if mock:
~/Workspace/mars/mars/executor.py in execute(self, retval)
572 # wait until all the futures completed
573 for future in executed_futures:
--> 574 future.result()
575
576 if retval:
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in result(self, timeout)
426 raise CancelledError()
427 elif self._state == FINISHED:
--> 428 return self.__get_result()
429
430 self._condition.wait(timeout)
~/miniconda3/lib/python3.7/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
~/miniconda3/lib/python3.7/concurrent/futures/thread.py in run(self)
55
56 try:
---> 57 result = self.fn(*self.args, **self.kwargs)
58 except BaseException as exc:
59 self.future.set_exception(exc)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
437 def _inner(*args, **kwargs):
438 with self:
--> 439 return func(*args, **kwargs)
440
441 return _inner
~/Workspace/mars/mars/executor.py in _execute_operand(self, op)
444 # so we pass the first operand's first output to Executor.handle
445 first_op = ops[0]
--> 446 self.handle_op(first_op, results, self._mock)
447
448 # update maximal memory usage during execution
~/Workspace/mars/mars/ray/core.py in handle_op(self, *args, **kw)
66 class GraphExecutionForRay(GraphExecution):
67 def handle_op(self, *args, **kw):
---> 68 return RayExecutor.handle(*args, **kw)
69
70
~/Workspace/mars/mars/ray/core.py in handle(cls, op, results, mock)
147
148 try:
--> 149 return ray.get(build_remote_funtion(runner).remote(results, op))
150 except NotImplementedError:
151 for op_cls in mapper.keys():
~/miniconda3/lib/python3.7/site-packages/ray/worker.py in get(object_refs, timeout)
1536 worker.core_worker.dump_object_store_memory_usage()
1537 if isinstance(value, RayTaskError):
-> 1538 raise value.as_instanceof_cause()
1539 else:
1540 raise value
RayTaskError(TypeError): ray::mars.ray.core.remote_runner() (pid=31351, ip=30.225.12.80)
File "python/ray/_raylet.pyx", line 479, in ray._raylet.execute_task
File "/Users/qinxuye/Workspace/mars/mars/ray/core.py", line 144, in remote_runner
return func(results, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 322, in execute
df = cls._cudf_read_csv(op) if op.gpu else cls._pandas_read_csv(f, op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/datasource/read_csv.py", line 273, in _pandas_read_csv
dtype=dtypes.to_dict(), nrows=op.nrows, **csv_kwargs)
TypeError: parser_f() got an unexpected keyword argument 'outputs_ref'
|
TypeError
|
def execute(cls, ctx, op):
def _base_concat(chunk, inputs):
# auto generated concat when executing a DataFrame, Series or Index
if chunk.op.output_types[0] == OutputType.dataframe:
return _auto_concat_dataframe_chunks(chunk, inputs)
elif chunk.op.output_types[0] == OutputType.series:
return _auto_concat_series_chunks(chunk, inputs)
elif chunk.op.output_types[0] == OutputType.index:
return _auto_concat_index_chunks(chunk, inputs)
elif chunk.op.output_types[0] == OutputType.categorical:
return _auto_concat_categorical_chunks(chunk, inputs)
else: # pragma: no cover
raise TypeError(
"Only DataFrameChunk, SeriesChunk, IndexChunk, "
"and CategoricalChunk can be automatically concatenated"
)
def _auto_concat_dataframe_chunks(chunk, inputs):
xdf = pd if isinstance(inputs[0], (pd.DataFrame, pd.Series)) else cudf
if chunk.op.axis is not None:
return xdf.concat(inputs, axis=op.axis)
# auto generated concat when executing a DataFrame
if len(inputs) == 1:
ret = inputs[0]
else:
max_rows = max(inp.index[0] for inp in chunk.inputs)
min_rows = min(inp.index[0] for inp in chunk.inputs)
n_rows = max_rows - min_rows + 1
n_cols = int(len(inputs) // n_rows)
assert n_rows * n_cols == len(inputs)
concats = []
for i in range(n_rows):
if n_cols == 1:
concats.append(inputs[i])
else:
concat = xdf.concat(
[inputs[i * n_cols + j] for j in range(n_cols)], axis=1
)
concats.append(concat)
if xdf is pd:
# The `sort=False` is to suppress a `FutureWarning` of pandas,
# when the index or column of chunks to concatenate is not aligned,
# which may happens for certain ops.
#
# See also Note [Columns of Left Join] in test_merge_execution.py.
ret = xdf.concat(concats, sort=False)
else:
ret = xdf.concat(concats)
# cuDF will lost index name when concat two seriess.
ret.index.name = concats[0].index.name
if getattr(chunk.index_value, "should_be_monotonic", False):
ret.sort_index(inplace=True)
if getattr(chunk.columns_value, "should_be_monotonic", False):
ret.sort_index(axis=1, inplace=True)
return ret
def _auto_concat_series_chunks(chunk, inputs):
# auto generated concat when executing a Series
if all(np.isscalar(inp) for inp in inputs):
return pd.Series(inputs)
else:
if len(inputs) == 1:
concat = inputs[0]
else:
xdf = pd if isinstance(inputs[0], pd.Series) else cudf
if chunk.op.axis is not None:
concat = xdf.concat(inputs, axis=chunk.op.axis)
else:
concat = xdf.concat(inputs)
if getattr(chunk.index_value, "should_be_monotonic", False):
concat.sort_index(inplace=True)
return concat
def _auto_concat_index_chunks(chunk, inputs):
if len(inputs) == 1:
xdf = pd if isinstance(inputs[0], pd.Index) else cudf
concat_df = xdf.DataFrame(index=inputs[0])
else:
xdf = pd if isinstance(inputs[0], pd.Index) else cudf
empty_dfs = [xdf.DataFrame(index=inp) for inp in inputs]
concat_df = xdf.concat(empty_dfs, axis=0)
if getattr(chunk.index_value, "should_be_monotonic", False):
concat_df.sort_index(inplace=True)
return concat_df.index
def _auto_concat_categorical_chunks(_, inputs):
if len(inputs) == 1: # pragma: no cover
return inputs[0]
else:
# convert categorical into array
arrays = [np.asarray(inp) for inp in inputs]
array = np.concatenate(arrays)
return pd.Categorical(
array, categories=inputs[0].categories, ordered=inputs[0].ordered
)
chunk = op.outputs[0]
inputs = [ctx[input.key] for input in op.inputs]
if isinstance(inputs[0], tuple):
ctx[chunk.key] = tuple(
_base_concat(chunk, [input[i] for input in inputs])
for i in range(len(inputs[0]))
)
else:
ctx[chunk.key] = _base_concat(chunk, inputs)
|
def execute(cls, ctx, op):
def _base_concat(chunk, inputs):
# auto generated concat when executing a DataFrame, Series or Index
if chunk.op.output_types[0] == OutputType.dataframe:
return _auto_concat_dataframe_chunks(chunk, inputs)
elif chunk.op.output_types[0] == OutputType.series:
return _auto_concat_series_chunks(chunk, inputs)
elif chunk.op.output_types[0] == OutputType.index:
return _auto_concat_index_chunks(chunk, inputs)
elif chunk.op.output_types[0] == OutputType.categorical:
return _auto_concat_categorical_chunks(chunk, inputs)
else: # pragma: no cover
raise TypeError(
"Only DataFrameChunk, SeriesChunk, IndexChunk, "
"and CategoricalChunk can be automatically concatenated"
)
def _auto_concat_dataframe_chunks(chunk, inputs):
if chunk.op.axis is not None:
return pd.concat(inputs, axis=op.axis)
# auto generated concat when executing a DataFrame
if len(inputs) == 1:
ret = inputs[0]
else:
max_rows = max(inp.index[0] for inp in chunk.inputs)
min_rows = min(inp.index[0] for inp in chunk.inputs)
n_rows = max_rows - min_rows + 1
n_cols = int(len(inputs) // n_rows)
assert n_rows * n_cols == len(inputs)
xdf = pd if isinstance(inputs[0], (pd.DataFrame, pd.Series)) else cudf
concats = []
for i in range(n_rows):
if n_cols == 1:
concats.append(inputs[i])
else:
concat = xdf.concat(
[inputs[i * n_cols + j] for j in range(n_cols)], axis=1
)
concats.append(concat)
if xdf is pd:
# The `sort=False` is to suppress a `FutureWarning` of pandas,
# when the index or column of chunks to concatenate is not aligned,
# which may happens for certain ops.
#
# See also Note [Columns of Left Join] in test_merge_execution.py.
ret = xdf.concat(concats, sort=False)
else:
ret = xdf.concat(concats)
# cuDF will lost index name when concat two seriess.
ret.index.name = concats[0].index.name
if getattr(chunk.index_value, "should_be_monotonic", False):
ret.sort_index(inplace=True)
if getattr(chunk.columns_value, "should_be_monotonic", False):
ret.sort_index(axis=1, inplace=True)
return ret
def _auto_concat_series_chunks(chunk, inputs):
# auto generated concat when executing a Series
if all(np.isscalar(inp) for inp in inputs):
return pd.Series(inputs)
else:
if len(inputs) == 1:
concat = inputs[0]
else:
xdf = pd if isinstance(inputs[0], pd.Series) else cudf
if chunk.op.axis is not None:
concat = xdf.concat(inputs, axis=chunk.op.axis)
else:
concat = xdf.concat(inputs)
if getattr(chunk.index_value, "should_be_monotonic", False):
concat.sort_index(inplace=True)
return concat
def _auto_concat_index_chunks(chunk, inputs):
if len(inputs) == 1:
xdf = pd if isinstance(inputs[0], pd.Index) else cudf
concat_df = xdf.DataFrame(index=inputs[0])
else:
xdf = pd if isinstance(inputs[0], pd.Index) else cudf
empty_dfs = [xdf.DataFrame(index=inp) for inp in inputs]
concat_df = xdf.concat(empty_dfs, axis=0)
if getattr(chunk.index_value, "should_be_monotonic", False):
concat_df.sort_index(inplace=True)
return concat_df.index
def _auto_concat_categorical_chunks(_, inputs):
if len(inputs) == 1: # pragma: no cover
return inputs[0]
else:
# convert categorical into array
arrays = [np.asarray(inp) for inp in inputs]
array = np.concatenate(arrays)
return pd.Categorical(
array, categories=inputs[0].categories, ordered=inputs[0].ordered
)
chunk = op.outputs[0]
inputs = [ctx[input.key] for input in op.inputs]
if isinstance(inputs[0], tuple):
ctx[chunk.key] = tuple(
_base_concat(chunk, [input[i] for input in inputs])
for i in range(len(inputs[0]))
)
else:
ctx[chunk.key] = _base_concat(chunk, inputs)
|
https://github.com/mars-project/mars/issues/1533
|
AttributeError Traceback (most recent call last)
<ipython-input-3-a85925f048d0> in <module>
1 start=time.time()
2 df_mars=df_mars.to_gpu()
----> 3 print(df_mars.sum().to_frame(name="sum").execute())
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
580
581 def execute(self, session=None, **kw):
--> 582 self._data.execute(session, **kw)
583 return self
584
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
460 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
461 for t in tileables)
--> 462 result = self._sess.run(*tileables, **kw)
463
464 for t in tileables:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
105 # set number of running cores
106 self.context.set_ncores(kw['n_parallel'])
--> 107 res = self._executor.execute_tileables(tileables, **kw)
108 return res
109
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
878 n_parallel=n_parallel or n_thread,
879 print_progress=print_progress, mock=mock,
--> 880 chunk_result=chunk_result)
881
882 # update shape of tileable and its chunks whatever it's successful or not
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
688 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
689 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 690 res = graph_execution.execute(retval)
691 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
692 if mock:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute(self, retval)
569 # wait until all the futures completed
570 for future in executed_futures:
--> 571 future.result()
572
573 if retval:
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in result(self, timeout)
423 raise CancelledError()
424 elif self._state == FINISHED:
--> 425 return self.__get_result()
426
427 self._condition.wait(timeout)
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/thread.py in run(self)
54
55 try:
---> 56 result = self.fn(*self.args, **self.kwargs)
57 except BaseException as exc:
58 self.future.set_exception(exc)
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in _execute_operand(self, op)
441 # so we pass the first operand's first output to Executor.handle
442 first_op = ops[0]
--> 443 Executor.handle(first_op, results, self._mock)
444
445 # update maximal memory usage during execution
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in handle(cls, op, results, mock)
639 # The `UFuncTypeError` was introduced by numpy#12593 since v1.17.0.
640 try:
--> 641 return runner(results, op)
642 except UFuncTypeError as e:
643 raise TypeError(str(e)).with_traceback(sys.exc_info()[2]) from None
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in execute(cls, ctx, op)
405 cls._execute_agg(ctx, op)
406 elif op.stage == OperandStage.map:
--> 407 cls._execute_map(ctx, op)
408 else:
409 in_data = ctx[op.inputs[0].key]
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_map(cls, ctx, op)
380 cls._execute_map_with_count(ctx, op)
381 else:
--> 382 cls._execute_without_count(ctx, op)
383
384 @classmethod
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_without_count(cls, ctx, op, reduction_func)
370 # cannot just do xdf.DataFrame(r).T
371 # cuz the dtype will be object since pandas 1.0
--> 372 df = xdf.DataFrame(OrderedDict((d, [v]) for d, v in r.iteritems()))
373 else:
374 df = xdf.DataFrame(r)
AttributeError: 'Series' object has no attribute 'iteritems'
|
AttributeError
|
def _auto_concat_dataframe_chunks(chunk, inputs):
xdf = pd if isinstance(inputs[0], (pd.DataFrame, pd.Series)) else cudf
if chunk.op.axis is not None:
return xdf.concat(inputs, axis=op.axis)
# auto generated concat when executing a DataFrame
if len(inputs) == 1:
ret = inputs[0]
else:
max_rows = max(inp.index[0] for inp in chunk.inputs)
min_rows = min(inp.index[0] for inp in chunk.inputs)
n_rows = max_rows - min_rows + 1
n_cols = int(len(inputs) // n_rows)
assert n_rows * n_cols == len(inputs)
concats = []
for i in range(n_rows):
if n_cols == 1:
concats.append(inputs[i])
else:
concat = xdf.concat(
[inputs[i * n_cols + j] for j in range(n_cols)], axis=1
)
concats.append(concat)
if xdf is pd:
# The `sort=False` is to suppress a `FutureWarning` of pandas,
# when the index or column of chunks to concatenate is not aligned,
# which may happens for certain ops.
#
# See also Note [Columns of Left Join] in test_merge_execution.py.
ret = xdf.concat(concats, sort=False)
else:
ret = xdf.concat(concats)
# cuDF will lost index name when concat two seriess.
ret.index.name = concats[0].index.name
if getattr(chunk.index_value, "should_be_monotonic", False):
ret.sort_index(inplace=True)
if getattr(chunk.columns_value, "should_be_monotonic", False):
ret.sort_index(axis=1, inplace=True)
return ret
|
def _auto_concat_dataframe_chunks(chunk, inputs):
if chunk.op.axis is not None:
return pd.concat(inputs, axis=op.axis)
# auto generated concat when executing a DataFrame
if len(inputs) == 1:
ret = inputs[0]
else:
max_rows = max(inp.index[0] for inp in chunk.inputs)
min_rows = min(inp.index[0] for inp in chunk.inputs)
n_rows = max_rows - min_rows + 1
n_cols = int(len(inputs) // n_rows)
assert n_rows * n_cols == len(inputs)
xdf = pd if isinstance(inputs[0], (pd.DataFrame, pd.Series)) else cudf
concats = []
for i in range(n_rows):
if n_cols == 1:
concats.append(inputs[i])
else:
concat = xdf.concat(
[inputs[i * n_cols + j] for j in range(n_cols)], axis=1
)
concats.append(concat)
if xdf is pd:
# The `sort=False` is to suppress a `FutureWarning` of pandas,
# when the index or column of chunks to concatenate is not aligned,
# which may happens for certain ops.
#
# See also Note [Columns of Left Join] in test_merge_execution.py.
ret = xdf.concat(concats, sort=False)
else:
ret = xdf.concat(concats)
# cuDF will lost index name when concat two seriess.
ret.index.name = concats[0].index.name
if getattr(chunk.index_value, "should_be_monotonic", False):
ret.sort_index(inplace=True)
if getattr(chunk.columns_value, "should_be_monotonic", False):
ret.sort_index(axis=1, inplace=True)
return ret
|
https://github.com/mars-project/mars/issues/1533
|
AttributeError Traceback (most recent call last)
<ipython-input-3-a85925f048d0> in <module>
1 start=time.time()
2 df_mars=df_mars.to_gpu()
----> 3 print(df_mars.sum().to_frame(name="sum").execute())
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
580
581 def execute(self, session=None, **kw):
--> 582 self._data.execute(session, **kw)
583 return self
584
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
460 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
461 for t in tileables)
--> 462 result = self._sess.run(*tileables, **kw)
463
464 for t in tileables:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
105 # set number of running cores
106 self.context.set_ncores(kw['n_parallel'])
--> 107 res = self._executor.execute_tileables(tileables, **kw)
108 return res
109
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
878 n_parallel=n_parallel or n_thread,
879 print_progress=print_progress, mock=mock,
--> 880 chunk_result=chunk_result)
881
882 # update shape of tileable and its chunks whatever it's successful or not
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
688 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
689 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 690 res = graph_execution.execute(retval)
691 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
692 if mock:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute(self, retval)
569 # wait until all the futures completed
570 for future in executed_futures:
--> 571 future.result()
572
573 if retval:
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in result(self, timeout)
423 raise CancelledError()
424 elif self._state == FINISHED:
--> 425 return self.__get_result()
426
427 self._condition.wait(timeout)
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/thread.py in run(self)
54
55 try:
---> 56 result = self.fn(*self.args, **self.kwargs)
57 except BaseException as exc:
58 self.future.set_exception(exc)
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in _execute_operand(self, op)
441 # so we pass the first operand's first output to Executor.handle
442 first_op = ops[0]
--> 443 Executor.handle(first_op, results, self._mock)
444
445 # update maximal memory usage during execution
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in handle(cls, op, results, mock)
639 # The `UFuncTypeError` was introduced by numpy#12593 since v1.17.0.
640 try:
--> 641 return runner(results, op)
642 except UFuncTypeError as e:
643 raise TypeError(str(e)).with_traceback(sys.exc_info()[2]) from None
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in execute(cls, ctx, op)
405 cls._execute_agg(ctx, op)
406 elif op.stage == OperandStage.map:
--> 407 cls._execute_map(ctx, op)
408 else:
409 in_data = ctx[op.inputs[0].key]
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_map(cls, ctx, op)
380 cls._execute_map_with_count(ctx, op)
381 else:
--> 382 cls._execute_without_count(ctx, op)
383
384 @classmethod
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_without_count(cls, ctx, op, reduction_func)
370 # cannot just do xdf.DataFrame(r).T
371 # cuz the dtype will be object since pandas 1.0
--> 372 df = xdf.DataFrame(OrderedDict((d, [v]) for d, v in r.iteritems()))
373 else:
374 df = xdf.DataFrame(r)
AttributeError: 'Series' object has no attribute 'iteritems'
|
AttributeError
|
def _execute_without_count(cls, ctx, op, reduction_func=None):
# Execution for normal reduction operands.
# For dataframe, will keep dimensions for intermediate results.
xdf = cudf if op.gpu else pd
in_data = ctx[op.inputs[0].key]
r = cls._execute_reduction(
in_data, op, min_count=op.min_count, reduction_func=reduction_func
)
if isinstance(in_data, xdf.Series) or op.output_types[0] == OutputType.series:
ctx[op.outputs[0].key] = r
else:
if op.axis == 0:
if op.gpu:
df = xdf.DataFrame(r).transpose()
df.columns = r.index.to_arrow().to_pylist()
else:
# cannot just do xdf.DataFrame(r).T
# cuz the dtype will be object since pandas 1.0
df = xdf.DataFrame(OrderedDict((d, [v]) for d, v in r.iteritems()))
else:
df = xdf.DataFrame(r)
ctx[op.outputs[0].key] = df
|
def _execute_without_count(cls, ctx, op, reduction_func=None):
# Execution for normal reduction operands.
# For dataframe, will keep dimensions for intermediate results.
xdf = cudf if op.gpu else pd
in_data = ctx[op.inputs[0].key]
r = cls._execute_reduction(
in_data, op, min_count=op.min_count, reduction_func=reduction_func
)
if isinstance(in_data, xdf.Series) or op.output_types[0] == OutputType.series:
ctx[op.outputs[0].key] = r
else:
if op.axis == 0:
# cannot just do xdf.DataFrame(r).T
# cuz the dtype will be object since pandas 1.0
df = xdf.DataFrame(OrderedDict((d, [v]) for d, v in r.iteritems()))
else:
df = xdf.DataFrame(r)
ctx[op.outputs[0].key] = df
|
https://github.com/mars-project/mars/issues/1533
|
AttributeError Traceback (most recent call last)
<ipython-input-3-a85925f048d0> in <module>
1 start=time.time()
2 df_mars=df_mars.to_gpu()
----> 3 print(df_mars.sum().to_frame(name="sum").execute())
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
580
581 def execute(self, session=None, **kw):
--> 582 self._data.execute(session, **kw)
583 return self
584
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
460 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
461 for t in tileables)
--> 462 result = self._sess.run(*tileables, **kw)
463
464 for t in tileables:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
105 # set number of running cores
106 self.context.set_ncores(kw['n_parallel'])
--> 107 res = self._executor.execute_tileables(tileables, **kw)
108 return res
109
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
878 n_parallel=n_parallel or n_thread,
879 print_progress=print_progress, mock=mock,
--> 880 chunk_result=chunk_result)
881
882 # update shape of tileable and its chunks whatever it's successful or not
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
688 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
689 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 690 res = graph_execution.execute(retval)
691 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
692 if mock:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute(self, retval)
569 # wait until all the futures completed
570 for future in executed_futures:
--> 571 future.result()
572
573 if retval:
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in result(self, timeout)
423 raise CancelledError()
424 elif self._state == FINISHED:
--> 425 return self.__get_result()
426
427 self._condition.wait(timeout)
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/thread.py in run(self)
54
55 try:
---> 56 result = self.fn(*self.args, **self.kwargs)
57 except BaseException as exc:
58 self.future.set_exception(exc)
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in _execute_operand(self, op)
441 # so we pass the first operand's first output to Executor.handle
442 first_op = ops[0]
--> 443 Executor.handle(first_op, results, self._mock)
444
445 # update maximal memory usage during execution
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in handle(cls, op, results, mock)
639 # The `UFuncTypeError` was introduced by numpy#12593 since v1.17.0.
640 try:
--> 641 return runner(results, op)
642 except UFuncTypeError as e:
643 raise TypeError(str(e)).with_traceback(sys.exc_info()[2]) from None
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in execute(cls, ctx, op)
405 cls._execute_agg(ctx, op)
406 elif op.stage == OperandStage.map:
--> 407 cls._execute_map(ctx, op)
408 else:
409 in_data = ctx[op.inputs[0].key]
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_map(cls, ctx, op)
380 cls._execute_map_with_count(ctx, op)
381 else:
--> 382 cls._execute_without_count(ctx, op)
383
384 @classmethod
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_without_count(cls, ctx, op, reduction_func)
370 # cannot just do xdf.DataFrame(r).T
371 # cuz the dtype will be object since pandas 1.0
--> 372 df = xdf.DataFrame(OrderedDict((d, [v]) for d, v in r.iteritems()))
373 else:
374 df = xdf.DataFrame(r)
AttributeError: 'Series' object has no attribute 'iteritems'
|
AttributeError
|
def allocate_top_resources(self, fetch_requests=False):
"""
Allocate resources given the order in AssignerActor
"""
t = time.time()
if self._worker_metrics is None or self._worker_metric_time + 1 < time.time():
# update worker metrics from ResourceActor
self._worker_metrics = self._resource_ref.get_workers_meta()
self._worker_metric_time = t
if not self._worker_metrics:
return
if fetch_requests:
requests = self._assigner_ref.get_allocate_requests()
if not requests:
return
max_allocates = (
sys.maxsize if any(v is None for v in requests) else sum(requests)
)
else:
max_allocates = sys.maxsize
unassigned = []
reject_workers = set()
assigned = 0
# the assigning procedure will continue till all workers rejected
# or max_allocates reached
while len(reject_workers) < len(self._worker_metrics) and assigned < max_allocates:
item = self._assigner_ref.pop_head()
if not item:
break
try:
alloc_ep, rejects = self._allocate_resource(
item.session_id,
item.op_key,
item.op_info,
item.target_worker,
reject_workers=reject_workers,
)
except: # noqa: E722
logger.exception("Unexpected error occurred in %s", self.uid)
if item.callback: # pragma: no branch
self.tell_promise(item.callback, *sys.exc_info(), _accept=False)
else:
self.get_actor_ref(
BaseOperandActor.gen_uid(item.session_id, item.op_key)
).handle_unexpected_failure(*sys.exc_info(), _tell=True, _wait=False)
continue
# collect workers failed to assign operand to
reject_workers.update(rejects)
if alloc_ep:
# assign successfully, we remove the application
self._assigner_ref.remove_apply(item.op_key, _tell=True)
self._session_last_assigns[item.session_id] = time.time()
assigned += 1
else:
# put the unassigned item into unassigned list to add back to the queue later
unassigned.append(item)
if unassigned:
# put unassigned back to the queue, if any
self._assigner_ref.extend(unassigned, _tell=True)
if not fetch_requests:
self._assigner_ref.get_allocate_requests(_tell=True, _wait=False)
|
def allocate_top_resources(self, fetch_requests=False):
"""
Allocate resources given the order in AssignerActor
"""
t = time.time()
if self._worker_metrics is None or self._worker_metric_time + 1 < time.time():
# update worker metrics from ResourceActor
self._worker_metrics = self._resource_ref.get_workers_meta()
self._worker_metric_time = t
if not self._worker_metrics:
return
if fetch_requests:
requests = self._assigner_ref.get_allocate_requests()
if not requests:
return
max_allocates = (
sys.maxsize if any(v is None for v in requests) else sum(requests)
)
else:
max_allocates = sys.maxsize
unassigned = []
reject_workers = set()
assigned = 0
# the assigning procedure will continue till all workers rejected
# or max_allocates reached
while len(reject_workers) < len(self._worker_metrics) and assigned < max_allocates:
item = self._assigner_ref.pop_head()
if not item:
break
try:
alloc_ep, rejects = self._allocate_resource(
item.session_id,
item.op_key,
item.op_info,
item.target_worker,
reject_workers=reject_workers,
)
except: # noqa: E722
logger.exception("Unexpected error occurred in %s", self.uid)
if item.callback: # pragma: no branch
self.tell_promise(item.callback, *sys.exc_info(), _accept=False)
continue
# collect workers failed to assign operand to
reject_workers.update(rejects)
if alloc_ep:
# assign successfully, we remove the application
self._assigner_ref.remove_apply(item.op_key, _tell=True)
self._session_last_assigns[item.session_id] = time.time()
assigned += 1
else:
# put the unassigned item into unassigned list to add back to the queue later
unassigned.append(item)
if unassigned:
# put unassigned back to the queue, if any
self._assigner_ref.extend(unassigned, _tell=True)
if not fetch_requests:
self._assigner_ref.get_allocate_requests(_tell=True, _wait=False)
|
https://github.com/mars-project/mars/issues/1533
|
AttributeError Traceback (most recent call last)
<ipython-input-3-a85925f048d0> in <module>
1 start=time.time()
2 df_mars=df_mars.to_gpu()
----> 3 print(df_mars.sum().to_frame(name="sum").execute())
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
580
581 def execute(self, session=None, **kw):
--> 582 self._data.execute(session, **kw)
583 return self
584
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
460 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
461 for t in tileables)
--> 462 result = self._sess.run(*tileables, **kw)
463
464 for t in tileables:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
105 # set number of running cores
106 self.context.set_ncores(kw['n_parallel'])
--> 107 res = self._executor.execute_tileables(tileables, **kw)
108 return res
109
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
878 n_parallel=n_parallel or n_thread,
879 print_progress=print_progress, mock=mock,
--> 880 chunk_result=chunk_result)
881
882 # update shape of tileable and its chunks whatever it's successful or not
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
688 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
689 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 690 res = graph_execution.execute(retval)
691 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
692 if mock:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute(self, retval)
569 # wait until all the futures completed
570 for future in executed_futures:
--> 571 future.result()
572
573 if retval:
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in result(self, timeout)
423 raise CancelledError()
424 elif self._state == FINISHED:
--> 425 return self.__get_result()
426
427 self._condition.wait(timeout)
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/thread.py in run(self)
54
55 try:
---> 56 result = self.fn(*self.args, **self.kwargs)
57 except BaseException as exc:
58 self.future.set_exception(exc)
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in _execute_operand(self, op)
441 # so we pass the first operand's first output to Executor.handle
442 first_op = ops[0]
--> 443 Executor.handle(first_op, results, self._mock)
444
445 # update maximal memory usage during execution
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in handle(cls, op, results, mock)
639 # The `UFuncTypeError` was introduced by numpy#12593 since v1.17.0.
640 try:
--> 641 return runner(results, op)
642 except UFuncTypeError as e:
643 raise TypeError(str(e)).with_traceback(sys.exc_info()[2]) from None
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in execute(cls, ctx, op)
405 cls._execute_agg(ctx, op)
406 elif op.stage == OperandStage.map:
--> 407 cls._execute_map(ctx, op)
408 else:
409 in_data = ctx[op.inputs[0].key]
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_map(cls, ctx, op)
380 cls._execute_map_with_count(ctx, op)
381 else:
--> 382 cls._execute_without_count(ctx, op)
383
384 @classmethod
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_without_count(cls, ctx, op, reduction_func)
370 # cannot just do xdf.DataFrame(r).T
371 # cuz the dtype will be object since pandas 1.0
--> 372 df = xdf.DataFrame(OrderedDict((d, [v]) for d, v in r.iteritems()))
373 else:
374 df = xdf.DataFrame(r)
AttributeError: 'Series' object has no attribute 'iteritems'
|
AttributeError
|
def _on_ready(self):
self.worker = None
self._execution_ref = None
def _apply_fail(*exc_info):
if issubclass(exc_info[0], DependencyMissing):
logger.warning(
"DependencyMissing met, operand %s will be back to UNSCHEDULED.",
self._op_key,
)
self.worker = None
self.ref().start_operand(OperandState.UNSCHEDULED, _tell=True)
else:
self.handle_unexpected_failure(*exc_info)
# if under retry, give application a delay
delay = options.scheduler.retry_delay if self.retries else 0
# Send resource application. Submit job when worker assigned
if not self._allocated:
self._assigner_ref.apply_for_resource(
self._session_id, self._op_key, self._info, _delay=delay, _promise=True
).catch(_apply_fail)
|
def _on_ready(self):
self.worker = None
self._execution_ref = None
def _apply_fail(*exc_info):
if issubclass(exc_info[0], DependencyMissing):
logger.warning(
"DependencyMissing met, operand %s will be back to UNSCHEDULED.",
self._op_key,
)
self.worker = None
self.ref().start_operand(OperandState.UNSCHEDULED, _tell=True)
else:
raise exc_info[1].with_traceback(exc_info[2]) from None
# if under retry, give application a delay
delay = options.scheduler.retry_delay if self.retries else 0
# Send resource application. Submit job when worker assigned
if not self._allocated:
self._assigner_ref.apply_for_resource(
self._session_id, self._op_key, self._info, _delay=delay, _promise=True
).catch(_apply_fail)
|
https://github.com/mars-project/mars/issues/1533
|
AttributeError Traceback (most recent call last)
<ipython-input-3-a85925f048d0> in <module>
1 start=time.time()
2 df_mars=df_mars.to_gpu()
----> 3 print(df_mars.sum().to_frame(name="sum").execute())
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
580
581 def execute(self, session=None, **kw):
--> 582 self._data.execute(session, **kw)
583 return self
584
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
460 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
461 for t in tileables)
--> 462 result = self._sess.run(*tileables, **kw)
463
464 for t in tileables:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
105 # set number of running cores
106 self.context.set_ncores(kw['n_parallel'])
--> 107 res = self._executor.execute_tileables(tileables, **kw)
108 return res
109
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
878 n_parallel=n_parallel or n_thread,
879 print_progress=print_progress, mock=mock,
--> 880 chunk_result=chunk_result)
881
882 # update shape of tileable and its chunks whatever it's successful or not
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
688 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
689 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 690 res = graph_execution.execute(retval)
691 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
692 if mock:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute(self, retval)
569 # wait until all the futures completed
570 for future in executed_futures:
--> 571 future.result()
572
573 if retval:
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in result(self, timeout)
423 raise CancelledError()
424 elif self._state == FINISHED:
--> 425 return self.__get_result()
426
427 self._condition.wait(timeout)
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/thread.py in run(self)
54
55 try:
---> 56 result = self.fn(*self.args, **self.kwargs)
57 except BaseException as exc:
58 self.future.set_exception(exc)
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in _execute_operand(self, op)
441 # so we pass the first operand's first output to Executor.handle
442 first_op = ops[0]
--> 443 Executor.handle(first_op, results, self._mock)
444
445 # update maximal memory usage during execution
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in handle(cls, op, results, mock)
639 # The `UFuncTypeError` was introduced by numpy#12593 since v1.17.0.
640 try:
--> 641 return runner(results, op)
642 except UFuncTypeError as e:
643 raise TypeError(str(e)).with_traceback(sys.exc_info()[2]) from None
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in execute(cls, ctx, op)
405 cls._execute_agg(ctx, op)
406 elif op.stage == OperandStage.map:
--> 407 cls._execute_map(ctx, op)
408 else:
409 in_data = ctx[op.inputs[0].key]
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_map(cls, ctx, op)
380 cls._execute_map_with_count(ctx, op)
381 else:
--> 382 cls._execute_without_count(ctx, op)
383
384 @classmethod
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_without_count(cls, ctx, op, reduction_func)
370 # cannot just do xdf.DataFrame(r).T
371 # cuz the dtype will be object since pandas 1.0
--> 372 df = xdf.DataFrame(OrderedDict((d, [v]) for d, v in r.iteritems()))
373 else:
374 df = xdf.DataFrame(r)
AttributeError: 'Series' object has no attribute 'iteritems'
|
AttributeError
|
def _apply_fail(*exc_info):
if issubclass(exc_info[0], DependencyMissing):
logger.warning(
"DependencyMissing met, operand %s will be back to UNSCHEDULED.",
self._op_key,
)
self.worker = None
self.ref().start_operand(OperandState.UNSCHEDULED, _tell=True)
else:
self.handle_unexpected_failure(*exc_info)
|
def _apply_fail(*exc_info):
if issubclass(exc_info[0], DependencyMissing):
logger.warning(
"DependencyMissing met, operand %s will be back to UNSCHEDULED.",
self._op_key,
)
self.worker = None
self.ref().start_operand(OperandState.UNSCHEDULED, _tell=True)
else:
raise exc_info[1].with_traceback(exc_info[2]) from None
|
https://github.com/mars-project/mars/issues/1533
|
AttributeError Traceback (most recent call last)
<ipython-input-3-a85925f048d0> in <module>
1 start=time.time()
2 df_mars=df_mars.to_gpu()
----> 3 print(df_mars.sum().to_frame(name="sum").execute())
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
580
581 def execute(self, session=None, **kw):
--> 582 self._data.execute(session, **kw)
583 return self
584
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
460 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
461 for t in tileables)
--> 462 result = self._sess.run(*tileables, **kw)
463
464 for t in tileables:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
105 # set number of running cores
106 self.context.set_ncores(kw['n_parallel'])
--> 107 res = self._executor.execute_tileables(tileables, **kw)
108 return res
109
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
878 n_parallel=n_parallel or n_thread,
879 print_progress=print_progress, mock=mock,
--> 880 chunk_result=chunk_result)
881
882 # update shape of tileable and its chunks whatever it's successful or not
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
688 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
689 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 690 res = graph_execution.execute(retval)
691 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
692 if mock:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute(self, retval)
569 # wait until all the futures completed
570 for future in executed_futures:
--> 571 future.result()
572
573 if retval:
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in result(self, timeout)
423 raise CancelledError()
424 elif self._state == FINISHED:
--> 425 return self.__get_result()
426
427 self._condition.wait(timeout)
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/thread.py in run(self)
54
55 try:
---> 56 result = self.fn(*self.args, **self.kwargs)
57 except BaseException as exc:
58 self.future.set_exception(exc)
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in _execute_operand(self, op)
441 # so we pass the first operand's first output to Executor.handle
442 first_op = ops[0]
--> 443 Executor.handle(first_op, results, self._mock)
444
445 # update maximal memory usage during execution
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in handle(cls, op, results, mock)
639 # The `UFuncTypeError` was introduced by numpy#12593 since v1.17.0.
640 try:
--> 641 return runner(results, op)
642 except UFuncTypeError as e:
643 raise TypeError(str(e)).with_traceback(sys.exc_info()[2]) from None
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in execute(cls, ctx, op)
405 cls._execute_agg(ctx, op)
406 elif op.stage == OperandStage.map:
--> 407 cls._execute_map(ctx, op)
408 else:
409 in_data = ctx[op.inputs[0].key]
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_map(cls, ctx, op)
380 cls._execute_map_with_count(ctx, op)
381 else:
--> 382 cls._execute_without_count(ctx, op)
383
384 @classmethod
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_without_count(cls, ctx, op, reduction_func)
370 # cannot just do xdf.DataFrame(r).T
371 # cuz the dtype will be object since pandas 1.0
--> 372 df = xdf.DataFrame(OrderedDict((d, [v]) for d, v in r.iteritems()))
373 else:
374 df = xdf.DataFrame(r)
AttributeError: 'Series' object has no attribute 'iteritems'
|
AttributeError
|
def _on_running(self):
self._execution_ref = self._get_execution_ref()
# notify successors to propagate priority changes
for out_key in self._succ_keys:
self._get_operand_actor(out_key).add_running_predecessor(
self._op_key, self.worker, _tell=True, _wait=False
)
@log_unhandled
def _acceptor(data_sizes, data_shapes):
self._allocated = False
if not self._is_worker_alive():
return
self._resource_ref.deallocate_resource(
self._session_id, self._op_key, self.worker, _tell=True, _wait=False
)
self._data_sizes = data_sizes
self._data_shapes = data_shapes
self._io_meta["data_targets"] = list(data_sizes)
self.start_operand(OperandState.FINISHED)
@log_unhandled
def _rejecter(*exc):
self._allocated = False
# handling exception occurrence of operand execution
exc_type = exc[0]
self._resource_ref.deallocate_resource(
self._session_id, self._op_key, self.worker, _tell=True, _wait=False
)
if self.state == OperandState.CANCELLING:
logger.warning("Execution of operand %s cancelled.", self._op_key)
self.free_data(OperandState.CANCELLED)
return
if issubclass(exc_type, ExecutionInterrupted):
# job cancelled: switch to cancelled
logger.warning("Execution of operand %s interrupted.", self._op_key)
self.free_data(OperandState.CANCELLED)
elif issubclass(exc_type, DependencyMissing):
logger.warning(
"Operand %s moved to UNSCHEDULED because of DependencyMissing.",
self._op_key,
)
self.ref().start_operand(OperandState.UNSCHEDULED, _tell=True)
else:
self.handle_unexpected_failure(*exc)
try:
with rewrite_worker_errors():
if self._submit_promise is None:
self._submit_promise = self._execution_ref.add_finish_callback(
self._session_id, self._op_key, _promise=True, _spawn=False
)
self._submit_promise.then(_acceptor, _rejecter)
except WorkerDead:
logger.debug(
"Worker %s dead when adding callback for operand %s",
self.worker,
self._op_key,
)
self._resource_ref.detach_dead_workers([self.worker], _tell=True)
finally:
self._submit_promise = None
|
def _on_running(self):
self._execution_ref = self._get_execution_ref()
# notify successors to propagate priority changes
for out_key in self._succ_keys:
self._get_operand_actor(out_key).add_running_predecessor(
self._op_key, self.worker, _tell=True, _wait=False
)
@log_unhandled
def _acceptor(data_sizes, data_shapes):
self._allocated = False
if not self._is_worker_alive():
return
self._resource_ref.deallocate_resource(
self._session_id, self._op_key, self.worker, _tell=True, _wait=False
)
self._data_sizes = data_sizes
self._data_shapes = data_shapes
self._io_meta["data_targets"] = list(data_sizes)
self.start_operand(OperandState.FINISHED)
@log_unhandled
def _rejecter(*exc):
self._allocated = False
# handling exception occurrence of operand execution
exc_type = exc[0]
self._resource_ref.deallocate_resource(
self._session_id, self._op_key, self.worker, _tell=True, _wait=False
)
if self.state == OperandState.CANCELLING:
logger.warning("Execution of operand %s cancelled.", self._op_key)
self.free_data(OperandState.CANCELLED)
return
if issubclass(exc_type, ExecutionInterrupted):
# job cancelled: switch to cancelled
logger.warning("Execution of operand %s interrupted.", self._op_key)
self.free_data(OperandState.CANCELLED)
elif issubclass(exc_type, DependencyMissing):
logger.warning(
"Operand %s moved to UNSCHEDULED because of DependencyMissing.",
self._op_key,
)
self.ref().start_operand(OperandState.UNSCHEDULED, _tell=True)
else:
logger.exception(
"Attempt %d: Unexpected error %s occurred in executing operand %s in %s",
self.retries + 1,
exc_type.__name__,
self._op_key,
self.worker,
exc_info=exc,
)
# increase retry times
self.retries += 1
if (
not self._info["retryable"]
or self.retries >= options.scheduler.retry_num
):
# no further trial
self.state = OperandState.FATAL
self._exc = exc
else:
self.state = OperandState.READY
self.ref().start_operand(_tell=True)
try:
with rewrite_worker_errors():
if self._submit_promise is None:
self._submit_promise = self._execution_ref.add_finish_callback(
self._session_id, self._op_key, _promise=True, _spawn=False
)
self._submit_promise.then(_acceptor, _rejecter)
except WorkerDead:
logger.debug(
"Worker %s dead when adding callback for operand %s",
self.worker,
self._op_key,
)
self._resource_ref.detach_dead_workers([self.worker], _tell=True)
finally:
self._submit_promise = None
|
https://github.com/mars-project/mars/issues/1533
|
AttributeError Traceback (most recent call last)
<ipython-input-3-a85925f048d0> in <module>
1 start=time.time()
2 df_mars=df_mars.to_gpu()
----> 3 print(df_mars.sum().to_frame(name="sum").execute())
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
580
581 def execute(self, session=None, **kw):
--> 582 self._data.execute(session, **kw)
583 return self
584
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
460 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
461 for t in tileables)
--> 462 result = self._sess.run(*tileables, **kw)
463
464 for t in tileables:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
105 # set number of running cores
106 self.context.set_ncores(kw['n_parallel'])
--> 107 res = self._executor.execute_tileables(tileables, **kw)
108 return res
109
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
878 n_parallel=n_parallel or n_thread,
879 print_progress=print_progress, mock=mock,
--> 880 chunk_result=chunk_result)
881
882 # update shape of tileable and its chunks whatever it's successful or not
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
688 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
689 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 690 res = graph_execution.execute(retval)
691 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
692 if mock:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute(self, retval)
569 # wait until all the futures completed
570 for future in executed_futures:
--> 571 future.result()
572
573 if retval:
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in result(self, timeout)
423 raise CancelledError()
424 elif self._state == FINISHED:
--> 425 return self.__get_result()
426
427 self._condition.wait(timeout)
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/thread.py in run(self)
54
55 try:
---> 56 result = self.fn(*self.args, **self.kwargs)
57 except BaseException as exc:
58 self.future.set_exception(exc)
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in _execute_operand(self, op)
441 # so we pass the first operand's first output to Executor.handle
442 first_op = ops[0]
--> 443 Executor.handle(first_op, results, self._mock)
444
445 # update maximal memory usage during execution
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in handle(cls, op, results, mock)
639 # The `UFuncTypeError` was introduced by numpy#12593 since v1.17.0.
640 try:
--> 641 return runner(results, op)
642 except UFuncTypeError as e:
643 raise TypeError(str(e)).with_traceback(sys.exc_info()[2]) from None
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in execute(cls, ctx, op)
405 cls._execute_agg(ctx, op)
406 elif op.stage == OperandStage.map:
--> 407 cls._execute_map(ctx, op)
408 else:
409 in_data = ctx[op.inputs[0].key]
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_map(cls, ctx, op)
380 cls._execute_map_with_count(ctx, op)
381 else:
--> 382 cls._execute_without_count(ctx, op)
383
384 @classmethod
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_without_count(cls, ctx, op, reduction_func)
370 # cannot just do xdf.DataFrame(r).T
371 # cuz the dtype will be object since pandas 1.0
--> 372 df = xdf.DataFrame(OrderedDict((d, [v]) for d, v in r.iteritems()))
373 else:
374 df = xdf.DataFrame(r)
AttributeError: 'Series' object has no attribute 'iteritems'
|
AttributeError
|
def _rejecter(*exc):
self._allocated = False
# handling exception occurrence of operand execution
exc_type = exc[0]
self._resource_ref.deallocate_resource(
self._session_id, self._op_key, self.worker, _tell=True, _wait=False
)
if self.state == OperandState.CANCELLING:
logger.warning("Execution of operand %s cancelled.", self._op_key)
self.free_data(OperandState.CANCELLED)
return
if issubclass(exc_type, ExecutionInterrupted):
# job cancelled: switch to cancelled
logger.warning("Execution of operand %s interrupted.", self._op_key)
self.free_data(OperandState.CANCELLED)
elif issubclass(exc_type, DependencyMissing):
logger.warning(
"Operand %s moved to UNSCHEDULED because of DependencyMissing.",
self._op_key,
)
self.ref().start_operand(OperandState.UNSCHEDULED, _tell=True)
else:
self.handle_unexpected_failure(*exc)
|
def _rejecter(*exc):
self._allocated = False
# handling exception occurrence of operand execution
exc_type = exc[0]
self._resource_ref.deallocate_resource(
self._session_id, self._op_key, self.worker, _tell=True, _wait=False
)
if self.state == OperandState.CANCELLING:
logger.warning("Execution of operand %s cancelled.", self._op_key)
self.free_data(OperandState.CANCELLED)
return
if issubclass(exc_type, ExecutionInterrupted):
# job cancelled: switch to cancelled
logger.warning("Execution of operand %s interrupted.", self._op_key)
self.free_data(OperandState.CANCELLED)
elif issubclass(exc_type, DependencyMissing):
logger.warning(
"Operand %s moved to UNSCHEDULED because of DependencyMissing.",
self._op_key,
)
self.ref().start_operand(OperandState.UNSCHEDULED, _tell=True)
else:
logger.exception(
"Attempt %d: Unexpected error %s occurred in executing operand %s in %s",
self.retries + 1,
exc_type.__name__,
self._op_key,
self.worker,
exc_info=exc,
)
# increase retry times
self.retries += 1
if not self._info["retryable"] or self.retries >= options.scheduler.retry_num:
# no further trial
self.state = OperandState.FATAL
self._exc = exc
else:
self.state = OperandState.READY
self.ref().start_operand(_tell=True)
|
https://github.com/mars-project/mars/issues/1533
|
AttributeError Traceback (most recent call last)
<ipython-input-3-a85925f048d0> in <module>
1 start=time.time()
2 df_mars=df_mars.to_gpu()
----> 3 print(df_mars.sum().to_frame(name="sum").execute())
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
580
581 def execute(self, session=None, **kw):
--> 582 self._data.execute(session, **kw)
583 return self
584
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
460 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
461 for t in tileables)
--> 462 result = self._sess.run(*tileables, **kw)
463
464 for t in tileables:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
105 # set number of running cores
106 self.context.set_ncores(kw['n_parallel'])
--> 107 res = self._executor.execute_tileables(tileables, **kw)
108 return res
109
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
878 n_parallel=n_parallel or n_thread,
879 print_progress=print_progress, mock=mock,
--> 880 chunk_result=chunk_result)
881
882 # update shape of tileable and its chunks whatever it's successful or not
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
688 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
689 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 690 res = graph_execution.execute(retval)
691 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
692 if mock:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute(self, retval)
569 # wait until all the futures completed
570 for future in executed_futures:
--> 571 future.result()
572
573 if retval:
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in result(self, timeout)
423 raise CancelledError()
424 elif self._state == FINISHED:
--> 425 return self.__get_result()
426
427 self._condition.wait(timeout)
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/thread.py in run(self)
54
55 try:
---> 56 result = self.fn(*self.args, **self.kwargs)
57 except BaseException as exc:
58 self.future.set_exception(exc)
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in _execute_operand(self, op)
441 # so we pass the first operand's first output to Executor.handle
442 first_op = ops[0]
--> 443 Executor.handle(first_op, results, self._mock)
444
445 # update maximal memory usage during execution
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in handle(cls, op, results, mock)
639 # The `UFuncTypeError` was introduced by numpy#12593 since v1.17.0.
640 try:
--> 641 return runner(results, op)
642 except UFuncTypeError as e:
643 raise TypeError(str(e)).with_traceback(sys.exc_info()[2]) from None
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in execute(cls, ctx, op)
405 cls._execute_agg(ctx, op)
406 elif op.stage == OperandStage.map:
--> 407 cls._execute_map(ctx, op)
408 else:
409 in_data = ctx[op.inputs[0].key]
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_map(cls, ctx, op)
380 cls._execute_map_with_count(ctx, op)
381 else:
--> 382 cls._execute_without_count(ctx, op)
383
384 @classmethod
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_without_count(cls, ctx, op, reduction_func)
370 # cannot just do xdf.DataFrame(r).T
371 # cuz the dtype will be object since pandas 1.0
--> 372 df = xdf.DataFrame(OrderedDict((d, [v]) for d, v in r.iteritems()))
373 else:
374 df = xdf.DataFrame(r)
AttributeError: 'Series' object has no attribute 'iteritems'
|
AttributeError
|
def run(self, *tileables, **kw):
with self.context:
if self._executor is None:
raise RuntimeError("Session has closed")
dest_gpu = all(tileable.op.gpu for tileable in tileables)
if dest_gpu:
self._executor._engine = "cupy"
else:
self._executor._engine = None
if "n_parallel" not in kw:
if dest_gpu:
# GPU
cnt = cuda_count() or 0
if cnt == 0:
raise RuntimeError(
"No GPU found for execution. "
"Make sure NVML library is in your library path."
)
kw["n_parallel"] = cnt
else:
# CPU
kw["n_parallel"] = cpu_count()
# set number of running cores
self.context.set_ncores(kw["n_parallel"])
res = self._executor.execute_tileables(tileables, **kw)
return res
|
def run(self, *tileables, **kw):
with self.context:
if self._executor is None:
raise RuntimeError("Session has closed")
dest_gpu = all(tileable.op.gpu for tileable in tileables)
if dest_gpu:
self._executor._engine = "cupy"
else:
self._executor._engine = None
if "n_parallel" not in kw:
if dest_gpu:
# GPU
cnt = cuda_count() if cuda_count is not None else 0
if cnt == 0:
raise RuntimeError(
"No GPU found for execution. "
"Make sure NVML library is in your library path."
)
kw["n_parallel"] = cnt
else:
# CPU
kw["n_parallel"] = cpu_count()
# set number of running cores
self.context.set_ncores(kw["n_parallel"])
res = self._executor.execute_tileables(tileables, **kw)
return res
|
https://github.com/mars-project/mars/issues/1533
|
AttributeError Traceback (most recent call last)
<ipython-input-3-a85925f048d0> in <module>
1 start=time.time()
2 df_mars=df_mars.to_gpu()
----> 3 print(df_mars.sum().to_frame(name="sum").execute())
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
580
581 def execute(self, session=None, **kw):
--> 582 self._data.execute(session, **kw)
583 return self
584
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
460 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
461 for t in tileables)
--> 462 result = self._sess.run(*tileables, **kw)
463
464 for t in tileables:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
105 # set number of running cores
106 self.context.set_ncores(kw['n_parallel'])
--> 107 res = self._executor.execute_tileables(tileables, **kw)
108 return res
109
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
878 n_parallel=n_parallel or n_thread,
879 print_progress=print_progress, mock=mock,
--> 880 chunk_result=chunk_result)
881
882 # update shape of tileable and its chunks whatever it's successful or not
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
688 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
689 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 690 res = graph_execution.execute(retval)
691 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
692 if mock:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute(self, retval)
569 # wait until all the futures completed
570 for future in executed_futures:
--> 571 future.result()
572
573 if retval:
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in result(self, timeout)
423 raise CancelledError()
424 elif self._state == FINISHED:
--> 425 return self.__get_result()
426
427 self._condition.wait(timeout)
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/thread.py in run(self)
54
55 try:
---> 56 result = self.fn(*self.args, **self.kwargs)
57 except BaseException as exc:
58 self.future.set_exception(exc)
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in _execute_operand(self, op)
441 # so we pass the first operand's first output to Executor.handle
442 first_op = ops[0]
--> 443 Executor.handle(first_op, results, self._mock)
444
445 # update maximal memory usage during execution
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in handle(cls, op, results, mock)
639 # The `UFuncTypeError` was introduced by numpy#12593 since v1.17.0.
640 try:
--> 641 return runner(results, op)
642 except UFuncTypeError as e:
643 raise TypeError(str(e)).with_traceback(sys.exc_info()[2]) from None
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in execute(cls, ctx, op)
405 cls._execute_agg(ctx, op)
406 elif op.stage == OperandStage.map:
--> 407 cls._execute_map(ctx, op)
408 else:
409 in_data = ctx[op.inputs[0].key]
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_map(cls, ctx, op)
380 cls._execute_map_with_count(ctx, op)
381 else:
--> 382 cls._execute_without_count(ctx, op)
383
384 @classmethod
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_without_count(cls, ctx, op, reduction_func)
370 # cannot just do xdf.DataFrame(r).T
371 # cuz the dtype will be object since pandas 1.0
--> 372 df = xdf.DataFrame(OrderedDict((d, [v]) for d, v in r.iteritems()))
373 else:
374 df = xdf.DataFrame(r)
AttributeError: 'Series' object has no attribute 'iteritems'
|
AttributeError
|
def lazy_import(name, package=None, globals=None, locals=None, rename=None):
rename = rename or name
prefix_name = name.split(".", 1)[0]
class LazyModule(object):
def __getattr__(self, item):
if item.startswith("_pytest") or item in ("__bases__", "__test__"):
raise AttributeError(item)
real_mod = importlib.import_module(name, package=package)
if globals is not None and rename in globals:
globals[rename] = real_mod
elif locals is not None:
locals[rename] = real_mod
return getattr(real_mod, item)
if pkgutil.find_loader(prefix_name) is not None:
return LazyModule()
else:
return None
|
def lazy_import(name, package=None, globals=None, locals=None, rename=None):
rename = rename or name
prefix_name = name.split(".", 1)[0]
class LazyModule(object):
def __getattr__(self, item):
real_mod = importlib.import_module(name, package=package)
if globals is not None and rename in globals:
globals[rename] = real_mod
elif locals is not None:
locals[rename] = real_mod
return getattr(real_mod, item)
if pkgutil.find_loader(prefix_name) is not None:
return LazyModule()
else:
return None
|
https://github.com/mars-project/mars/issues/1533
|
AttributeError Traceback (most recent call last)
<ipython-input-3-a85925f048d0> in <module>
1 start=time.time()
2 df_mars=df_mars.to_gpu()
----> 3 print(df_mars.sum().to_frame(name="sum").execute())
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
580
581 def execute(self, session=None, **kw):
--> 582 self._data.execute(session, **kw)
583 return self
584
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
460 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
461 for t in tileables)
--> 462 result = self._sess.run(*tileables, **kw)
463
464 for t in tileables:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
105 # set number of running cores
106 self.context.set_ncores(kw['n_parallel'])
--> 107 res = self._executor.execute_tileables(tileables, **kw)
108 return res
109
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
878 n_parallel=n_parallel or n_thread,
879 print_progress=print_progress, mock=mock,
--> 880 chunk_result=chunk_result)
881
882 # update shape of tileable and its chunks whatever it's successful or not
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
688 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
689 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 690 res = graph_execution.execute(retval)
691 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
692 if mock:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute(self, retval)
569 # wait until all the futures completed
570 for future in executed_futures:
--> 571 future.result()
572
573 if retval:
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in result(self, timeout)
423 raise CancelledError()
424 elif self._state == FINISHED:
--> 425 return self.__get_result()
426
427 self._condition.wait(timeout)
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/thread.py in run(self)
54
55 try:
---> 56 result = self.fn(*self.args, **self.kwargs)
57 except BaseException as exc:
58 self.future.set_exception(exc)
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in _execute_operand(self, op)
441 # so we pass the first operand's first output to Executor.handle
442 first_op = ops[0]
--> 443 Executor.handle(first_op, results, self._mock)
444
445 # update maximal memory usage during execution
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in handle(cls, op, results, mock)
639 # The `UFuncTypeError` was introduced by numpy#12593 since v1.17.0.
640 try:
--> 641 return runner(results, op)
642 except UFuncTypeError as e:
643 raise TypeError(str(e)).with_traceback(sys.exc_info()[2]) from None
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in execute(cls, ctx, op)
405 cls._execute_agg(ctx, op)
406 elif op.stage == OperandStage.map:
--> 407 cls._execute_map(ctx, op)
408 else:
409 in_data = ctx[op.inputs[0].key]
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_map(cls, ctx, op)
380 cls._execute_map_with_count(ctx, op)
381 else:
--> 382 cls._execute_without_count(ctx, op)
383
384 @classmethod
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_without_count(cls, ctx, op, reduction_func)
370 # cannot just do xdf.DataFrame(r).T
371 # cuz the dtype will be object since pandas 1.0
--> 372 df = xdf.DataFrame(OrderedDict((d, [v]) for d, v in r.iteritems()))
373 else:
374 df = xdf.DataFrame(r)
AttributeError: 'Series' object has no attribute 'iteritems'
|
AttributeError
|
def __getattr__(self, item):
if item.startswith("_pytest") or item in ("__bases__", "__test__"):
raise AttributeError(item)
real_mod = importlib.import_module(name, package=package)
if globals is not None and rename in globals:
globals[rename] = real_mod
elif locals is not None:
locals[rename] = real_mod
return getattr(real_mod, item)
|
def __getattr__(self, item):
real_mod = importlib.import_module(name, package=package)
if globals is not None and rename in globals:
globals[rename] = real_mod
elif locals is not None:
locals[rename] = real_mod
return getattr(real_mod, item)
|
https://github.com/mars-project/mars/issues/1533
|
AttributeError Traceback (most recent call last)
<ipython-input-3-a85925f048d0> in <module>
1 start=time.time()
2 df_mars=df_mars.to_gpu()
----> 3 print(df_mars.sum().to_frame(name="sum").execute())
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
580
581 def execute(self, session=None, **kw):
--> 582 self._data.execute(session, **kw)
583 return self
584
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
460 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
461 for t in tileables)
--> 462 result = self._sess.run(*tileables, **kw)
463
464 for t in tileables:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
105 # set number of running cores
106 self.context.set_ncores(kw['n_parallel'])
--> 107 res = self._executor.execute_tileables(tileables, **kw)
108 return res
109
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
878 n_parallel=n_parallel or n_thread,
879 print_progress=print_progress, mock=mock,
--> 880 chunk_result=chunk_result)
881
882 # update shape of tileable and its chunks whatever it's successful or not
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
688 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
689 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 690 res = graph_execution.execute(retval)
691 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
692 if mock:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute(self, retval)
569 # wait until all the futures completed
570 for future in executed_futures:
--> 571 future.result()
572
573 if retval:
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in result(self, timeout)
423 raise CancelledError()
424 elif self._state == FINISHED:
--> 425 return self.__get_result()
426
427 self._condition.wait(timeout)
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/thread.py in run(self)
54
55 try:
---> 56 result = self.fn(*self.args, **self.kwargs)
57 except BaseException as exc:
58 self.future.set_exception(exc)
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in _execute_operand(self, op)
441 # so we pass the first operand's first output to Executor.handle
442 first_op = ops[0]
--> 443 Executor.handle(first_op, results, self._mock)
444
445 # update maximal memory usage during execution
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in handle(cls, op, results, mock)
639 # The `UFuncTypeError` was introduced by numpy#12593 since v1.17.0.
640 try:
--> 641 return runner(results, op)
642 except UFuncTypeError as e:
643 raise TypeError(str(e)).with_traceback(sys.exc_info()[2]) from None
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in execute(cls, ctx, op)
405 cls._execute_agg(ctx, op)
406 elif op.stage == OperandStage.map:
--> 407 cls._execute_map(ctx, op)
408 else:
409 in_data = ctx[op.inputs[0].key]
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_map(cls, ctx, op)
380 cls._execute_map_with_count(ctx, op)
381 else:
--> 382 cls._execute_without_count(ctx, op)
383
384 @classmethod
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_without_count(cls, ctx, op, reduction_func)
370 # cannot just do xdf.DataFrame(r).T
371 # cuz the dtype will be object since pandas 1.0
--> 372 df = xdf.DataFrame(OrderedDict((d, [v]) for d, v in r.iteritems()))
373 else:
374 df = xdf.DataFrame(r)
AttributeError: 'Series' object has no attribute 'iteritems'
|
AttributeError
|
def _main(self):
if pyarrow is None:
self._serial_type = dataserializer.SerialType.PICKLE
else:
self._serial_type = dataserializer.SerialType(
options.client.serial_type.lower()
)
args = self._args.copy()
args["pyver"] = ".".join(str(v) for v in sys.version_info[:3])
args["pickle_protocol"] = self._pickle_protocol
if pyarrow is not None:
args["arrow_version"] = pyarrow.__version__
if self._session_id is None:
resp = self._req_session.post(self._endpoint + "/api/session", data=args)
if resp.status_code >= 400:
raise SystemError("Failed to create mars session: " + resp.reason)
else:
resp = self._req_session.get(
self._endpoint + "/api/session/" + self._session_id, params=args
)
if resp.status_code == 404:
raise ValueError(f"The session with id = {self._session_id} doesn't exist")
if resp.status_code >= 400:
raise SystemError("Failed to check mars session.")
content = json.loads(resp.text)
self._session_id = content["session_id"]
self._pickle_protocol = content.get("pickle_protocol", pickle.HIGHEST_PROTOCOL)
# as pyarrow will use pickle.HIGHEST_PROTOCOL to pickle, we need to use
# SerialType.PICKLE when pickle protocol between client and server
# does not agree with each other
if (
not content.get("arrow_compatible")
or self._pickle_protocol != pickle.HIGHEST_PROTOCOL
):
self._serial_type = dataserializer.SerialType.PICKLE
|
def _main(self):
try:
import pyarrow
self._serial_type = dataserializer.SerialType(
options.client.serial_type.lower()
)
except ImportError:
pyarrow = None
self._serial_type = dataserializer.SerialType.PICKLE
args = self._args.copy()
args["pyver"] = ".".join(str(v) for v in sys.version_info[:3])
args["pickle_protocol"] = self._pickle_protocol
if pyarrow is not None:
args["arrow_version"] = pyarrow.__version__
if self._session_id is None:
resp = self._req_session.post(self._endpoint + "/api/session", data=args)
if resp.status_code >= 400:
raise SystemError("Failed to create mars session: " + resp.reason)
else:
resp = self._req_session.get(
self._endpoint + "/api/session/" + self._session_id, params=args
)
if resp.status_code == 404:
raise ValueError(f"The session with id = {self._session_id} doesn't exist")
if resp.status_code >= 400:
raise SystemError("Failed to check mars session.")
content = json.loads(resp.text)
self._session_id = content["session_id"]
self._pickle_protocol = content.get("pickle_protocol", pickle.HIGHEST_PROTOCOL)
# as pyarrow will use pickle.HIGHEST_PROTOCOL to pickle, we need to use
# SerialType.PICKLE when pickle protocol between client and server
# does not agree with each other
if (
not content.get("arrow_compatible")
or self._pickle_protocol != pickle.HIGHEST_PROTOCOL
):
self._serial_type = dataserializer.SerialType.PICKLE
|
https://github.com/mars-project/mars/issues/1533
|
AttributeError Traceback (most recent call last)
<ipython-input-3-a85925f048d0> in <module>
1 start=time.time()
2 df_mars=df_mars.to_gpu()
----> 3 print(df_mars.sum().to_frame(name="sum").execute())
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
580
581 def execute(self, session=None, **kw):
--> 582 self._data.execute(session, **kw)
583 return self
584
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
460 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
461 for t in tileables)
--> 462 result = self._sess.run(*tileables, **kw)
463
464 for t in tileables:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
105 # set number of running cores
106 self.context.set_ncores(kw['n_parallel'])
--> 107 res = self._executor.execute_tileables(tileables, **kw)
108 return res
109
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
878 n_parallel=n_parallel or n_thread,
879 print_progress=print_progress, mock=mock,
--> 880 chunk_result=chunk_result)
881
882 # update shape of tileable and its chunks whatever it's successful or not
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
688 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
689 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 690 res = graph_execution.execute(retval)
691 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
692 if mock:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute(self, retval)
569 # wait until all the futures completed
570 for future in executed_futures:
--> 571 future.result()
572
573 if retval:
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in result(self, timeout)
423 raise CancelledError()
424 elif self._state == FINISHED:
--> 425 return self.__get_result()
426
427 self._condition.wait(timeout)
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/thread.py in run(self)
54
55 try:
---> 56 result = self.fn(*self.args, **self.kwargs)
57 except BaseException as exc:
58 self.future.set_exception(exc)
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in _execute_operand(self, op)
441 # so we pass the first operand's first output to Executor.handle
442 first_op = ops[0]
--> 443 Executor.handle(first_op, results, self._mock)
444
445 # update maximal memory usage during execution
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in handle(cls, op, results, mock)
639 # The `UFuncTypeError` was introduced by numpy#12593 since v1.17.0.
640 try:
--> 641 return runner(results, op)
642 except UFuncTypeError as e:
643 raise TypeError(str(e)).with_traceback(sys.exc_info()[2]) from None
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in execute(cls, ctx, op)
405 cls._execute_agg(ctx, op)
406 elif op.stage == OperandStage.map:
--> 407 cls._execute_map(ctx, op)
408 else:
409 in_data = ctx[op.inputs[0].key]
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_map(cls, ctx, op)
380 cls._execute_map_with_count(ctx, op)
381 else:
--> 382 cls._execute_without_count(ctx, op)
383
384 @classmethod
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_without_count(cls, ctx, op, reduction_func)
370 # cannot just do xdf.DataFrame(r).T
371 # cuz the dtype will be object since pandas 1.0
--> 372 df = xdf.DataFrame(OrderedDict((d, [v]) for d, v in r.iteritems()))
373 else:
374 df = xdf.DataFrame(r)
AttributeError: 'Series' object has no attribute 'iteritems'
|
AttributeError
|
def _calc_results(self, session_id, graph_key, graph, context_dict, chunk_targets):
_, op_name = concat_operand_keys(graph, "_")
logger.debug("Start calculating operand %s in %s.", graph_key, self.uid)
start_time = time.time()
for chunk in graph:
for inp, prepare_inp in zip(chunk.inputs, chunk.op.prepare_inputs):
if not prepare_inp:
context_dict[inp.key] = None
local_context_dict = DistributedDictContext(
self.get_scheduler(self.default_uid()),
session_id,
actor_ctx=self.ctx,
address=self.address,
n_cpu=self._get_n_cpu(),
)
local_context_dict["_actor_cls"] = type(self)
local_context_dict["_actor_uid"] = self.uid
local_context_dict["_op_key"] = graph_key
local_context_dict.update(context_dict)
context_dict.clear()
if self._execution_ref:
self._execution_ref.deallocate_scheduler_resource(
session_id, graph_key, delay=0.5, _tell=True, _wait=False
)
# start actual execution
executor = Executor(storage=local_context_dict)
with EventContext(
self._events_ref,
EventCategory.PROCEDURE,
EventLevel.NORMAL,
self._calc_event_type,
self.uid,
):
self._execution_pool.submit(
executor.execute_graph, graph, chunk_targets, retval=False
).result()
end_time = time.time()
# collect results
result_keys = []
result_values, result_sizes, result_shapes = [], [], []
collected_chunk_keys = set()
for k in list(local_context_dict.keys()):
v = local_context_dict[k]
if isinstance(k, tuple):
k = tuple(to_str(i) for i in k)
else:
k = to_str(k)
chunk_key = get_chunk_key(k)
if chunk_key in chunk_targets:
result_keys.append(k)
if self._calc_intermediate_device in self._calc_dest_devices:
result_values.append(v)
result_sizes.append(calc_data_size(v))
else:
result_values.append(dataserializer.serialize(v))
result_sizes.append(result_values[-1].total_bytes)
result_shapes.append(getattr(v, "shape", None))
collected_chunk_keys.add(chunk_key)
local_context_dict.pop(k)
# check if all targets generated
if any(k not in collected_chunk_keys for k in chunk_targets):
raise KeyError([k for k in chunk_targets if k not in collected_chunk_keys])
# adjust sizes in allocation
apply_allocs = defaultdict(lambda: 0)
for k, size in zip(result_keys, result_sizes):
apply_allocs[get_chunk_key(k)] += size
apply_alloc_quota_keys, apply_alloc_sizes = [], []
for k, v in apply_allocs.items():
apply_alloc_quota_keys.append(
build_quota_key(session_id, k, owner=self.proc_id)
)
apply_alloc_sizes.append(v)
self._mem_quota_ref.alter_allocations(
apply_alloc_quota_keys, apply_alloc_sizes, _tell=True, _wait=False
)
self._mem_quota_ref.hold_quotas(apply_alloc_quota_keys, _tell=True)
if self._status_ref:
self._status_ref.update_mean_stats(
"calc_speed." + op_name,
sum(apply_alloc_sizes) * 1.0 / (end_time - start_time),
_tell=True,
_wait=False,
)
return self.storage_client.put_objects(
session_id,
result_keys,
result_values,
[self._calc_intermediate_device],
sizes=result_sizes,
shapes=result_shapes,
).then(lambda *_: result_keys)
|
def _calc_results(self, session_id, graph_key, graph, context_dict, chunk_targets):
_, op_name = concat_operand_keys(graph, "_")
logger.debug("Start calculating operand %s in %s.", graph_key, self.uid)
start_time = time.time()
for chunk in graph:
for inp, prepare_inp in zip(chunk.inputs, chunk.op.prepare_inputs):
if not prepare_inp:
context_dict[inp.key] = None
local_context_dict = DistributedDictContext(
self.get_scheduler(self.default_uid()),
session_id,
actor_ctx=self.ctx,
address=self.address,
n_cpu=self._get_n_cpu(),
)
local_context_dict["_actor_cls"] = type(self)
local_context_dict["_actor_uid"] = self.uid
local_context_dict["_op_key"] = graph_key
local_context_dict.update(context_dict)
context_dict.clear()
if self._execution_ref:
self._execution_ref.deallocate_scheduler_resource(
session_id, graph_key, delay=0.5, _tell=True, _wait=False
)
# start actual execution
executor = Executor(storage=local_context_dict)
with EventContext(
self._events_ref,
EventCategory.PROCEDURE,
EventLevel.NORMAL,
self._calc_event_type,
self.uid,
):
self._execution_pool.submit(
executor.execute_graph, graph, chunk_targets, retval=False
).result()
end_time = time.time()
# collect results
result_keys = []
result_values, result_sizes, result_shapes = [], [], []
collected_chunk_keys = set()
for k in list(local_context_dict.keys()):
v = local_context_dict[k]
if isinstance(k, tuple):
k = tuple(to_str(i) for i in k)
else:
k = to_str(k)
chunk_key = get_chunk_key(k)
if chunk_key in chunk_targets:
result_keys.append(k)
result_values.append(dataserializer.serialize(v))
result_sizes.append(result_values[-1].total_bytes)
result_shapes.append(getattr(v, "shape", None))
collected_chunk_keys.add(chunk_key)
local_context_dict.pop(k)
# check if all targets generated
if any(k not in collected_chunk_keys for k in chunk_targets):
raise KeyError([k for k in chunk_targets if k not in collected_chunk_keys])
# adjust sizes in allocation
apply_allocs = defaultdict(lambda: 0)
for k, size in zip(result_keys, result_sizes):
apply_allocs[get_chunk_key(k)] += size
apply_alloc_quota_keys, apply_alloc_sizes = [], []
for k, v in apply_allocs.items():
apply_alloc_quota_keys.append(
build_quota_key(session_id, k, owner=self.proc_id)
)
apply_alloc_sizes.append(v)
self._mem_quota_ref.alter_allocations(
apply_alloc_quota_keys, apply_alloc_sizes, _tell=True, _wait=False
)
self._mem_quota_ref.hold_quotas(apply_alloc_quota_keys, _tell=True)
if self._status_ref:
self._status_ref.update_mean_stats(
"calc_speed." + op_name,
sum(apply_alloc_sizes) * 1.0 / (end_time - start_time),
_tell=True,
_wait=False,
)
return self.storage_client.put_objects(
session_id,
result_keys,
result_values,
[self._calc_intermediate_device],
sizes=result_sizes,
shapes=result_shapes,
).then(lambda *_: result_keys)
|
https://github.com/mars-project/mars/issues/1533
|
AttributeError Traceback (most recent call last)
<ipython-input-3-a85925f048d0> in <module>
1 start=time.time()
2 df_mars=df_mars.to_gpu()
----> 3 print(df_mars.sum().to_frame(name="sum").execute())
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
580
581 def execute(self, session=None, **kw):
--> 582 self._data.execute(session, **kw)
583 return self
584
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
460 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
461 for t in tileables)
--> 462 result = self._sess.run(*tileables, **kw)
463
464 for t in tileables:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
105 # set number of running cores
106 self.context.set_ncores(kw['n_parallel'])
--> 107 res = self._executor.execute_tileables(tileables, **kw)
108 return res
109
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
878 n_parallel=n_parallel or n_thread,
879 print_progress=print_progress, mock=mock,
--> 880 chunk_result=chunk_result)
881
882 # update shape of tileable and its chunks whatever it's successful or not
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
688 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
689 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 690 res = graph_execution.execute(retval)
691 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
692 if mock:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute(self, retval)
569 # wait until all the futures completed
570 for future in executed_futures:
--> 571 future.result()
572
573 if retval:
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in result(self, timeout)
423 raise CancelledError()
424 elif self._state == FINISHED:
--> 425 return self.__get_result()
426
427 self._condition.wait(timeout)
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/thread.py in run(self)
54
55 try:
---> 56 result = self.fn(*self.args, **self.kwargs)
57 except BaseException as exc:
58 self.future.set_exception(exc)
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in _execute_operand(self, op)
441 # so we pass the first operand's first output to Executor.handle
442 first_op = ops[0]
--> 443 Executor.handle(first_op, results, self._mock)
444
445 # update maximal memory usage during execution
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in handle(cls, op, results, mock)
639 # The `UFuncTypeError` was introduced by numpy#12593 since v1.17.0.
640 try:
--> 641 return runner(results, op)
642 except UFuncTypeError as e:
643 raise TypeError(str(e)).with_traceback(sys.exc_info()[2]) from None
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in execute(cls, ctx, op)
405 cls._execute_agg(ctx, op)
406 elif op.stage == OperandStage.map:
--> 407 cls._execute_map(ctx, op)
408 else:
409 in_data = ctx[op.inputs[0].key]
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_map(cls, ctx, op)
380 cls._execute_map_with_count(ctx, op)
381 else:
--> 382 cls._execute_without_count(ctx, op)
383
384 @classmethod
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_without_count(cls, ctx, op, reduction_func)
370 # cannot just do xdf.DataFrame(r).T
371 # cuz the dtype will be object since pandas 1.0
--> 372 df = xdf.DataFrame(OrderedDict((d, [v]) for d, v in r.iteritems()))
373 else:
374 df = xdf.DataFrame(r)
AttributeError: 'Series' object has no attribute 'iteritems'
|
AttributeError
|
def start_plasma(self):
from pyarrow import plasma
self._plasma_store = plasma.start_plasma_store(
self._cache_mem_limit, plasma_directory=self._plasma_dir
)
options.worker.plasma_socket, _ = self._plasma_store.__enter__()
|
def start_plasma(self):
self._plasma_store = plasma.start_plasma_store(
self._cache_mem_limit, plasma_directory=self._plasma_dir
)
options.worker.plasma_socket, _ = self._plasma_store.__enter__()
|
https://github.com/mars-project/mars/issues/1533
|
AttributeError Traceback (most recent call last)
<ipython-input-3-a85925f048d0> in <module>
1 start=time.time()
2 df_mars=df_mars.to_gpu()
----> 3 print(df_mars.sum().to_frame(name="sum").execute())
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
580
581 def execute(self, session=None, **kw):
--> 582 self._data.execute(session, **kw)
583 return self
584
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
460 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
461 for t in tileables)
--> 462 result = self._sess.run(*tileables, **kw)
463
464 for t in tileables:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
105 # set number of running cores
106 self.context.set_ncores(kw['n_parallel'])
--> 107 res = self._executor.execute_tileables(tileables, **kw)
108 return res
109
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
878 n_parallel=n_parallel or n_thread,
879 print_progress=print_progress, mock=mock,
--> 880 chunk_result=chunk_result)
881
882 # update shape of tileable and its chunks whatever it's successful or not
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
688 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
689 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 690 res = graph_execution.execute(retval)
691 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
692 if mock:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute(self, retval)
569 # wait until all the futures completed
570 for future in executed_futures:
--> 571 future.result()
572
573 if retval:
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in result(self, timeout)
423 raise CancelledError()
424 elif self._state == FINISHED:
--> 425 return self.__get_result()
426
427 self._condition.wait(timeout)
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/thread.py in run(self)
54
55 try:
---> 56 result = self.fn(*self.args, **self.kwargs)
57 except BaseException as exc:
58 self.future.set_exception(exc)
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in _execute_operand(self, op)
441 # so we pass the first operand's first output to Executor.handle
442 first_op = ops[0]
--> 443 Executor.handle(first_op, results, self._mock)
444
445 # update maximal memory usage during execution
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in handle(cls, op, results, mock)
639 # The `UFuncTypeError` was introduced by numpy#12593 since v1.17.0.
640 try:
--> 641 return runner(results, op)
642 except UFuncTypeError as e:
643 raise TypeError(str(e)).with_traceback(sys.exc_info()[2]) from None
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in execute(cls, ctx, op)
405 cls._execute_agg(ctx, op)
406 elif op.stage == OperandStage.map:
--> 407 cls._execute_map(ctx, op)
408 else:
409 in_data = ctx[op.inputs[0].key]
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_map(cls, ctx, op)
380 cls._execute_map_with_count(ctx, op)
381 else:
--> 382 cls._execute_without_count(ctx, op)
383
384 @classmethod
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_without_count(cls, ctx, op, reduction_func)
370 # cannot just do xdf.DataFrame(r).T
371 # cuz the dtype will be object since pandas 1.0
--> 372 df = xdf.DataFrame(OrderedDict((d, [v]) for d, v in r.iteritems()))
373 else:
374 df = xdf.DataFrame(r)
AttributeError: 'Series' object has no attribute 'iteritems'
|
AttributeError
|
def get_actual_capacity(self, store_limit):
"""
Get actual capacity of plasma store
:return: actual storage size in bytes
"""
try:
store_limit = min(store_limit, self._plasma_client.store_capacity())
except AttributeError: # pragma: no cover
pass
if self._size_limit is None:
left_size = store_limit
alloc_fraction = 1
while True:
allocate_size = int(left_size * alloc_fraction / PAGE_SIZE) * PAGE_SIZE
try:
obj_id = plasma.ObjectID.from_random()
buf = [self._plasma_client.create(obj_id, allocate_size)]
self._plasma_client.seal(obj_id)
del buf[:]
break
except plasma_errors.PlasmaStoreFull:
alloc_fraction *= 0.99
finally:
self._plasma_client.evict(allocate_size)
self._size_limit = allocate_size
return self._size_limit
|
def get_actual_capacity(self, store_limit):
"""
Get actual capacity of plasma store
:return: actual storage size in bytes
"""
try:
store_limit = min(store_limit, self._plasma_client.store_capacity())
except AttributeError: # pragma: no cover
pass
if self._size_limit is None:
left_size = store_limit
alloc_fraction = 1
while True:
allocate_size = int(left_size * alloc_fraction / PAGE_SIZE) * PAGE_SIZE
try:
obj_id = plasma.ObjectID.from_random()
buf = [self._plasma_client.create(obj_id, allocate_size)]
self._plasma_client.seal(obj_id)
del buf[:]
break
except PlasmaStoreFull:
alloc_fraction *= 0.99
finally:
self._plasma_client.evict(allocate_size)
self._size_limit = allocate_size
return self._size_limit
|
https://github.com/mars-project/mars/issues/1533
|
AttributeError Traceback (most recent call last)
<ipython-input-3-a85925f048d0> in <module>
1 start=time.time()
2 df_mars=df_mars.to_gpu()
----> 3 print(df_mars.sum().to_frame(name="sum").execute())
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
580
581 def execute(self, session=None, **kw):
--> 582 self._data.execute(session, **kw)
583 return self
584
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
460 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
461 for t in tileables)
--> 462 result = self._sess.run(*tileables, **kw)
463
464 for t in tileables:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
105 # set number of running cores
106 self.context.set_ncores(kw['n_parallel'])
--> 107 res = self._executor.execute_tileables(tileables, **kw)
108 return res
109
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
878 n_parallel=n_parallel or n_thread,
879 print_progress=print_progress, mock=mock,
--> 880 chunk_result=chunk_result)
881
882 # update shape of tileable and its chunks whatever it's successful or not
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
688 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
689 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 690 res = graph_execution.execute(retval)
691 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
692 if mock:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute(self, retval)
569 # wait until all the futures completed
570 for future in executed_futures:
--> 571 future.result()
572
573 if retval:
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in result(self, timeout)
423 raise CancelledError()
424 elif self._state == FINISHED:
--> 425 return self.__get_result()
426
427 self._condition.wait(timeout)
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/thread.py in run(self)
54
55 try:
---> 56 result = self.fn(*self.args, **self.kwargs)
57 except BaseException as exc:
58 self.future.set_exception(exc)
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in _execute_operand(self, op)
441 # so we pass the first operand's first output to Executor.handle
442 first_op = ops[0]
--> 443 Executor.handle(first_op, results, self._mock)
444
445 # update maximal memory usage during execution
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in handle(cls, op, results, mock)
639 # The `UFuncTypeError` was introduced by numpy#12593 since v1.17.0.
640 try:
--> 641 return runner(results, op)
642 except UFuncTypeError as e:
643 raise TypeError(str(e)).with_traceback(sys.exc_info()[2]) from None
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in execute(cls, ctx, op)
405 cls._execute_agg(ctx, op)
406 elif op.stage == OperandStage.map:
--> 407 cls._execute_map(ctx, op)
408 else:
409 in_data = ctx[op.inputs[0].key]
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_map(cls, ctx, op)
380 cls._execute_map_with_count(ctx, op)
381 else:
--> 382 cls._execute_without_count(ctx, op)
383
384 @classmethod
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_without_count(cls, ctx, op, reduction_func)
370 # cannot just do xdf.DataFrame(r).T
371 # cuz the dtype will be object since pandas 1.0
--> 372 df = xdf.DataFrame(OrderedDict((d, [v]) for d, v in r.iteritems()))
373 else:
374 df = xdf.DataFrame(r)
AttributeError: 'Series' object has no attribute 'iteritems'
|
AttributeError
|
def create(self, session_id, data_key, size):
obj_id = self._new_object_id(session_id, data_key)
try:
self._plasma_client.evict(size)
buffer = self._plasma_client.create(obj_id, size)
return buffer
except plasma_errors.PlasmaStoreFull:
exc_type = plasma_errors.PlasmaStoreFull
self._mapper_ref.delete(session_id, data_key)
logger.warning(
"Data %s(%d) failed to store to plasma due to StorageFull", data_key, size
)
except: # noqa: E722
self._mapper_ref.delete(session_id, data_key)
raise
if exc_type is plasma_errors.PlasmaStoreFull:
raise StorageFull(
request_size=size, capacity=self._size_limit, affected_keys=[data_key]
)
|
def create(self, session_id, data_key, size):
obj_id = self._new_object_id(session_id, data_key)
try:
self._plasma_client.evict(size)
buffer = self._plasma_client.create(obj_id, size)
return buffer
except PlasmaStoreFull:
exc_type = PlasmaStoreFull
self._mapper_ref.delete(session_id, data_key)
logger.warning(
"Data %s(%d) failed to store to plasma due to StorageFull", data_key, size
)
except: # noqa: E722
self._mapper_ref.delete(session_id, data_key)
raise
if exc_type is PlasmaStoreFull:
raise StorageFull(
request_size=size, capacity=self._size_limit, affected_keys=[data_key]
)
|
https://github.com/mars-project/mars/issues/1533
|
AttributeError Traceback (most recent call last)
<ipython-input-3-a85925f048d0> in <module>
1 start=time.time()
2 df_mars=df_mars.to_gpu()
----> 3 print(df_mars.sum().to_frame(name="sum").execute())
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
580
581 def execute(self, session=None, **kw):
--> 582 self._data.execute(session, **kw)
583 return self
584
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
460 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
461 for t in tileables)
--> 462 result = self._sess.run(*tileables, **kw)
463
464 for t in tileables:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
105 # set number of running cores
106 self.context.set_ncores(kw['n_parallel'])
--> 107 res = self._executor.execute_tileables(tileables, **kw)
108 return res
109
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
878 n_parallel=n_parallel or n_thread,
879 print_progress=print_progress, mock=mock,
--> 880 chunk_result=chunk_result)
881
882 # update shape of tileable and its chunks whatever it's successful or not
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
688 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
689 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 690 res = graph_execution.execute(retval)
691 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
692 if mock:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute(self, retval)
569 # wait until all the futures completed
570 for future in executed_futures:
--> 571 future.result()
572
573 if retval:
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in result(self, timeout)
423 raise CancelledError()
424 elif self._state == FINISHED:
--> 425 return self.__get_result()
426
427 self._condition.wait(timeout)
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/thread.py in run(self)
54
55 try:
---> 56 result = self.fn(*self.args, **self.kwargs)
57 except BaseException as exc:
58 self.future.set_exception(exc)
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in _execute_operand(self, op)
441 # so we pass the first operand's first output to Executor.handle
442 first_op = ops[0]
--> 443 Executor.handle(first_op, results, self._mock)
444
445 # update maximal memory usage during execution
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in handle(cls, op, results, mock)
639 # The `UFuncTypeError` was introduced by numpy#12593 since v1.17.0.
640 try:
--> 641 return runner(results, op)
642 except UFuncTypeError as e:
643 raise TypeError(str(e)).with_traceback(sys.exc_info()[2]) from None
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in execute(cls, ctx, op)
405 cls._execute_agg(ctx, op)
406 elif op.stage == OperandStage.map:
--> 407 cls._execute_map(ctx, op)
408 else:
409 in_data = ctx[op.inputs[0].key]
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_map(cls, ctx, op)
380 cls._execute_map_with_count(ctx, op)
381 else:
--> 382 cls._execute_without_count(ctx, op)
383
384 @classmethod
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_without_count(cls, ctx, op, reduction_func)
370 # cannot just do xdf.DataFrame(r).T
371 # cuz the dtype will be object since pandas 1.0
--> 372 df = xdf.DataFrame(OrderedDict((d, [v]) for d, v in r.iteritems()))
373 else:
374 df = xdf.DataFrame(r)
AttributeError: 'Series' object has no attribute 'iteritems'
|
AttributeError
|
def seal(self, session_id, data_key):
obj_id = self._get_object_id(session_id, data_key)
try:
self._plasma_client.seal(obj_id)
except plasma_errors.PlasmaObjectNotFound:
self._mapper_ref.delete(session_id, data_key)
raise KeyError((session_id, data_key))
|
def seal(self, session_id, data_key):
obj_id = self._get_object_id(session_id, data_key)
try:
self._plasma_client.seal(obj_id)
except PlasmaObjectNotFound:
self._mapper_ref.delete(session_id, data_key)
raise KeyError((session_id, data_key))
|
https://github.com/mars-project/mars/issues/1533
|
AttributeError Traceback (most recent call last)
<ipython-input-3-a85925f048d0> in <module>
1 start=time.time()
2 df_mars=df_mars.to_gpu()
----> 3 print(df_mars.sum().to_frame(name="sum").execute())
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
580
581 def execute(self, session=None, **kw):
--> 582 self._data.execute(session, **kw)
583 return self
584
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
460 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
461 for t in tileables)
--> 462 result = self._sess.run(*tileables, **kw)
463
464 for t in tileables:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
105 # set number of running cores
106 self.context.set_ncores(kw['n_parallel'])
--> 107 res = self._executor.execute_tileables(tileables, **kw)
108 return res
109
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
878 n_parallel=n_parallel or n_thread,
879 print_progress=print_progress, mock=mock,
--> 880 chunk_result=chunk_result)
881
882 # update shape of tileable and its chunks whatever it's successful or not
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
688 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
689 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 690 res = graph_execution.execute(retval)
691 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
692 if mock:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute(self, retval)
569 # wait until all the futures completed
570 for future in executed_futures:
--> 571 future.result()
572
573 if retval:
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in result(self, timeout)
423 raise CancelledError()
424 elif self._state == FINISHED:
--> 425 return self.__get_result()
426
427 self._condition.wait(timeout)
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/thread.py in run(self)
54
55 try:
---> 56 result = self.fn(*self.args, **self.kwargs)
57 except BaseException as exc:
58 self.future.set_exception(exc)
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in _execute_operand(self, op)
441 # so we pass the first operand's first output to Executor.handle
442 first_op = ops[0]
--> 443 Executor.handle(first_op, results, self._mock)
444
445 # update maximal memory usage during execution
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in handle(cls, op, results, mock)
639 # The `UFuncTypeError` was introduced by numpy#12593 since v1.17.0.
640 try:
--> 641 return runner(results, op)
642 except UFuncTypeError as e:
643 raise TypeError(str(e)).with_traceback(sys.exc_info()[2]) from None
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in execute(cls, ctx, op)
405 cls._execute_agg(ctx, op)
406 elif op.stage == OperandStage.map:
--> 407 cls._execute_map(ctx, op)
408 else:
409 in_data = ctx[op.inputs[0].key]
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_map(cls, ctx, op)
380 cls._execute_map_with_count(ctx, op)
381 else:
--> 382 cls._execute_without_count(ctx, op)
383
384 @classmethod
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_without_count(cls, ctx, op, reduction_func)
370 # cannot just do xdf.DataFrame(r).T
371 # cuz the dtype will be object since pandas 1.0
--> 372 df = xdf.DataFrame(OrderedDict((d, [v]) for d, v in r.iteritems()))
373 else:
374 df = xdf.DataFrame(r)
AttributeError: 'Series' object has no attribute 'iteritems'
|
AttributeError
|
def put(self, session_id, data_key, value):
"""
Put a Mars object into plasma store
:param session_id: session id
:param data_key: chunk key
:param value: Mars object to be put
"""
data_size = None
try:
obj_id = self._new_object_id(session_id, data_key)
except StorageDataExists:
obj_id = self._get_object_id(session_id, data_key)
if self._plasma_client.contains(obj_id):
logger.debug("Data %s already exists, returning existing", data_key)
[buffer] = self._plasma_client.get_buffers([obj_id], timeout_ms=10)
del value
return buffer
else:
logger.warning(
"Data %s registered but no data found, reconstructed", data_key
)
self._mapper_ref.delete(session_id, data_key)
obj_id = self._new_object_id(session_id, data_key)
try:
try:
serialized = dataserializer.serialize(value)
except SerializationCallbackError:
self._mapper_ref.delete(session_id, data_key)
raise SerializationFailed(obj=value) from None
del value
data_size = serialized.total_bytes
try:
buffer = self._plasma_client.create(obj_id, serialized.total_bytes)
stream = pyarrow.FixedSizeBufferWriter(buffer)
stream.set_memcopy_threads(6)
self._pool.submit(serialized.write_to, stream).result()
self._plasma_client.seal(obj_id)
finally:
del serialized
return buffer
except plasma_errors.PlasmaStoreFull:
self._mapper_ref.delete(session_id, data_key)
logger.warning(
"Data %s(%d) failed to store to plasma due to StorageFull",
data_key,
data_size,
)
exc = plasma_errors.PlasmaStoreFull
except: # noqa: E722
self._mapper_ref.delete(session_id, data_key)
raise
if exc is plasma_errors.PlasmaStoreFull:
raise StorageFull(
request_size=data_size, capacity=self._size_limit, affected_keys=[data_key]
)
|
def put(self, session_id, data_key, value):
"""
Put a Mars object into plasma store
:param session_id: session id
:param data_key: chunk key
:param value: Mars object to be put
"""
data_size = None
try:
obj_id = self._new_object_id(session_id, data_key)
except StorageDataExists:
obj_id = self._get_object_id(session_id, data_key)
if self._plasma_client.contains(obj_id):
logger.debug("Data %s already exists, returning existing", data_key)
[buffer] = self._plasma_client.get_buffers([obj_id], timeout_ms=10)
del value
return buffer
else:
logger.warning(
"Data %s registered but no data found, reconstructed", data_key
)
self._mapper_ref.delete(session_id, data_key)
obj_id = self._new_object_id(session_id, data_key)
try:
try:
serialized = dataserializer.serialize(value)
except SerializationCallbackError:
self._mapper_ref.delete(session_id, data_key)
raise SerializationFailed(obj=value) from None
del value
data_size = serialized.total_bytes
try:
buffer = self._plasma_client.create(obj_id, serialized.total_bytes)
stream = pyarrow.FixedSizeBufferWriter(buffer)
stream.set_memcopy_threads(6)
self._pool.submit(serialized.write_to, stream).result()
self._plasma_client.seal(obj_id)
finally:
del serialized
return buffer
except PlasmaStoreFull:
self._mapper_ref.delete(session_id, data_key)
logger.warning(
"Data %s(%d) failed to store to plasma due to StorageFull",
data_key,
data_size,
)
exc = PlasmaStoreFull
except: # noqa: E722
self._mapper_ref.delete(session_id, data_key)
raise
if exc is PlasmaStoreFull:
raise StorageFull(
request_size=data_size, capacity=self._size_limit, affected_keys=[data_key]
)
|
https://github.com/mars-project/mars/issues/1533
|
AttributeError Traceback (most recent call last)
<ipython-input-3-a85925f048d0> in <module>
1 start=time.time()
2 df_mars=df_mars.to_gpu()
----> 3 print(df_mars.sum().to_frame(name="sum").execute())
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
580
581 def execute(self, session=None, **kw):
--> 582 self._data.execute(session, **kw)
583 return self
584
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
460 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
461 for t in tileables)
--> 462 result = self._sess.run(*tileables, **kw)
463
464 for t in tileables:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/session.py in run(self, *tileables, **kw)
105 # set number of running cores
106 self.context.set_ncores(kw['n_parallel'])
--> 107 res = self._executor.execute_tileables(tileables, **kw)
108 return res
109
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
878 n_parallel=n_parallel or n_thread,
879 print_progress=print_progress, mock=mock,
--> 880 chunk_result=chunk_result)
881
882 # update shape of tileable and its chunks whatever it's successful or not
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
688 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
689 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 690 res = graph_execution.execute(retval)
691 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
692 if mock:
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in execute(self, retval)
569 # wait until all the futures completed
570 for future in executed_futures:
--> 571 future.result()
572
573 if retval:
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in result(self, timeout)
423 raise CancelledError()
424 elif self._state == FINISHED:
--> 425 return self.__get_result()
426
427 self._condition.wait(timeout)
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
/opt/conda/envs/rapids/lib/python3.6/concurrent/futures/thread.py in run(self)
54
55 try:
---> 56 result = self.fn(*self.args, **self.kwargs)
57 except BaseException as exc:
58 self.future.set_exception(exc)
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in _execute_operand(self, op)
441 # so we pass the first operand's first output to Executor.handle
442 first_op = ops[0]
--> 443 Executor.handle(first_op, results, self._mock)
444
445 # update maximal memory usage during execution
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/executor.py in handle(cls, op, results, mock)
639 # The `UFuncTypeError` was introduced by numpy#12593 since v1.17.0.
640 try:
--> 641 return runner(results, op)
642 except UFuncTypeError as e:
643 raise TypeError(str(e)).with_traceback(sys.exc_info()[2]) from None
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in execute(cls, ctx, op)
405 cls._execute_agg(ctx, op)
406 elif op.stage == OperandStage.map:
--> 407 cls._execute_map(ctx, op)
408 else:
409 in_data = ctx[op.inputs[0].key]
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_map(cls, ctx, op)
380 cls._execute_map_with_count(ctx, op)
381 else:
--> 382 cls._execute_without_count(ctx, op)
383
384 @classmethod
/opt/conda/envs/rapids/lib/python3.6/site-packages/mars/dataframe/reduction/core.py in _execute_without_count(cls, ctx, op, reduction_func)
370 # cannot just do xdf.DataFrame(r).T
371 # cuz the dtype will be object since pandas 1.0
--> 372 df = xdf.DataFrame(OrderedDict((d, [v]) for d, v in r.iteritems()))
373 else:
374 df = xdf.DataFrame(r)
AttributeError: 'Series' object has no attribute 'iteritems'
|
AttributeError
|
def __init__(self, values, dtype: ArrowDtype = None, copy=False):
pandas_only = self._pandas_only()
if pa is not None and not pandas_only:
self._init_by_arrow(values, dtype=dtype, copy=copy)
elif not is_kernel_mode():
# not in kernel mode, allow to use numpy handle data
# just for infer dtypes purpose
self._init_by_numpy(values, dtype=dtype, copy=copy)
else:
raise ImportError("Cannot create ArrowArray when `pyarrow` not installed")
# for test purpose
self._force_use_pandas = pandas_only
|
def __init__(self, values, dtype: ArrowDtype = None, copy=False):
if isinstance(values, (pd.Index, pd.Series)):
# for pandas Index and Series,
# convert to PandasArray
values = values.array
if isinstance(values, type(self)):
arrow_array = values._arrow_array
elif isinstance(values, ExtensionArray):
# if come from pandas object like index,
# convert to pandas StringArray first,
# validation will be done in construct
arrow_array = pa.chunked_array([pa.array(values, from_pandas=True)])
elif isinstance(values, pa.ChunkedArray):
arrow_array = values
elif isinstance(values, pa.Array):
arrow_array = pa.chunked_array([values])
else:
arrow_array = pa.chunked_array([pa.array(values, type=dtype.arrow_type)])
if copy:
arrow_array = copy_obj(arrow_array)
self._arrow_array = arrow_array
self._dtype = dtype
# for test purpose
self._force_use_pandas = False
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def __repr__(self):
return f"{type(self).__name__}({repr(self._array)})"
|
def __repr__(self):
return f"{type(self).__name__}({repr(self._arrow_array)})"
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def nbytes(self) -> int:
if self._use_arrow:
return sum(
x.size
for chunk in self._arrow_array.chunks
for x in chunk.buffers()
if x is not None
)
else:
return self._ndarray.nbytes
|
def nbytes(self) -> int:
return sum(
x.size
for chunk in self._arrow_array.chunks
for x in chunk.buffers()
if x is not None
)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def shape(self):
if self._use_arrow:
return (self._arrow_array.length(),)
else:
return self._ndarray.shape
|
def shape(self):
return (self._arrow_array.length(),)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def memory_usage(self, deep=True) -> int:
if self._use_arrow:
return self.nbytes
else:
return pd.Series(self._ndarray).memory_usage(index=False, deep=deep)
|
def memory_usage(self, deep=True) -> int:
return self.nbytes
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def _from_sequence(cls, scalars, dtype=None, copy=False):
if pa is None or cls._pandas_only():
# pyarrow not installed, just return numpy
ret = np.empty(len(scalars), dtype=object)
ret[:] = scalars
return cls(ret)
if pa_null is not None and isinstance(scalars, type(pa_null)):
scalars = []
elif not hasattr(scalars, "dtype"):
ret = np.empty(len(scalars), dtype=object)
for i, s in enumerate(scalars):
ret[i] = s
scalars = ret
elif isinstance(scalars, cls):
if copy:
scalars = scalars.copy()
return scalars
arrow_array = pa.chunked_array([cls._to_arrow_array(scalars)])
return cls(arrow_array, dtype=dtype, copy=copy)
|
def _from_sequence(cls, scalars, dtype=None, copy=False):
if not hasattr(scalars, "dtype"):
ret = np.empty(len(scalars), dtype=object)
for i, s in enumerate(scalars):
ret[i] = s
scalars = ret
if isinstance(scalars, cls):
if copy:
scalars = scalars.copy()
return scalars
arrow_array = pa.chunked_array([cls._to_arrow_array(scalars)])
return cls(arrow_array, dtype=dtype, copy=copy)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def __getitem__(self, item):
cls = type(self)
if pa is None or self._force_use_pandas:
# pyarrow not installed
result = self._ndarray[item]
if pd.api.types.is_scalar(item):
return result
else:
return type(self)(result)
has_take = hasattr(self._arrow_array, "take")
if not self._force_use_pandas and has_take:
if pd.api.types.is_scalar(item):
item = item + len(self) if item < 0 else item
return self._post_scalar_getitem(self._arrow_array.take([item]))
elif self._can_process_slice_via_arrow(item):
length = len(self)
start, stop = item.start, item.stop
start = self._process_pos(start, length, True)
stop = self._process_pos(stop, length, False)
return cls(
self._arrow_array.slice(offset=start, length=stop - start),
dtype=self._dtype,
)
elif hasattr(item, "dtype") and np.issubdtype(item.dtype, np.bool_):
return cls(
self._arrow_array.filter(pa.array(item, from_pandas=True)),
dtype=self._dtype,
)
elif hasattr(item, "dtype"):
length = len(self)
item = np.where(item < 0, item + length, item)
return cls(self._arrow_array.take(item), dtype=self._dtype)
array = np.asarray(self._arrow_array.to_pandas())
return cls(array[item], dtype=self._dtype)
|
def __getitem__(self, item):
cls = type(self)
has_take = hasattr(self._arrow_array, "take")
if not self._force_use_pandas and has_take:
if pd.api.types.is_scalar(item):
item = item + len(self) if item < 0 else item
return self._post_scalar_getitem(self._arrow_array.take([item]))
elif self._can_process_slice_via_arrow(item):
length = len(self)
start, stop = item.start, item.stop
start = self._process_pos(start, length, True)
stop = self._process_pos(stop, length, False)
return cls(
self._arrow_array.slice(offset=start, length=stop - start),
dtype=self._dtype,
)
elif hasattr(item, "dtype") and np.issubdtype(item.dtype, np.bool_):
return cls(
self._arrow_array.filter(pa.array(item, from_pandas=True)),
dtype=self._dtype,
)
elif hasattr(item, "dtype"):
length = len(self)
item = np.where(item < 0, item + length, item)
return cls(self._arrow_array.take(item), dtype=self._dtype)
array = np.asarray(self._arrow_array.to_pandas())
return cls(array[item], dtype=self._dtype)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def _concat_same_type(cls, to_concat: Sequence["ArrowArray"]) -> "ArrowArray":
if pa is None or cls._pandas_only():
# pyarrow not installed
return cls(np.concatenate([x._array for x in to_concat]))
chunks = list(
itertools.chain.from_iterable(x._arrow_array.chunks for x in to_concat)
)
if len(chunks) == 0:
chunks = [pa.array([], type=to_concat[0].dtype.arrow_type)]
return cls(pa.chunked_array(chunks))
|
def _concat_same_type(cls, to_concat: Sequence["ArrowArray"]) -> "ArrowArray":
chunks = list(
itertools.chain.from_iterable(x._arrow_array.chunks for x in to_concat)
)
if len(chunks) == 0:
chunks = [pa.array([], type=to_concat[0].dtype.arrow_type)]
return cls(pa.chunked_array(chunks))
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def __len__(self):
return len(self._array)
|
def __len__(self):
return len(self._arrow_array)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def to_numpy(self, dtype=None, copy=False, na_value=lib.no_default):
if self._use_arrow:
array = np.asarray(self._arrow_array.to_pandas())
else:
array = self._ndarray
if copy or na_value is not lib.no_default:
array = array.copy()
if na_value is not lib.no_default:
array[self.isna()] = na_value
return array
|
def to_numpy(self, dtype=None, copy=False, na_value=lib.no_default):
array = np.asarray(self._arrow_array.to_pandas())
if copy or na_value is not lib.no_default:
array = array.copy()
if na_value is not lib.no_default:
array[self.isna()] = na_value
return array
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def fillna(self, value=None, method=None, limit=None):
cls = type(self)
if pa is None or self._force_use_pandas:
# pyarrow not installed
return cls(
pd.Series(self.to_numpy()).fillna(value=value, method=method, limit=limit)
)
chunks = []
for chunk_array in self._arrow_array.chunks:
array = chunk_array.to_pandas()
if method is None:
result_array = self._array_fillna(array, value)
else:
result_array = array.fillna(value=value, method=method, limit=limit)
chunks.append(pa.array(result_array, from_pandas=True))
return cls(pa.chunked_array(chunks), dtype=self._dtype)
|
def fillna(self, value=None, method=None, limit=None):
chunks = []
for chunk_array in self._arrow_array.chunks:
array = chunk_array.to_pandas()
if method is None:
result_array = self._array_fillna(array, value)
else:
result_array = array.fillna(value=value, method=method, limit=limit)
chunks.append(pa.array(result_array, from_pandas=True))
return type(self)(pa.chunked_array(chunks), dtype=self._dtype)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def astype(self, dtype, copy=True):
dtype = pandas_dtype(dtype)
if isinstance(dtype, ArrowStringDtype):
if copy:
return self.copy()
return self
if pa is None or self._force_use_pandas:
# pyarrow not installed
if isinstance(dtype, ArrowDtype):
dtype = dtype.type
return type(self)(pd.Series(self.to_numpy()).astype(dtype, copy=copy))
# try to slice 1 record to get the result dtype
test_array = self._arrow_array.slice(0, 1).to_pandas()
test_result_array = test_array.astype(dtype).array
result_array = type(test_result_array)(
np.full(
self.shape,
test_result_array.dtype.na_value,
dtype=np.asarray(test_result_array).dtype,
)
)
start = 0
# use chunks to do astype
for chunk_array in self._arrow_array.chunks:
result_array[start : start + len(chunk_array)] = (
chunk_array.to_pandas().astype(dtype).array
)
start += len(chunk_array)
return result_array
|
def astype(self, dtype, copy=True):
dtype = pandas_dtype(dtype)
if isinstance(dtype, ArrowStringDtype):
if copy:
return self.copy()
return self
# try to slice 1 record to get the result dtype
test_array = self._arrow_array.slice(0, 1).to_pandas()
test_result_array = test_array.astype(dtype).array
result_array = type(test_result_array)(
np.full(
self.shape,
test_result_array.dtype.na_value,
dtype=np.asarray(test_result_array).dtype,
)
)
start = 0
# use chunks to do astype
for chunk_array in self._arrow_array.chunks:
result_array[start : start + len(chunk_array)] = (
chunk_array.to_pandas().astype(dtype).array
)
start += len(chunk_array)
return result_array
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def isna(self):
if (
not self._force_use_pandas
and self._use_arrow
and hasattr(self._arrow_array, "is_null")
):
return self._arrow_array.is_null().to_pandas().to_numpy()
elif self._use_arrow:
return pd.isna(self._arrow_array.to_pandas()).to_numpy()
else:
return pd.isna(self._ndarray)
|
def isna(self):
if not self._force_use_pandas and hasattr(self._arrow_array, "is_null"):
return self._arrow_array.is_null().to_pandas().to_numpy()
else:
return pd.isna(self._arrow_array.to_pandas()).to_numpy()
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def take(self, indices, allow_fill=False, fill_value=None):
if (
allow_fill is False or (allow_fill and fill_value is self.dtype.na_value)
) and len(self) > 0:
return type(self)(self[indices], dtype=self._dtype)
if self._use_arrow:
array = self._arrow_array.to_pandas().to_numpy()
else:
array = self._ndarray
replace = False
if allow_fill and (fill_value is None or fill_value == self._dtype.na_value):
fill_value = self.dtype.na_value
replace = True
result = take(array, indices, fill_value=fill_value, allow_fill=allow_fill)
del array
if replace and pa is not None:
# pyarrow cannot recognize pa.NULL
result[result == self.dtype.na_value] = None
return type(self)(result, dtype=self._dtype)
|
def take(self, indices, allow_fill=False, fill_value=None):
if allow_fill is False or (allow_fill and fill_value is self.dtype.na_value):
return type(self)(self[indices], dtype=self._dtype)
array = self._arrow_array.to_pandas().to_numpy()
replace = False
if allow_fill and fill_value is None:
fill_value = self.dtype.na_value
replace = True
result = take(array, indices, fill_value=fill_value, allow_fill=allow_fill)
del array
if replace:
# pyarrow cannot recognize pa.NULL
result[result == self.dtype.na_value] = None
return type(self)(result, dtype=self._dtype)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def copy(self):
if self._use_arrow:
return type(self)(copy_obj(self._arrow_array))
else:
return type(self)(self._ndarray.copy())
|
def copy(self):
return type(self)(copy_obj(self._arrow_array))
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def value_counts(self, dropna=False):
if self._use_arrow:
series = self._arrow_array.to_pandas()
else:
series = pd.Series(self._ndarray)
return type(self)(series.value_counts(dropna=dropna), dtype=self._dtype)
|
def value_counts(self, dropna=False):
series = self._arrow_array.to_pandas()
return type(self)(series.value_counts(dropna=dropna), dtype=self._dtype)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def __mars_tokenize__(self):
if self._use_arrow:
return [
memoryview(x)
for chunk in self._arrow_array.chunks
for x in chunk.buffers()
if x is not None
]
else:
return self._ndarray
|
def __mars_tokenize__(self):
return [
memoryview(x)
for chunk in self._arrow_array.chunks
for x in chunk.buffers()
if x is not None
]
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def from_scalars(cls, values):
if pa is None or cls._pandas_only():
return cls._from_sequence(values)
else:
arrow_array = pa.chunked_array([cls._to_arrow_array(values)])
return cls(arrow_array)
|
def from_scalars(cls, values):
arrow_array = pa.chunked_array([cls._to_arrow_array(values)])
return cls(arrow_array)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def __setitem__(self, key, value):
if isinstance(value, (pd.Index, pd.Series)):
value = value.to_numpy()
if isinstance(value, type(self)):
value = value.to_numpy()
key = check_array_indexer(self, key)
scalar_key = is_scalar(key)
scalar_value = is_scalar(value)
if scalar_key and not scalar_value:
raise ValueError("setting an array element with a sequence.")
# validate new items
if scalar_value:
if pd.isna(value):
value = None
elif not isinstance(value, str):
raise ValueError(
f"Cannot set non-string value '{value}' into a ArrowStringArray."
)
else:
if not is_array_like(value):
value = np.asarray(value, dtype=object)
if len(value) and not lib.is_string_array(value, skipna=True):
raise ValueError("Must provide strings.")
if self._use_arrow:
string_array = np.asarray(self._arrow_array.to_pandas())
string_array[key] = value
self._arrow_array = pa.chunked_array([pa.array(string_array)])
else:
self._ndarray[key] = value
|
def __setitem__(self, key, value):
if isinstance(value, (pd.Index, pd.Series)):
value = value.to_numpy()
if isinstance(value, type(self)):
value = value.to_numpy()
key = check_array_indexer(self, key)
scalar_key = is_scalar(key)
scalar_value = is_scalar(value)
if scalar_key and not scalar_value:
raise ValueError("setting an array element with a sequence.")
# validate new items
if scalar_value:
if pd.isna(value):
value = None
elif not isinstance(value, str):
raise ValueError(
f"Cannot set non-string value '{value}' into a ArrowStringArray."
)
else:
if not is_array_like(value):
value = np.asarray(value, dtype=object)
if len(value) and not lib.is_string_array(value, skipna=True):
raise ValueError("Must provide strings.")
string_array = np.asarray(self._arrow_array.to_pandas())
string_array[key] = value
self._arrow_array = pa.chunked_array([pa.array(string_array)])
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def _create_arithmetic_method(cls, op):
# Note: this handles both arithmetic and comparison methods.
def method(self, other):
is_arithmetic = True if op.__name__ in ops.ARITHMETIC_BINOPS else False
pandas_only = cls._pandas_only()
is_other_array = False
if not is_scalar(other):
is_other_array = True
other = np.asarray(other)
self_is_na = self.isna()
other_is_na = pd.isna(other)
mask = self_is_na | other_is_na
if pa is None or pandas_only:
if is_arithmetic:
ret = np.empty(self.shape, dtype=object)
else:
ret = np.zeros(self.shape, dtype=bool)
valid = ~mask
arr = (
self._arrow_array.to_pandas().to_numpy()
if self._use_arrow
else self._ndarray
)
o = other[valid] if is_other_array else other
ret[valid] = op(arr[valid], o)
if is_arithmetic:
return ArrowStringArray(ret)
else:
return pd.arrays.BooleanArray(ret, mask)
chunks = []
mask_chunks = []
start = 0
for chunk_array in self._arrow_array.chunks:
chunk_array = np.asarray(chunk_array.to_pandas())
end = start + len(chunk_array)
chunk_mask = mask[start:end]
chunk_valid = ~chunk_mask
if is_arithmetic:
result = np.empty(chunk_array.shape, dtype=object)
else:
result = np.zeros(chunk_array.shape, dtype=bool)
chunk_other = other
if is_other_array:
chunk_other = other[start:end]
chunk_other = chunk_other[chunk_valid]
# calculate only for both not None
result[chunk_valid] = op(chunk_array[chunk_valid], chunk_other)
if is_arithmetic:
chunks.append(pa.array(result, type=pa.string(), from_pandas=True))
else:
chunks.append(result)
mask_chunks.append(chunk_mask)
if is_arithmetic:
return ArrowStringArray(pa.chunked_array(chunks))
else:
return pd.arrays.BooleanArray(
np.concatenate(chunks), np.concatenate(mask_chunks)
)
return set_function_name(method, f"__{op.__name__}__", cls)
|
def _create_arithmetic_method(cls, op):
# Note: this handles both arithmetic and comparison methods.
def method(self, other):
is_arithmetic = True if op.__name__ in ops.ARITHMETIC_BINOPS else False
is_other_array = False
if not is_scalar(other):
is_other_array = True
other = np.asarray(other)
self_is_na = self.isna()
other_is_na = pd.isna(other)
mask = self_is_na | other_is_na
chunks = []
mask_chunks = []
start = 0
for chunk_array in self._arrow_array.chunks:
chunk_array = np.asarray(chunk_array.to_pandas())
end = start + len(chunk_array)
chunk_mask = mask[start:end]
chunk_valid = ~chunk_mask
if is_arithmetic:
result = np.empty(chunk_array.shape, dtype=object)
else:
result = np.zeros(chunk_array.shape, dtype=bool)
chunk_other = other
if is_other_array:
chunk_other = other[start:end]
chunk_other = chunk_other[chunk_valid]
# calculate only for both not None
result[chunk_valid] = op(chunk_array[chunk_valid], chunk_other)
if is_arithmetic:
chunks.append(pa.array(result, type=pa.string(), from_pandas=True))
else:
chunks.append(result)
mask_chunks.append(chunk_mask)
if is_arithmetic:
return ArrowStringArray(pa.chunked_array(chunks))
else:
return pd.arrays.BooleanArray(
np.concatenate(chunks), np.concatenate(mask_chunks)
)
return set_function_name(method, f"__{op.__name__}__", cls)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def method(self, other):
is_arithmetic = True if op.__name__ in ops.ARITHMETIC_BINOPS else False
pandas_only = cls._pandas_only()
is_other_array = False
if not is_scalar(other):
is_other_array = True
other = np.asarray(other)
self_is_na = self.isna()
other_is_na = pd.isna(other)
mask = self_is_na | other_is_na
if pa is None or pandas_only:
if is_arithmetic:
ret = np.empty(self.shape, dtype=object)
else:
ret = np.zeros(self.shape, dtype=bool)
valid = ~mask
arr = (
self._arrow_array.to_pandas().to_numpy()
if self._use_arrow
else self._ndarray
)
o = other[valid] if is_other_array else other
ret[valid] = op(arr[valid], o)
if is_arithmetic:
return ArrowStringArray(ret)
else:
return pd.arrays.BooleanArray(ret, mask)
chunks = []
mask_chunks = []
start = 0
for chunk_array in self._arrow_array.chunks:
chunk_array = np.asarray(chunk_array.to_pandas())
end = start + len(chunk_array)
chunk_mask = mask[start:end]
chunk_valid = ~chunk_mask
if is_arithmetic:
result = np.empty(chunk_array.shape, dtype=object)
else:
result = np.zeros(chunk_array.shape, dtype=bool)
chunk_other = other
if is_other_array:
chunk_other = other[start:end]
chunk_other = chunk_other[chunk_valid]
# calculate only for both not None
result[chunk_valid] = op(chunk_array[chunk_valid], chunk_other)
if is_arithmetic:
chunks.append(pa.array(result, type=pa.string(), from_pandas=True))
else:
chunks.append(result)
mask_chunks.append(chunk_mask)
if is_arithmetic:
return ArrowStringArray(pa.chunked_array(chunks))
else:
return pd.arrays.BooleanArray(
np.concatenate(chunks), np.concatenate(mask_chunks)
)
|
def method(self, other):
is_arithmetic = True if op.__name__ in ops.ARITHMETIC_BINOPS else False
is_other_array = False
if not is_scalar(other):
is_other_array = True
other = np.asarray(other)
self_is_na = self.isna()
other_is_na = pd.isna(other)
mask = self_is_na | other_is_na
chunks = []
mask_chunks = []
start = 0
for chunk_array in self._arrow_array.chunks:
chunk_array = np.asarray(chunk_array.to_pandas())
end = start + len(chunk_array)
chunk_mask = mask[start:end]
chunk_valid = ~chunk_mask
if is_arithmetic:
result = np.empty(chunk_array.shape, dtype=object)
else:
result = np.zeros(chunk_array.shape, dtype=bool)
chunk_other = other
if is_other_array:
chunk_other = other[start:end]
chunk_other = chunk_other[chunk_valid]
# calculate only for both not None
result[chunk_valid] = op(chunk_array[chunk_valid], chunk_other)
if is_arithmetic:
chunks.append(pa.array(result, type=pa.string(), from_pandas=True))
else:
chunks.append(result)
mask_chunks.append(chunk_mask)
if is_arithmetic:
return ArrowStringArray(pa.chunked_array(chunks))
else:
return pd.arrays.BooleanArray(
np.concatenate(chunks), np.concatenate(mask_chunks)
)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def __init__(self, values, dtype: ArrowListDtype = None, copy=False):
if dtype is None:
if isinstance(values, type(self)):
dtype = values.dtype
elif pa is not None:
if isinstance(values, pa.Array):
dtype = ArrowListDtype(values.type.value_type)
elif isinstance(values, pa.ChunkedArray):
dtype = ArrowListDtype(values.type.value_type)
else:
values = pa.array(values)
if values.type == pa.null():
dtype = ArrowListDtype(pa.string())
else:
dtype = ArrowListDtype(values.type.value_type)
else:
value_type = np.asarray(values[0]).dtype
dtype = ArrowListDtype(value_type)
super().__init__(values, dtype=dtype, copy=copy)
|
def __init__(self, values, dtype: ArrowListDtype = None, copy=False):
if dtype is None:
if isinstance(values, type(self)):
dtype = values.dtype
elif isinstance(values, pa.Array):
dtype = ArrowListDtype(values.type.value_type)
elif isinstance(values, pa.ChunkedArray):
dtype = ArrowListDtype(values.type.value_type)
else:
values = pa.array(values)
dtype = ArrowListDtype(values.type.value_type)
super().__init__(values, dtype=dtype, copy=copy)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def to_numpy(self, dtype=None, copy=False, na_value=lib.no_default):
if self._use_arrow:
s = self._arrow_array.to_pandas()
else:
s = pd.Series(self._ndarray)
s = s.map(lambda x: x.tolist() if hasattr(x, "tolist") else x)
if copy or na_value is not lib.no_default:
s = s.copy()
if na_value is not lib.no_default:
s[self.isna()] = na_value
return np.asarray(s)
|
def to_numpy(self, dtype=None, copy=False, na_value=lib.no_default):
s = self._arrow_array.to_pandas().map(lambda x: x.tolist() if x is not None else x)
if copy or na_value is not lib.no_default:
s = s.copy()
if na_value is not lib.no_default:
s[self.isna()] = na_value
return np.asarray(s)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def __setitem__(self, key, value):
if isinstance(value, (pd.Index, pd.Series)):
value = value.to_numpy()
key = check_array_indexer(self, key)
scalar_key = is_scalar(key)
# validate new items
if scalar_key:
if pd.isna(value):
value = None
elif not is_list_like(value):
raise ValueError("Must provide list.")
if self._use_arrow:
array = np.asarray(self._arrow_array.to_pandas())
array[key] = value
self._arrow_array = pa.chunked_array(
[pa.array(array, type=self.dtype.arrow_type)]
)
else:
self._ndarray[key] = value
|
def __setitem__(self, key, value):
if isinstance(value, (pd.Index, pd.Series)):
value = value.to_numpy()
key = check_array_indexer(self, key)
scalar_key = is_scalar(key)
# validate new items
if scalar_key:
if pd.isna(value):
value = None
elif not is_list_like(value):
raise ValueError("Must provide list.")
array = np.asarray(self._arrow_array.to_pandas())
array[key] = value
self._arrow_array = pa.chunked_array([pa.array(array, type=self.dtype.arrow_type)])
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def astype(self, dtype, copy=True):
msg = f"cannot astype from {self.dtype} to {dtype}"
dtype = pandas_dtype(dtype)
if isinstance(dtype, ArrowListDtype):
if self.dtype == dtype:
if copy:
return self.copy()
return self
else:
if self._use_arrow:
try:
arrow_array = self._arrow_array.cast(dtype.arrow_type)
return ArrowListArray(arrow_array)
except (NotImplementedError, pa.ArrowInvalid):
raise TypeError(msg)
else:
def f(x):
return pd.Series(x).astype(dtype.type).tolist()
try:
arr = pd.Series(self._ndarray)
ret = arr.map(f).to_numpy()
return ArrowStringArray(ret)
except ValueError:
raise TypeError(msg)
try:
return super().astype(dtype, copy=copy)
except ValueError:
raise TypeError(msg)
|
def astype(self, dtype, copy=True):
msg = f"cannot astype from {self.dtype} to {dtype}"
dtype = pandas_dtype(dtype)
if isinstance(dtype, ArrowListDtype):
if self.dtype == dtype:
if copy:
return self.copy()
return self
else:
try:
arrow_array = self._arrow_array.cast(dtype.arrow_type)
return ArrowListArray(arrow_array)
except (NotImplementedError, pa.ArrowInvalid):
raise TypeError(msg)
try:
return super().astype(dtype, copy=copy)
except ValueError:
raise TypeError(msg)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def _infer_df_func_returns(self, df, dtypes, index):
if isinstance(self._func, np.ufunc):
output_type, new_dtypes, index_value, new_elementwise = (
OutputType.dataframe,
None,
"inherit",
True,
)
else:
output_type, new_dtypes, index_value, new_elementwise = None, None, None, False
try:
empty_df = build_df(df, size=2)
with np.errstate(all="ignore"):
infer_df = empty_df.apply(
self._func,
axis=self._axis,
raw=self._raw,
result_type=self._result_type,
args=self.args,
**self.kwds,
)
if index_value is None:
if infer_df.index is empty_df.index:
index_value = "inherit"
else:
index_value = parse_index(pd.RangeIndex(-1))
if isinstance(infer_df, pd.DataFrame):
output_type = output_type or OutputType.dataframe
new_dtypes = new_dtypes or infer_df.dtypes
else:
output_type = output_type or OutputType.series
new_dtypes = new_dtypes or infer_df.dtype
new_elementwise = False if new_elementwise is None else new_elementwise
except: # noqa: E722 # nosec
pass
self.output_types = [output_type] if not self.output_types else self.output_types
dtypes = new_dtypes if dtypes is None else dtypes
index_value = index_value if index is None else parse_index(index)
self._elementwise = (
new_elementwise if self._elementwise is None else self._elementwise
)
return dtypes, index_value
|
def _infer_df_func_returns(self, in_dtypes, dtypes, index):
if isinstance(self._func, np.ufunc):
output_type, new_dtypes, index_value, new_elementwise = (
OutputType.dataframe,
None,
"inherit",
True,
)
else:
output_type, new_dtypes, index_value, new_elementwise = None, None, None, False
try:
empty_df = build_empty_df(in_dtypes, index=pd.RangeIndex(2))
with np.errstate(all="ignore"):
infer_df = empty_df.apply(
self._func,
axis=self._axis,
raw=self._raw,
result_type=self._result_type,
args=self.args,
**self.kwds,
)
if index_value is None:
if infer_df.index is empty_df.index:
index_value = "inherit"
else:
index_value = parse_index(pd.RangeIndex(-1))
if isinstance(infer_df, pd.DataFrame):
output_type = output_type or OutputType.dataframe
new_dtypes = new_dtypes or infer_df.dtypes
else:
output_type = output_type or OutputType.series
new_dtypes = new_dtypes or infer_df.dtype
new_elementwise = False if new_elementwise is None else new_elementwise
except: # noqa: E722 # nosec
pass
self.output_types = [output_type] if not self.output_types else self.output_types
dtypes = new_dtypes if dtypes is None else dtypes
index_value = index_value if index is None else parse_index(index)
self._elementwise = (
new_elementwise if self._elementwise is None else self._elementwise
)
return dtypes, index_value
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def _infer_series_func_returns(self, df):
try:
empty_series = build_series(df, size=2, name=df.name)
with np.errstate(all="ignore"):
infer_series = empty_series.apply(self._func, args=self.args, **self.kwds)
new_dtype = infer_series.dtype
name = infer_series.name
except: # noqa: E722 # nosec # pylint: disable=bare-except
new_dtype = np.dtype("object")
name = None
return new_dtype, name
|
def _infer_series_func_returns(self, in_dtype):
try:
empty_series = build_empty_series(in_dtype, index=pd.RangeIndex(2))
with np.errstate(all="ignore"):
infer_series = empty_series.apply(self._func, args=self.args, **self.kwds)
new_dtype = infer_series.dtype
except: # noqa: E722 # nosec # pylint: disable=bare-except
new_dtype = np.dtype("object")
return new_dtype
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def _call_dataframe(self, df, dtypes=None, index=None):
dtypes, index_value = self._infer_df_func_returns(df, dtypes, index)
for arg, desc in zip(
(self.output_types, dtypes, index_value), ("output_types", "dtypes", "index")
):
if arg is None:
raise TypeError(
f"Cannot determine {desc} by calculating with enumerate data, "
"please specify it as arguments"
)
if index_value == "inherit":
index_value = df.index_value
if self._elementwise:
shape = df.shape
elif self.output_types[0] == OutputType.dataframe:
shape = [np.nan, np.nan]
shape[1 - self.axis] = df.shape[1 - self.axis]
shape = tuple(shape)
else:
shape = (df.shape[1 - self.axis],)
if self.output_types[0] == OutputType.dataframe:
if self.axis == 0:
return self.new_dataframe(
[df],
shape=shape,
dtypes=dtypes,
index_value=index_value,
columns_value=parse_index(dtypes.index),
)
else:
return self.new_dataframe(
[df],
shape=shape,
dtypes=dtypes,
index_value=df.index_value,
columns_value=parse_index(dtypes.index),
)
else:
return self.new_series([df], shape=shape, dtype=dtypes, index_value=index_value)
|
def _call_dataframe(self, df, dtypes=None, index=None):
dtypes, index_value = self._infer_df_func_returns(df.dtypes, dtypes, index)
for arg, desc in zip(
(self.output_types, dtypes, index_value), ("output_types", "dtypes", "index")
):
if arg is None:
raise TypeError(
f"Cannot determine {desc} by calculating with enumerate data, "
"please specify it as arguments"
)
if index_value == "inherit":
index_value = df.index_value
if self._elementwise:
shape = df.shape
elif self.output_types[0] == OutputType.dataframe:
shape = [np.nan, np.nan]
shape[1 - self.axis] = df.shape[1 - self.axis]
shape = tuple(shape)
else:
shape = (df.shape[1 - self.axis],)
if self.output_types[0] == OutputType.dataframe:
if self.axis == 0:
return self.new_dataframe(
[df],
shape=shape,
dtypes=dtypes,
index_value=index_value,
columns_value=parse_index(dtypes.index),
)
else:
return self.new_dataframe(
[df],
shape=shape,
dtypes=dtypes,
index_value=df.index_value,
columns_value=parse_index(dtypes.index),
)
else:
return self.new_series([df], shape=shape, dtype=dtypes, index_value=index_value)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def _call_series(self, series):
if self._convert_dtype:
dtype, name = self._infer_series_func_returns(series)
else:
dtype, name = np.dtype("object"), None
return self.new_series(
[series],
dtype=dtype,
shape=series.shape,
index_value=series.index_value,
name=name,
)
|
def _call_series(self, series):
if self._convert_dtype:
dtype = self._infer_series_func_returns(series.dtype)
else:
dtype = np.dtype("object")
return self.new_series(
[series], dtype=dtype, shape=series.shape, index_value=series.index_value
)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def _infer_df_func_returns(self, df, dtypes):
if self.output_types[0] == OutputType.dataframe:
test_df = build_df(df, fill_value=1, size=2)
try:
with np.errstate(all="ignore"):
if self.call_agg:
infer_df = test_df.agg(
self._func, axis=self._axis, *self.args, **self.kwds
)
else:
infer_df = test_df.transform(
self._func, axis=self._axis, *self.args, **self.kwds
)
except: # noqa: E722
infer_df = None
else:
test_df = build_series(df, size=2, name=df.name)
try:
with np.errstate(all="ignore"):
if self.call_agg:
infer_df = test_df.agg(self._func, args=self.args, **self.kwds)
else:
infer_df = test_df.transform(
self._func,
convert_dtype=self.convert_dtype,
args=self.args,
**self.kwds,
)
except: # noqa: E722
infer_df = None
if infer_df is None and dtypes is None:
raise TypeError("Failed to infer dtype, please specify dtypes as arguments.")
if infer_df is None:
is_df = self.output_types[0] == OutputType.dataframe
else:
is_df = isinstance(infer_df, pd.DataFrame)
if is_df:
new_dtypes = dtypes or infer_df.dtypes
self.output_types = [OutputType.dataframe]
else:
new_dtypes = dtypes or (infer_df.name, infer_df.dtype)
self.output_types = [OutputType.series]
return new_dtypes
|
def _infer_df_func_returns(self, in_dtypes, dtypes):
if self.output_types[0] == OutputType.dataframe:
empty_df = build_empty_df(in_dtypes, index=pd.RangeIndex(2))
try:
with np.errstate(all="ignore"):
if self.call_agg:
infer_df = empty_df.agg(
self._func, axis=self._axis, *self.args, **self.kwds
)
else:
infer_df = empty_df.transform(
self._func, axis=self._axis, *self.args, **self.kwds
)
except: # noqa: E722
infer_df = None
else:
empty_df = build_empty_series(
in_dtypes[1], index=pd.RangeIndex(2), name=in_dtypes[0]
)
try:
with np.errstate(all="ignore"):
if self.call_agg:
infer_df = empty_df.agg(self._func, args=self.args, **self.kwds)
else:
infer_df = empty_df.transform(
self._func,
convert_dtype=self.convert_dtype,
args=self.args,
**self.kwds,
)
except: # noqa: E722
infer_df = None
if infer_df is None and dtypes is None:
raise TypeError("Failed to infer dtype, please specify dtypes as arguments.")
if infer_df is None:
is_df = self.output_types[0] == OutputType.dataframe
else:
is_df = isinstance(infer_df, pd.DataFrame)
if is_df:
new_dtypes = dtypes or infer_df.dtypes
self.output_types = [OutputType.dataframe]
else:
new_dtypes = dtypes or (infer_df.name, infer_df.dtype)
self.output_types = [OutputType.series]
return new_dtypes
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def __call__(self, df, dtypes=None, index=None):
axis = getattr(self, "axis", None) or 0
self._axis = validate_axis(axis, df)
dtypes = self._infer_df_func_returns(df, dtypes)
for arg, desc in zip((self.output_types, dtypes), ("output_types", "dtypes")):
if arg is None:
raise TypeError(
f"Cannot determine {desc} by calculating with enumerate data, "
"please specify it as arguments"
)
if self.output_types[0] == OutputType.dataframe:
new_shape = list(df.shape)
new_index_value = df.index_value
if len(new_shape) == 1:
new_shape.append(len(dtypes))
else:
new_shape[1] = len(dtypes)
if self.call_agg:
new_shape[self.axis] = np.nan
new_index_value = parse_index(None, (df.key, df.index_value.key))
return self.new_dataframe(
[df],
shape=tuple(new_shape),
dtypes=dtypes,
index_value=new_index_value,
columns_value=parse_index(dtypes.index, store_data=True),
)
else:
name, dtype = dtypes
if isinstance(df, DATAFRAME_TYPE):
new_shape = (df.shape[1 - axis],)
new_index_value = [df.columns_value, df.index_value][axis]
else:
new_shape = (np.nan,) if self.call_agg else df.shape
new_index_value = df.index_value
return self.new_series(
[df], shape=new_shape, name=name, dtype=dtype, index_value=new_index_value
)
|
def __call__(self, df, dtypes=None, index=None):
axis = getattr(self, "axis", None) or 0
self._axis = validate_axis(axis, df)
if self.output_types[0] == OutputType.dataframe:
dtypes = self._infer_df_func_returns(df.dtypes, dtypes)
else:
dtypes = self._infer_df_func_returns((df.name, df.dtype), dtypes)
for arg, desc in zip((self.output_types, dtypes), ("output_types", "dtypes")):
if arg is None:
raise TypeError(
f"Cannot determine {desc} by calculating with enumerate data, "
"please specify it as arguments"
)
if self.output_types[0] == OutputType.dataframe:
new_shape = list(df.shape)
new_index_value = df.index_value
if len(new_shape) == 1:
new_shape.append(len(dtypes))
else:
new_shape[1] = len(dtypes)
if self.call_agg:
new_shape[self.axis] = np.nan
new_index_value = parse_index(None, (df.key, df.index_value.key))
return self.new_dataframe(
[df],
shape=tuple(new_shape),
dtypes=dtypes,
index_value=new_index_value,
columns_value=parse_index(dtypes.index, store_data=True),
)
else:
name, dtype = dtypes
if isinstance(df, DATAFRAME_TYPE):
new_shape = (df.shape[1 - axis],)
new_index_value = [df.columns_value, df.index_value][axis]
else:
new_shape = (np.nan,) if self.call_agg else df.shape
new_index_value = df.index_value
return self.new_series(
[df], shape=new_shape, name=name, dtype=dtype, index_value=new_index_value
)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def to_pandas(self):
data = getattr(self, "_data", None)
if data is None:
sortorder = getattr(self, "_sortorder", None)
return pd.MultiIndex.from_arrays(
[np.array([], dtype=dtype) for dtype in self._dtypes],
sortorder=sortorder,
names=self._names,
)
return pd.MultiIndex.from_tuples(
[tuple(d) for d in data], sortorder=self._sortorder, names=self._names
)
|
def to_pandas(self):
data = getattr(self, "_data", None)
if data is None:
sortorder = getattr(self, "_sortorder", None)
return pd.MultiIndex.from_arrays(
[[] for _ in range(len(self._names))],
sortorder=sortorder,
names=self._names,
)
return pd.MultiIndex.from_tuples(
[tuple(d) for d in data], sortorder=self._sortorder, names=self._names
)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def _infer_df_func_returns(self, in_groupby, in_df, dtypes, index):
index_value, output_type, new_dtypes = None, None, None
try:
if in_df.op.output_types[0] == OutputType.dataframe:
test_df = build_df(in_df, size=2)
else:
test_df = build_series(in_df, size=2, name=in_df.name)
selection = getattr(in_groupby.op, "selection", None)
if selection:
test_df = test_df[selection]
with np.errstate(all="ignore"):
infer_df = self.func(test_df, *self.args, **self.kwds)
# todo return proper index when sort=True is implemented
index_value = parse_index(None, in_df.key, self.func)
if isinstance(infer_df, pd.DataFrame):
output_type = output_type or OutputType.dataframe
new_dtypes = new_dtypes or infer_df.dtypes
elif isinstance(infer_df, pd.Series):
output_type = output_type or OutputType.series
new_dtypes = new_dtypes or (infer_df.name, infer_df.dtype)
else:
output_type = OutputType.series
new_dtypes = (None, pd.Series(infer_df).dtype)
except: # noqa: E722 # nosec
pass
self.output_types = [output_type] if not self.output_types else self.output_types
dtypes = new_dtypes if dtypes is None else dtypes
index_value = index_value if index is None else parse_index(index)
return dtypes, index_value
|
def _infer_df_func_returns(self, in_groupby, in_df, dtypes, index):
index_value, output_type, new_dtypes = None, None, None
try:
if in_df.op.output_types[0] == OutputType.dataframe:
empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
else:
empty_df = build_empty_series(
in_df.dtype, index=pd.RangeIndex(2), name=in_df.name
)
selection = getattr(in_groupby.op, "selection", None)
if selection:
empty_df = empty_df[selection]
with np.errstate(all="ignore"):
infer_df = self.func(empty_df, *self.args, **self.kwds)
# todo return proper index when sort=True is implemented
index_value = parse_index(None, in_df.key, self.func)
if isinstance(infer_df, pd.DataFrame):
output_type = output_type or OutputType.dataframe
new_dtypes = new_dtypes or infer_df.dtypes
elif isinstance(infer_df, pd.Series):
output_type = output_type or OutputType.series
new_dtypes = new_dtypes or (infer_df.name, infer_df.dtype)
else:
output_type = OutputType.series
new_dtypes = (None, pd.Series(infer_df).dtype)
except: # noqa: E722 # nosec
pass
self.output_types = [output_type] if not self.output_types else self.output_types
dtypes = new_dtypes if dtypes is None else dtypes
index_value = index_value if index is None else parse_index(index)
return dtypes, index_value
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def build_mock_groupby(self, **kwargs):
in_df = self.inputs[0]
if self.is_dataframe_obj:
empty_df = build_df(in_df, size=2)
obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype("O")]
empty_df[obj_dtypes.index] = "O"
else:
if in_df.dtype == np.dtype("O"):
empty_df = pd.Series(
"O", index=pd.RangeIndex(2), name=in_df.name, dtype=np.dtype("O")
)
else:
empty_df = build_series(in_df, size=2, name=in_df.name)
new_kw = self.groupby_params
new_kw.update(kwargs)
if new_kw.get("level"):
new_kw["level"] = 0
if isinstance(new_kw["by"], list):
new_by = []
for v in new_kw["by"]:
if isinstance(v, (Base, Entity)):
new_by.append(build_series(v, size=2, name=v.name))
else:
new_by.append(v)
new_kw["by"] = new_by
return empty_df.groupby(**new_kw)
|
def build_mock_groupby(self, **kwargs):
in_df = self.inputs[0]
if self.is_dataframe_obj:
empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype("O")]
empty_df[obj_dtypes.index] = "O"
else:
if in_df.dtype == np.dtype("O"):
empty_df = pd.Series(
"O", index=pd.RangeIndex(2), name=in_df.name, dtype=np.dtype("O")
)
else:
empty_df = build_empty_series(
in_df.dtype, index=pd.RangeIndex(2), name=in_df.name
)
new_kw = self.groupby_params
new_kw.update(kwargs)
if new_kw.get("level"):
new_kw["level"] = 0
if isinstance(new_kw["by"], list):
new_by = []
for v in new_kw["by"]:
if isinstance(v, (Base, Entity)):
new_by.append(
build_empty_series(v.dtype, index=pd.RangeIndex(2), name=v.name)
)
else:
new_by.append(v)
new_kw["by"] = new_by
return empty_df.groupby(**new_kw)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def _calc_renamed_df(self, df, errors="ignore"):
empty_df = build_df(df)
return empty_df.rename(
columns=self._columns_mapper,
index=self._index_mapper,
level=self._level,
errors=errors,
)
|
def _calc_renamed_df(self, dtypes, index, errors="ignore"):
empty_df = build_empty_df(dtypes, index=index)
return empty_df.rename(
columns=self._columns_mapper,
index=self._index_mapper,
level=self._level,
errors=errors,
)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def _calc_renamed_series(self, df, errors="ignore"):
empty_series = build_series(df, name=df.name)
new_series = empty_series.rename(
index=self._index_mapper, level=self._level, errors=errors
)
if self._new_name:
new_series.name = self._new_name
return new_series
|
def _calc_renamed_series(self, name, dtype, index, errors="ignore"):
empty_series = build_empty_series(dtype, index=index, name=name)
new_series = empty_series.rename(
index=self._index_mapper, level=self._level, errors=errors
)
if self._new_name:
new_series.name = self._new_name
return new_series
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def __call__(self, df):
params = df.params
raw_index = df.index_value.to_pandas()
if df.ndim == 2:
new_df = self._calc_renamed_df(df, errors=self.errors)
new_index = new_df.index
elif isinstance(df, SERIES_TYPE):
new_df = self._calc_renamed_series(df, errors=self.errors)
new_index = new_df.index
else:
new_df = new_index = raw_index.rename(self._index_mapper or self._new_name)
if self._columns_mapper is not None:
params["columns_value"] = parse_index(new_df.columns, store_data=True)
params["dtypes"] = new_df.dtypes
if self._index_mapper is not None:
params["index_value"] = parse_index(new_index)
if df.ndim == 1:
params["name"] = new_df.name
return self.new_tileable([df], **params)
|
def __call__(self, df):
params = df.params
raw_index = df.index_value.to_pandas()
if df.ndim == 2:
new_df = self._calc_renamed_df(df.dtypes, raw_index, errors=self.errors)
new_index = new_df.index
elif isinstance(df, SERIES_TYPE):
new_df = self._calc_renamed_series(
df.name, df.dtype, raw_index, errors=self.errors
)
new_index = new_df.index
else:
new_df = new_index = raw_index.rename(self._index_mapper or self._new_name)
if self._columns_mapper is not None:
params["columns_value"] = parse_index(new_df.columns, store_data=True)
params["dtypes"] = new_df.dtypes
if self._index_mapper is not None:
params["index_value"] = parse_index(new_index)
if df.ndim == 1:
params["name"] = new_df.name
return self.new_tileable([df], **params)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def tile(cls, op: "DataFrameRename"):
inp = op.inputs[0]
out = op.outputs[0]
chunks = []
dtypes_cache = dict()
for c in inp.chunks:
params = c.params
new_op = op.copy().reset_key()
if op.columns_mapper is not None:
try:
new_dtypes = dtypes_cache[c.index[0]]
except KeyError:
new_dtypes = dtypes_cache[c.index[0]] = op._calc_renamed_df(c).dtypes
params["columns_value"] = parse_index(new_dtypes.index, store_data=True)
params["dtypes"] = new_dtypes
if op.index_mapper is not None:
params["index_value"] = out.index_value
if op.new_name is not None:
params["name"] = out.name
if isinstance(op.columns_mapper, dict):
idx = params["dtypes"].index
if op._level is not None:
idx = idx.get_level_values(op._level)
new_op._columns_mapper = {
k: v for k, v in op.columns_mapper.items() if v in idx
}
chunks.append(new_op.new_chunk([c], **params))
new_op = op.copy().reset_key()
return new_op.new_tileables([inp], chunks=chunks, nsplits=inp.nsplits, **out.params)
|
def tile(cls, op: "DataFrameRename"):
inp = op.inputs[0]
out = op.outputs[0]
chunks = []
dtypes_cache = dict()
for c in inp.chunks:
params = c.params
new_op = op.copy().reset_key()
if op.columns_mapper is not None:
try:
new_dtypes = dtypes_cache[c.index[0]]
except KeyError:
new_dtypes = dtypes_cache[c.index[0]] = op._calc_renamed_df(
c.dtypes, c.index_value.to_pandas()
).dtypes
params["columns_value"] = parse_index(new_dtypes.index, store_data=True)
params["dtypes"] = new_dtypes
if op.index_mapper is not None:
params["index_value"] = out.index_value
if op.new_name is not None:
params["name"] = out.name
if isinstance(op.columns_mapper, dict):
idx = params["dtypes"].index
if op._level is not None:
idx = idx.get_level_values(op._level)
new_op._columns_mapper = {
k: v for k, v in op.columns_mapper.items() if v in idx
}
chunks.append(new_op.new_chunk([c], **params))
new_op = op.copy().reset_key()
return new_op.new_tileables([inp], chunks=chunks, nsplits=inp.nsplits, **out.params)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def _calc_result_shape(self, df):
if self.output_types[0] == OutputType.dataframe:
test_obj = build_df(df, size=10)
else:
test_obj = build_series(df, size=10, name=df.name)
result_df = test_obj.agg(self.func, axis=self.axis)
if isinstance(result_df, pd.DataFrame):
self.output_types = [OutputType.dataframe]
return result_df.dtypes, result_df.index
elif isinstance(result_df, pd.Series):
self.output_types = [OutputType.series]
return pd.Series([result_df.dtype], index=[result_df.name]), result_df.index
else:
self.output_types = [OutputType.scalar]
return np.array(result_df).dtype, None
|
def _calc_result_shape(self, df):
if self.output_types[0] == OutputType.dataframe:
empty_obj = build_empty_df(df.dtypes, index=pd.RangeIndex(0, 10))
else:
empty_obj = build_empty_series(
df.dtype, index=pd.RangeIndex(0, 10), name=df.name
)
result_df = empty_obj.agg(self.func, axis=self.axis)
if isinstance(result_df, pd.DataFrame):
self.output_types = [OutputType.dataframe]
return result_df.dtypes, result_df.index
elif isinstance(result_df, pd.Series):
self.output_types = [OutputType.series]
return pd.Series([result_df.dtype], index=[result_df.name]), result_df.index
else:
self.output_types = [OutputType.scalar]
return np.array(result_df).dtype, None
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def parse_index(index_value, *args, store_data=False, key=None):
from .core import IndexValue
def _extract_property(index, tp, ret_data):
kw = {
"_min_val": _get_index_min(index),
"_max_val": _get_index_max(index),
"_min_val_close": True,
"_max_val_close": True,
"_key": key or _tokenize_index(index, *args),
}
if ret_data:
kw["_data"] = index.values
for field in tp._FIELDS:
if field in kw or field == "_data":
continue
val = getattr(index, field.lstrip("_"), None)
if val is not None:
kw[field] = val
return kw
def _tokenize_index(index, *token_objects):
if not index.empty:
return tokenize(index)
else:
return tokenize(index, *token_objects)
def _get_index_min(index):
try:
return index.min()
except ValueError:
if isinstance(index, pd.IntervalIndex):
return None
raise
except TypeError:
return None
def _get_index_max(index):
try:
return index.max()
except ValueError:
if isinstance(index, pd.IntervalIndex):
return None
raise
except TypeError:
return None
def _serialize_index(index):
tp = getattr(IndexValue, type(index).__name__)
properties = _extract_property(index, tp, store_data)
return tp(**properties)
def _serialize_range_index(index):
if is_pd_range_empty(index):
properties = {
"_is_monotonic_increasing": True,
"_is_monotonic_decreasing": False,
"_is_unique": True,
"_min_val": _get_index_min(index),
"_max_val": _get_index_max(index),
"_min_val_close": True,
"_max_val_close": False,
"_key": key or _tokenize_index(index, *args),
"_name": index.name,
"_dtype": index.dtype,
}
else:
properties = _extract_property(index, IndexValue.RangeIndex, False)
return IndexValue.RangeIndex(
_slice=slice(
_get_range_index_start(index),
_get_range_index_stop(index),
_get_range_index_step(index),
),
**properties,
)
def _serialize_multi_index(index):
kw = _extract_property(index, IndexValue.MultiIndex, store_data)
kw["_sortorder"] = index.sortorder
kw["_dtypes"] = [lev.dtype for lev in index.levels]
return IndexValue.MultiIndex(**kw)
if index_value is None:
return IndexValue(
_index_value=IndexValue.Index(
_is_monotonic_increasing=False,
_is_monotonic_decreasing=False,
_is_unique=False,
_min_val=None,
_max_val=None,
_min_val_close=True,
_max_val_close=True,
_key=key or tokenize(*args),
)
)
if isinstance(index_value, pd.RangeIndex):
return IndexValue(_index_value=_serialize_range_index(index_value))
elif isinstance(index_value, pd.MultiIndex):
return IndexValue(_index_value=_serialize_multi_index(index_value))
else:
return IndexValue(_index_value=_serialize_index(index_value))
|
def parse_index(index_value, *args, store_data=False, key=None):
from .core import IndexValue
def _extract_property(index, tp, ret_data):
kw = {
"_min_val": _get_index_min(index),
"_max_val": _get_index_max(index),
"_min_val_close": True,
"_max_val_close": True,
"_key": key or _tokenize_index(index, *args),
}
if ret_data:
kw["_data"] = index.values
for field in tp._FIELDS:
if field in kw or field == "_data":
continue
val = getattr(index, field.lstrip("_"), None)
if val is not None:
kw[field] = val
return kw
def _tokenize_index(index, *token_objects):
if not index.empty:
return tokenize(index)
else:
return tokenize(index, *token_objects)
def _get_index_min(index):
try:
return index.min()
except ValueError:
if isinstance(index, pd.IntervalIndex):
return None
raise
except TypeError:
return None
def _get_index_max(index):
try:
return index.max()
except ValueError:
if isinstance(index, pd.IntervalIndex):
return None
raise
except TypeError:
return None
def _serialize_index(index):
tp = getattr(IndexValue, type(index).__name__)
properties = _extract_property(index, tp, store_data)
return tp(**properties)
def _serialize_range_index(index):
if is_pd_range_empty(index):
properties = {
"_is_monotonic_increasing": True,
"_is_monotonic_decreasing": False,
"_is_unique": True,
"_min_val": _get_index_min(index),
"_max_val": _get_index_max(index),
"_min_val_close": True,
"_max_val_close": False,
"_key": key or _tokenize_index(index, *args),
"_name": index.name,
"_dtype": index.dtype,
}
else:
properties = _extract_property(index, IndexValue.RangeIndex, False)
return IndexValue.RangeIndex(
_slice=slice(
_get_range_index_start(index),
_get_range_index_stop(index),
_get_range_index_step(index),
),
**properties,
)
def _serialize_multi_index(index):
kw = _extract_property(index, IndexValue.MultiIndex, store_data)
kw["_sortorder"] = index.sortorder
return IndexValue.MultiIndex(**kw)
if index_value is None:
return IndexValue(
_index_value=IndexValue.Index(
_is_monotonic_increasing=False,
_is_monotonic_decreasing=False,
_is_unique=False,
_min_val=None,
_max_val=None,
_min_val_close=True,
_max_val_close=True,
_key=key or tokenize(*args),
)
)
if isinstance(index_value, pd.RangeIndex):
return IndexValue(_index_value=_serialize_range_index(index_value))
elif isinstance(index_value, pd.MultiIndex):
return IndexValue(_index_value=_serialize_multi_index(index_value))
else:
return IndexValue(_index_value=_serialize_index(index_value))
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def _serialize_multi_index(index):
kw = _extract_property(index, IndexValue.MultiIndex, store_data)
kw["_sortorder"] = index.sortorder
kw["_dtypes"] = [lev.dtype for lev in index.levels]
return IndexValue.MultiIndex(**kw)
|
def _serialize_multi_index(index):
kw = _extract_property(index, IndexValue.MultiIndex, store_data)
kw["_sortorder"] = index.sortorder
return IndexValue.MultiIndex(**kw)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def _generate_value(dtype, fill_value):
# special handle for datetime64 and timedelta64
dispatch = {
np.datetime64: pd.Timestamp,
np.timedelta64: pd.Timedelta,
pd.CategoricalDtype.type: lambda x: pd.CategoricalDtype([x]),
# for object, we do not know the actual dtype,
# just convert to str for common usage
np.object_: lambda x: str(fill_value),
}
# otherwise, just use dtype.type itself to convert
convert = dispatch.get(dtype.type, dtype.type)
return convert(fill_value)
|
def _generate_value(dtype, fill_value):
# special handle for datetime64 and timedelta64
dispatch = {
np.datetime64: pd.Timestamp,
np.timedelta64: pd.Timedelta,
}
# otherwise, just use dtype.type itself to convert
convert = dispatch.get(dtype.type, dtype.type)
return convert(fill_value)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def build_empty_df(dtypes, index=None):
columns = dtypes.index
# duplicate column may exist,
# so use RangeIndex first
df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
length = len(index) if index is not None else 0
for i, d in enumerate(dtypes):
df[i] = pd.Series(
[_generate_value(d, 1) for _ in range(length)], dtype=d, index=index
)
df.columns = columns
return df
|
def build_empty_df(dtypes, index=None):
columns = dtypes.index
# duplicate column may exist,
# so use RangeIndex first
df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
for i, d in enumerate(dtypes):
df[i] = pd.Series(dtype=d, index=index)
df.columns = columns
return df
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def build_df(df_obj, fill_value=1, size=1):
empty_df = build_empty_df(df_obj.dtypes, index=df_obj.index_value.to_pandas()[:0])
dtypes = empty_df.dtypes
record = [_generate_value(dtype, fill_value) for dtype in dtypes]
if isinstance(empty_df.index, pd.MultiIndex):
index = tuple(
_generate_value(level.dtype, fill_value) for level in empty_df.index.levels
)
empty_df = empty_df.reindex(
index=pd.MultiIndex.from_tuples([index], names=empty_df.index.names)
)
empty_df.iloc[0] = record
else:
index = _generate_value(empty_df.index.dtype, fill_value)
empty_df.loc[index] = record
empty_df = pd.concat([empty_df] * size)
# make sure dtypes correct for MultiIndex
for i, dtype in enumerate(dtypes.tolist()):
s = empty_df.iloc[:, i]
if not pd.api.types.is_dtype_equal(s.dtype, dtype):
empty_df.iloc[:, i] = s.astype(dtype)
return empty_df
|
def build_df(df_obj, fill_value=1, size=1):
empty_df = build_empty_df(df_obj.dtypes, index=df_obj.index_value.to_pandas()[:0])
dtypes = empty_df.dtypes
record = [_generate_value(dtype, fill_value) for dtype in empty_df.dtypes]
if isinstance(empty_df.index, pd.MultiIndex):
index = tuple(
_generate_value(level.dtype, fill_value) for level in empty_df.index.levels
)
empty_df.loc[index, :] = record
else:
index = _generate_value(empty_df.index.dtype, fill_value)
empty_df.loc[index] = record
empty_df = pd.concat([empty_df] * size)
# make sure dtypes correct for MultiIndex
for i, dtype in enumerate(dtypes.tolist()):
s = empty_df.iloc[:, i]
if s.dtype != dtype:
empty_df.iloc[:, i] = s.astype(dtype)
return empty_df
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def build_empty_series(dtype, index=None, name=None):
length = len(index) if index is not None else 0
return pd.Series(
[_generate_value(dtype, 1) for _ in range(length)],
dtype=dtype,
index=index,
name=name,
)
|
def build_empty_series(dtype, index=None, name=None):
return pd.Series(dtype=dtype, index=index, name=name)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def build_series(series_obj, fill_value=1, size=1, name=None):
empty_series = build_empty_series(
series_obj.dtype, name=name, index=series_obj.index_value.to_pandas()[:0]
)
record = _generate_value(series_obj.dtype, fill_value)
if isinstance(empty_series.index, pd.MultiIndex):
index = tuple(
_generate_value(level.dtype, fill_value)
for level in empty_series.index.levels
)
empty_series = empty_series.reindex(
index=pd.MultiIndex.from_tuples([index], names=empty_series.index.names)
)
empty_series.iloc[0] = record
else:
if isinstance(empty_series.index.dtype, pd.CategoricalDtype):
index = None
else:
index = _generate_value(empty_series.index.dtype, fill_value)
empty_series.loc[index] = record
empty_series = pd.concat([empty_series] * size)
# make sure dtype correct for MultiIndex
empty_series = empty_series.astype(series_obj.dtype, copy=False)
return empty_series
|
def build_series(series_obj, fill_value=1, size=1):
empty_series = build_empty_series(
series_obj.dtype, index=series_obj.index_value.to_pandas()[:0]
)
record = _generate_value(series_obj.dtype, fill_value)
if isinstance(empty_series.index, pd.MultiIndex):
index = tuple(
_generate_value(level.dtype, fill_value)
for level in empty_series.index.levels
)
empty_series.loc[index,] = record
else:
if isinstance(empty_series.index.dtype, pd.CategoricalDtype):
index = None
else:
index = _generate_value(empty_series.index.dtype, fill_value)
empty_series.loc[index] = record
empty_series = pd.concat([empty_series] * size)
# make sure dtype correct for MultiIndex
empty_series = empty_series.astype(series_obj.dtype, copy=False)
return empty_series
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def __call__(self, expanding):
inp = expanding.input
raw_func = self.func
self._normalize_funcs()
if isinstance(inp, DATAFRAME_TYPE):
empty_df = build_df(inp)
for c, t in empty_df.dtypes.items():
if t == np.dtype("O"):
empty_df[c] = "O"
test_df = expanding(empty_df).agg(raw_func)
if self._axis == 0:
index_value = inp.index_value
else:
index_value = parse_index(
test_df.index, expanding.params, inp, store_data=False
)
self._append_index = test_df.columns.nlevels != empty_df.columns.nlevels
return self.new_dataframe(
[inp],
shape=(inp.shape[0], test_df.shape[1]),
dtypes=test_df.dtypes,
index_value=index_value,
columns_value=parse_index(test_df.columns, store_data=True),
)
else:
pd_index = inp.index_value.to_pandas()
empty_series = build_empty_series(inp.dtype, index=pd_index[:0], name=inp.name)
test_obj = expanding(empty_series).agg(raw_func)
if isinstance(test_obj, pd.DataFrame):
return self.new_dataframe(
[inp],
shape=(inp.shape[0], test_obj.shape[1]),
dtypes=test_obj.dtypes,
index_value=inp.index_value,
columns_value=parse_index(test_obj.dtypes.index, store_data=True),
)
else:
return self.new_series(
[inp],
shape=inp.shape,
dtype=test_obj.dtype,
index_value=inp.index_value,
name=test_obj.name,
)
|
def __call__(self, expanding):
inp = expanding.input
raw_func = self.func
self._normalize_funcs()
if isinstance(inp, DATAFRAME_TYPE):
pd_index = inp.index_value.to_pandas()
empty_df = build_empty_df(inp.dtypes, index=pd_index[:1])
for c, t in empty_df.dtypes.items():
if t == np.dtype("O"):
empty_df[c] = "O"
test_df = expanding(empty_df).agg(raw_func)
if self._axis == 0:
index_value = inp.index_value
else:
index_value = parse_index(
test_df.index, expanding.params, inp, store_data=False
)
self._append_index = test_df.columns.nlevels != empty_df.columns.nlevels
return self.new_dataframe(
[inp],
shape=(inp.shape[0], test_df.shape[1]),
dtypes=test_df.dtypes,
index_value=index_value,
columns_value=parse_index(test_df.columns, store_data=True),
)
else:
pd_index = inp.index_value.to_pandas()
empty_series = build_empty_series(inp.dtype, index=pd_index[:0], name=inp.name)
test_obj = expanding(empty_series).agg(raw_func)
if isinstance(test_obj, pd.DataFrame):
return self.new_dataframe(
[inp],
shape=(inp.shape[0], test_obj.shape[1]),
dtypes=test_obj.dtypes,
index_value=inp.index_value,
columns_value=parse_index(test_obj.dtypes.index, store_data=True),
)
else:
return self.new_series(
[inp],
shape=inp.shape,
dtype=test_obj.dtype,
index_value=inp.index_value,
name=test_obj.name,
)
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def __new__(mcs, name, bases, kv):
if "__call__" in kv:
# if __call__ is specified for an operand,
# make sure that entering user space
kv["__call__"] = enter_mode(kernel=False)(kv["__call__"])
cls = super().__new__(mcs, name, bases, kv)
for base in bases:
if OP_TYPE_KEY not in kv and hasattr(base, OP_TYPE_KEY):
kv[OP_TYPE_KEY] = getattr(base, OP_TYPE_KEY)
if OP_MODULE_KEY not in kv and hasattr(base, OP_MODULE_KEY):
kv[OP_MODULE_KEY] = getattr(base, OP_MODULE_KEY)
if kv.get(OP_TYPE_KEY) is not None and kv.get(OP_MODULE_KEY) is not None:
# common operand can be inherited for different modules, like tensor or dataframe, so forth
operand_type_to_oprand_cls[kv[OP_MODULE_KEY], kv[OP_TYPE_KEY]] = cls
return cls
|
def __new__(mcs, name, bases, kv):
cls = super().__new__(mcs, name, bases, kv)
for base in bases:
if OP_TYPE_KEY not in kv and hasattr(base, OP_TYPE_KEY):
kv[OP_TYPE_KEY] = getattr(base, OP_TYPE_KEY)
if OP_MODULE_KEY not in kv and hasattr(base, OP_MODULE_KEY):
kv[OP_MODULE_KEY] = getattr(base, OP_MODULE_KEY)
if kv.get(OP_TYPE_KEY) is not None and kv.get(OP_MODULE_KEY) is not None:
# common operand can be inherited for different modules, like tensor or dataframe, so forth
operand_type_to_oprand_cls[kv[OP_MODULE_KEY], kv[OP_TYPE_KEY]] = cls
return cls
|
https://github.com/mars-project/mars/issues/1514
|
In [1]: import mars.dataframe as md
In [2]: df = md.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
In [3]: df['b'] = df['b'].astype(md.ArrowStringDtype())
In [6]: df.groupby('b').count()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
545 elif not is_extension_array_dtype(subarr):
--> 546 subarr = construct_1d_ndarray_preserving_na(subarr, dtype, copy=copy)
547 except OutOfBoundsDatetime:
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/cast.py in construct_1d_ndarray_preserving_na(values, dtype, copy)
1506 """
-> 1507 subarr = np.array(values, dtype=dtype, copy=copy)
1508
TypeError: Cannot interpret '<mars.dataframe.arrays.ArrowStringDtype object at 0x7f7f81874150>' as a data type
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-6-884154032b17> in <module>
----> 1 df.groupby('b').count()
~/Workspace/mars/mars/dataframe/groupby/__init__.py in <lambda>(groupby, **kw)
40 setattr(cls, 'max', lambda groupby, **kw: agg(groupby, 'max', **kw))
41 setattr(cls, 'min', lambda groupby, **kw: agg(groupby, 'min', **kw))
---> 42 setattr(cls, 'count', lambda groupby, **kw: agg(groupby, 'count', **kw))
43 setattr(cls, 'size', lambda groupby, **kw: agg(groupby, 'size', **kw))
44 setattr(cls, 'mean', lambda groupby, **kw: agg(groupby, 'mean', **kw))
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in agg(groupby, func, method, *args, **kwargs)
650 agg_op = DataFrameGroupByAgg(func=func, method=method, raw_func=func,
651 groupby_params=groupby.op.groupby_params)
--> 652 return agg_op(groupby)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in __call__(self, groupby)
201
202 if self.output_types[0] == OutputType.dataframe:
--> 203 return self._call_dataframe(groupby, df)
204 else:
205 return self._call_series(groupby, df)
~/Workspace/mars/mars/dataframe/groupby/aggregation.py in _call_dataframe(self, groupby, input_df)
144
145 def _call_dataframe(self, groupby, input_df):
--> 146 grouped = groupby.op.build_mock_groupby()
147 agg_df = grouped.aggregate(self.func)
148
~/Workspace/mars/mars/dataframe/groupby/core.py in build_mock_groupby(self, **kwargs)
98 in_df = self.inputs[0]
99 if self.is_dataframe_obj:
--> 100 empty_df = build_empty_df(in_df.dtypes, index=pd.RangeIndex(2))
101 obj_dtypes = in_df.dtypes[in_df.dtypes == np.dtype('O')]
102 empty_df[obj_dtypes.index] = 'O'
~/Workspace/mars/mars/dataframe/utils.py in build_empty_df(dtypes, index)
446 df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
447 for i, d in enumerate(dtypes):
--> 448 df[i] = pd.Series(dtype=d, index=index)
449 df.columns = columns
450 return df
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
254 data = data._data
255 elif is_dict_like(data):
--> 256 data, index = self._init_dict(data, index, dtype)
257 dtype = None
258 copy = False
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in _init_dict(self, data, index, dtype)
347 # TODO: passing np.float64 to not break anything yet. See GH-17261
348 s = create_series_with_explicit_dtype(
--> 349 values, index=keys, dtype=dtype, dtype_if_empty=np.float64
350 )
351
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in create_series_with_explicit_dtype(data, index, dtype, name, copy, fastpath, dtype_if_empty)
623 dtype = dtype_if_empty
624 return Series(
--> 625 data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
626 )
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in __init__(self, data, index, dtype, name, copy, fastpath)
303 data = data.copy()
304 else:
--> 305 data = sanitize_array(data, index, dtype, copy, raise_cast_failure=True)
306
307 data = SingleBlockManager(data, index, fastpath=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in sanitize_array(data, index, dtype, copy, raise_cast_failure)
447 subarr = _try_cast(arr, dtype, copy, raise_cast_failure)
448 else:
--> 449 subarr = _try_cast(data, dtype, copy, raise_cast_failure)
450
451 # scalar like, GH
~/miniconda3/lib/python3.7/site-packages/pandas/core/construction.py in _try_cast(arr, dtype, copy, raise_cast_failure)
560 dtype = cast(ExtensionDtype, dtype)
561 array_type = dtype.construct_array_type()._from_sequence
--> 562 subarr = array_type(arr, dtype=dtype, copy=copy)
563 elif dtype is not None and raise_cast_failure:
564 raise
~/Workspace/mars/mars/dataframe/arrays.py in _from_sequence(cls, scalars, dtype, copy)
253 def _from_sequence(cls, scalars, dtype=None, copy=False):
254 if not hasattr(scalars, 'dtype'):
--> 255 ret = np.empty(len(scalars), dtype=object)
256 for i, s in enumerate(scalars):
257 ret[i] = s
TypeError: object of type 'pyarrow.lib.NullScalar' has no len()
|
TypeError
|
def __init__(self, discoverer, distributed=True):
if isinstance(discoverer, list):
discoverer = StaticSchedulerDiscoverer(discoverer)
self._discoverer = discoverer
self._distributed = distributed
self._hash_ring = None
self._watcher = None
self._schedulers = []
self._observer_refs = dict()
|
def __init__(self, discoverer, distributed=True):
if isinstance(discoverer, list):
discoverer = StaticSchedulerDiscoverer(discoverer)
self._discoverer = discoverer
self._distributed = distributed
self._hash_ring = None
self._watcher = None
self._schedulers = []
self._observer_refs = []
|
https://github.com/mars-project/mars/issues/1524
|
Traceback (most recent call last):
File "src/gevent/greenlet.py", line 854, in gevent._gevent_cgreenlet.Greenlet.run
File "mars/actors/pool/gevent_pool.pyx", line 70, in mars.actors.pool.gevent_pool.MessageContext.result
cpdef result(self):
File "mars/actors/pool/gevent_pool.pyx", line 71, in mars.actors.pool.gevent_pool.MessageContext.result
return self.async_result.result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "mars/actors/pool/gevent_pool.pyx", line 94, in mars.actors.pool.gevent_pool.ActorExecutionContext.fire_run
res = actor.on_receive(message_ctx.message)
File "mars/actors/core.pyx", line 112, in mars.actors.core._FunctionActor.on_receive
File "mars/actors/core.pyx", line 114, in mars.actors.core._FunctionActor.on_receive
File "/home/admin/work/_public-mars-0.4.5.zip/mars/cluster_info.py", line 147, in set_schedulers
getattr(observer_ref, fun_name)(schedulers, _tell=True)
File "mars/actors/core.pyx", line 63, in mars.actors.core.ActorRef.__getattr__._mt_call
File "mars/actors/core.pyx", line 41, in mars.actors.core.ActorRef.tell
File "mars/actors/pool/gevent_pool.pyx", line 204, in mars.actors.pool.gevent_pool.ActorContext.tell
return self._comm.tell(actor_ref, message, delay=delay,
File "mars/actors/pool/gevent_pool.pyx", line 775, in mars.actors.pool.gevent_pool.Communicator.tell
cpdef tell(self, ActorRef actor_ref, object message, object delay=None,
File "mars/actors/pool/gevent_pool.pyx", line 781, in mars.actors.pool.gevent_pool.Communicator.tell
return self._send(actor_ref, message, wait_response=False, wait=wait, callback=callback)
File "mars/actors/pool/gevent_pool.pyx", line 768, in mars.actors.pool.gevent_pool.Communicator._send
return self._dispatch(self._send_local, self._send_process, self._send_remote, actor_ref,
File "mars/actors/pool/gevent_pool.pyx", line 677, in mars.actors.pool.gevent_pool.Communicator._dispatch
return redirect_func(send_to_index, *args, **kwargs)
File "mars/actors/pool/gevent_pool.pyx", line 748, in mars.actors.pool.gevent_pool.Communicator._send_process
return self.submit(message_id)
File "mars/actors/pool/gevent_pool.pyx", line 311, in mars.actors.pool.gevent_pool.AsyncHandler.submit
return ar.result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "mars/actors/pool/gevent_pool.pyx", line 977, in mars.actors.pool.gevent_pool.Communicator._on_receive_tell
actor_ctx = self.pool.get_actor_execution_ctx(message.actor_ref.uid)
File "mars/actors/pool/gevent_pool.pyx", line 259, in mars.actors.pool.gevent_pool.LocalActorPool.get_actor_execution_ctx
cpdef ActorExecutionContext get_actor_execution_ctx(self, object actor_uid):
File "mars/actors/pool/gevent_pool.pyx", line 263, in mars.actors.pool.gevent_pool.LocalActorPool.get_actor_execution_ctx
raise ActorNotExist('Actor {0} does not exist'.format(actor_uid))
mars.actors.errors.ActorNotExist: Actor w:8:mars-cpu-calc-backup-241-268bbd7e0aab350b99e6354b286f52ab-1492 does not exist
2020-08-25T07:08:35Z <Greenlet at 0x7fceb54cd830: <built-in method result of mars.actors.pool.gevent_pool.MessageContext object at 0x7fceb5b88730>> failed with ActorNotExist
|
mars.actors.errors.ActorNotExist
|
def register_observer(self, observer, fun_name):
self._observer_refs[(observer.uid, observer.address)] = (
self.ctx.actor_ref(observer),
fun_name,
)
|
def register_observer(self, observer, fun_name):
self._observer_refs.append((self.ctx.actor_ref(observer), fun_name))
|
https://github.com/mars-project/mars/issues/1524
|
Traceback (most recent call last):
File "src/gevent/greenlet.py", line 854, in gevent._gevent_cgreenlet.Greenlet.run
File "mars/actors/pool/gevent_pool.pyx", line 70, in mars.actors.pool.gevent_pool.MessageContext.result
cpdef result(self):
File "mars/actors/pool/gevent_pool.pyx", line 71, in mars.actors.pool.gevent_pool.MessageContext.result
return self.async_result.result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "mars/actors/pool/gevent_pool.pyx", line 94, in mars.actors.pool.gevent_pool.ActorExecutionContext.fire_run
res = actor.on_receive(message_ctx.message)
File "mars/actors/core.pyx", line 112, in mars.actors.core._FunctionActor.on_receive
File "mars/actors/core.pyx", line 114, in mars.actors.core._FunctionActor.on_receive
File "/home/admin/work/_public-mars-0.4.5.zip/mars/cluster_info.py", line 147, in set_schedulers
getattr(observer_ref, fun_name)(schedulers, _tell=True)
File "mars/actors/core.pyx", line 63, in mars.actors.core.ActorRef.__getattr__._mt_call
File "mars/actors/core.pyx", line 41, in mars.actors.core.ActorRef.tell
File "mars/actors/pool/gevent_pool.pyx", line 204, in mars.actors.pool.gevent_pool.ActorContext.tell
return self._comm.tell(actor_ref, message, delay=delay,
File "mars/actors/pool/gevent_pool.pyx", line 775, in mars.actors.pool.gevent_pool.Communicator.tell
cpdef tell(self, ActorRef actor_ref, object message, object delay=None,
File "mars/actors/pool/gevent_pool.pyx", line 781, in mars.actors.pool.gevent_pool.Communicator.tell
return self._send(actor_ref, message, wait_response=False, wait=wait, callback=callback)
File "mars/actors/pool/gevent_pool.pyx", line 768, in mars.actors.pool.gevent_pool.Communicator._send
return self._dispatch(self._send_local, self._send_process, self._send_remote, actor_ref,
File "mars/actors/pool/gevent_pool.pyx", line 677, in mars.actors.pool.gevent_pool.Communicator._dispatch
return redirect_func(send_to_index, *args, **kwargs)
File "mars/actors/pool/gevent_pool.pyx", line 748, in mars.actors.pool.gevent_pool.Communicator._send_process
return self.submit(message_id)
File "mars/actors/pool/gevent_pool.pyx", line 311, in mars.actors.pool.gevent_pool.AsyncHandler.submit
return ar.result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "mars/actors/pool/gevent_pool.pyx", line 977, in mars.actors.pool.gevent_pool.Communicator._on_receive_tell
actor_ctx = self.pool.get_actor_execution_ctx(message.actor_ref.uid)
File "mars/actors/pool/gevent_pool.pyx", line 259, in mars.actors.pool.gevent_pool.LocalActorPool.get_actor_execution_ctx
cpdef ActorExecutionContext get_actor_execution_ctx(self, object actor_uid):
File "mars/actors/pool/gevent_pool.pyx", line 263, in mars.actors.pool.gevent_pool.LocalActorPool.get_actor_execution_ctx
raise ActorNotExist('Actor {0} does not exist'.format(actor_uid))
mars.actors.errors.ActorNotExist: Actor w:8:mars-cpu-calc-backup-241-268bbd7e0aab350b99e6354b286f52ab-1492 does not exist
2020-08-25T07:08:35Z <Greenlet at 0x7fceb54cd830: <built-in method result of mars.actors.pool.gevent_pool.MessageContext object at 0x7fceb5b88730>> failed with ActorNotExist
|
mars.actors.errors.ActorNotExist
|
def set_schedulers(self, schedulers):
logger.debug("Setting schedulers %r", schedulers)
self._schedulers = schedulers
self._hash_ring = create_hash_ring(self._schedulers)
for observer_ref, fun_name in self._observer_refs.values():
# notify the observers to update the new scheduler list
getattr(observer_ref, fun_name)(schedulers, _tell=True, _wait=False)
|
def set_schedulers(self, schedulers):
logger.debug("Setting schedulers %r", schedulers)
self._schedulers = schedulers
self._hash_ring = create_hash_ring(self._schedulers)
for observer_ref, fun_name in self._observer_refs:
# notify the observers to update the new scheduler list
getattr(observer_ref, fun_name)(schedulers, _tell=True)
|
https://github.com/mars-project/mars/issues/1524
|
Traceback (most recent call last):
File "src/gevent/greenlet.py", line 854, in gevent._gevent_cgreenlet.Greenlet.run
File "mars/actors/pool/gevent_pool.pyx", line 70, in mars.actors.pool.gevent_pool.MessageContext.result
cpdef result(self):
File "mars/actors/pool/gevent_pool.pyx", line 71, in mars.actors.pool.gevent_pool.MessageContext.result
return self.async_result.result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "mars/actors/pool/gevent_pool.pyx", line 94, in mars.actors.pool.gevent_pool.ActorExecutionContext.fire_run
res = actor.on_receive(message_ctx.message)
File "mars/actors/core.pyx", line 112, in mars.actors.core._FunctionActor.on_receive
File "mars/actors/core.pyx", line 114, in mars.actors.core._FunctionActor.on_receive
File "/home/admin/work/_public-mars-0.4.5.zip/mars/cluster_info.py", line 147, in set_schedulers
getattr(observer_ref, fun_name)(schedulers, _tell=True)
File "mars/actors/core.pyx", line 63, in mars.actors.core.ActorRef.__getattr__._mt_call
File "mars/actors/core.pyx", line 41, in mars.actors.core.ActorRef.tell
File "mars/actors/pool/gevent_pool.pyx", line 204, in mars.actors.pool.gevent_pool.ActorContext.tell
return self._comm.tell(actor_ref, message, delay=delay,
File "mars/actors/pool/gevent_pool.pyx", line 775, in mars.actors.pool.gevent_pool.Communicator.tell
cpdef tell(self, ActorRef actor_ref, object message, object delay=None,
File "mars/actors/pool/gevent_pool.pyx", line 781, in mars.actors.pool.gevent_pool.Communicator.tell
return self._send(actor_ref, message, wait_response=False, wait=wait, callback=callback)
File "mars/actors/pool/gevent_pool.pyx", line 768, in mars.actors.pool.gevent_pool.Communicator._send
return self._dispatch(self._send_local, self._send_process, self._send_remote, actor_ref,
File "mars/actors/pool/gevent_pool.pyx", line 677, in mars.actors.pool.gevent_pool.Communicator._dispatch
return redirect_func(send_to_index, *args, **kwargs)
File "mars/actors/pool/gevent_pool.pyx", line 748, in mars.actors.pool.gevent_pool.Communicator._send_process
return self.submit(message_id)
File "mars/actors/pool/gevent_pool.pyx", line 311, in mars.actors.pool.gevent_pool.AsyncHandler.submit
return ar.result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "mars/actors/pool/gevent_pool.pyx", line 977, in mars.actors.pool.gevent_pool.Communicator._on_receive_tell
actor_ctx = self.pool.get_actor_execution_ctx(message.actor_ref.uid)
File "mars/actors/pool/gevent_pool.pyx", line 259, in mars.actors.pool.gevent_pool.LocalActorPool.get_actor_execution_ctx
cpdef ActorExecutionContext get_actor_execution_ctx(self, object actor_uid):
File "mars/actors/pool/gevent_pool.pyx", line 263, in mars.actors.pool.gevent_pool.LocalActorPool.get_actor_execution_ctx
raise ActorNotExist('Actor {0} does not exist'.format(actor_uid))
mars.actors.errors.ActorNotExist: Actor w:8:mars-cpu-calc-backup-241-268bbd7e0aab350b99e6354b286f52ab-1492 does not exist
2020-08-25T07:08:35Z <Greenlet at 0x7fceb54cd830: <built-in method result of mars.actors.pool.gevent_pool.MessageContext object at 0x7fceb5b88730>> failed with ActorNotExist
|
mars.actors.errors.ActorNotExist
|
def take(self, indices, allow_fill=False, fill_value=None):
if allow_fill is False or (allow_fill and fill_value is self.dtype.na_value):
return type(self)(self[indices], dtype=self._dtype)
array = self._arrow_array.to_pandas().to_numpy()
replace = False
if allow_fill and fill_value is None:
fill_value = self.dtype.na_value
replace = True
result = take(array, indices, fill_value=fill_value, allow_fill=allow_fill)
del array
if replace:
# pyarrow cannot recognize pa.NULL
result[result == self.dtype.na_value] = None
return type(self)(result, dtype=self._dtype)
|
def take(self, indices, allow_fill=False, fill_value=None):
if allow_fill is False:
return type(self)(self[indices], dtype=self._dtype)
array = self._arrow_array.to_pandas().to_numpy()
replace = False
if allow_fill and fill_value is None:
fill_value = self.dtype.na_value
replace = True
result = take(array, indices, fill_value=fill_value, allow_fill=allow_fill)
del array
if replace:
# pyarrow cannot recognize pa.NULL
result[result == self.dtype.na_value] = None
return type(self)(result, dtype=self._dtype)
|
https://github.com/mars-project/mars/issues/1524
|
Traceback (most recent call last):
File "src/gevent/greenlet.py", line 854, in gevent._gevent_cgreenlet.Greenlet.run
File "mars/actors/pool/gevent_pool.pyx", line 70, in mars.actors.pool.gevent_pool.MessageContext.result
cpdef result(self):
File "mars/actors/pool/gevent_pool.pyx", line 71, in mars.actors.pool.gevent_pool.MessageContext.result
return self.async_result.result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "mars/actors/pool/gevent_pool.pyx", line 94, in mars.actors.pool.gevent_pool.ActorExecutionContext.fire_run
res = actor.on_receive(message_ctx.message)
File "mars/actors/core.pyx", line 112, in mars.actors.core._FunctionActor.on_receive
File "mars/actors/core.pyx", line 114, in mars.actors.core._FunctionActor.on_receive
File "/home/admin/work/_public-mars-0.4.5.zip/mars/cluster_info.py", line 147, in set_schedulers
getattr(observer_ref, fun_name)(schedulers, _tell=True)
File "mars/actors/core.pyx", line 63, in mars.actors.core.ActorRef.__getattr__._mt_call
File "mars/actors/core.pyx", line 41, in mars.actors.core.ActorRef.tell
File "mars/actors/pool/gevent_pool.pyx", line 204, in mars.actors.pool.gevent_pool.ActorContext.tell
return self._comm.tell(actor_ref, message, delay=delay,
File "mars/actors/pool/gevent_pool.pyx", line 775, in mars.actors.pool.gevent_pool.Communicator.tell
cpdef tell(self, ActorRef actor_ref, object message, object delay=None,
File "mars/actors/pool/gevent_pool.pyx", line 781, in mars.actors.pool.gevent_pool.Communicator.tell
return self._send(actor_ref, message, wait_response=False, wait=wait, callback=callback)
File "mars/actors/pool/gevent_pool.pyx", line 768, in mars.actors.pool.gevent_pool.Communicator._send
return self._dispatch(self._send_local, self._send_process, self._send_remote, actor_ref,
File "mars/actors/pool/gevent_pool.pyx", line 677, in mars.actors.pool.gevent_pool.Communicator._dispatch
return redirect_func(send_to_index, *args, **kwargs)
File "mars/actors/pool/gevent_pool.pyx", line 748, in mars.actors.pool.gevent_pool.Communicator._send_process
return self.submit(message_id)
File "mars/actors/pool/gevent_pool.pyx", line 311, in mars.actors.pool.gevent_pool.AsyncHandler.submit
return ar.result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "mars/actors/pool/gevent_pool.pyx", line 977, in mars.actors.pool.gevent_pool.Communicator._on_receive_tell
actor_ctx = self.pool.get_actor_execution_ctx(message.actor_ref.uid)
File "mars/actors/pool/gevent_pool.pyx", line 259, in mars.actors.pool.gevent_pool.LocalActorPool.get_actor_execution_ctx
cpdef ActorExecutionContext get_actor_execution_ctx(self, object actor_uid):
File "mars/actors/pool/gevent_pool.pyx", line 263, in mars.actors.pool.gevent_pool.LocalActorPool.get_actor_execution_ctx
raise ActorNotExist('Actor {0} does not exist'.format(actor_uid))
mars.actors.errors.ActorNotExist: Actor w:8:mars-cpu-calc-backup-241-268bbd7e0aab350b99e6354b286f52ab-1492 does not exist
2020-08-25T07:08:35Z <Greenlet at 0x7fceb54cd830: <built-in method result of mars.actors.pool.gevent_pool.MessageContext object at 0x7fceb5b88730>> failed with ActorNotExist
|
mars.actors.errors.ActorNotExist
|
def pre_destroy(self):
self._actual_ref.destroy()
self.unset_cluster_info_ref()
|
def pre_destroy(self):
self._actual_ref.destroy()
|
https://github.com/mars-project/mars/issues/1524
|
Traceback (most recent call last):
File "src/gevent/greenlet.py", line 854, in gevent._gevent_cgreenlet.Greenlet.run
File "mars/actors/pool/gevent_pool.pyx", line 70, in mars.actors.pool.gevent_pool.MessageContext.result
cpdef result(self):
File "mars/actors/pool/gevent_pool.pyx", line 71, in mars.actors.pool.gevent_pool.MessageContext.result
return self.async_result.result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "mars/actors/pool/gevent_pool.pyx", line 94, in mars.actors.pool.gevent_pool.ActorExecutionContext.fire_run
res = actor.on_receive(message_ctx.message)
File "mars/actors/core.pyx", line 112, in mars.actors.core._FunctionActor.on_receive
File "mars/actors/core.pyx", line 114, in mars.actors.core._FunctionActor.on_receive
File "/home/admin/work/_public-mars-0.4.5.zip/mars/cluster_info.py", line 147, in set_schedulers
getattr(observer_ref, fun_name)(schedulers, _tell=True)
File "mars/actors/core.pyx", line 63, in mars.actors.core.ActorRef.__getattr__._mt_call
File "mars/actors/core.pyx", line 41, in mars.actors.core.ActorRef.tell
File "mars/actors/pool/gevent_pool.pyx", line 204, in mars.actors.pool.gevent_pool.ActorContext.tell
return self._comm.tell(actor_ref, message, delay=delay,
File "mars/actors/pool/gevent_pool.pyx", line 775, in mars.actors.pool.gevent_pool.Communicator.tell
cpdef tell(self, ActorRef actor_ref, object message, object delay=None,
File "mars/actors/pool/gevent_pool.pyx", line 781, in mars.actors.pool.gevent_pool.Communicator.tell
return self._send(actor_ref, message, wait_response=False, wait=wait, callback=callback)
File "mars/actors/pool/gevent_pool.pyx", line 768, in mars.actors.pool.gevent_pool.Communicator._send
return self._dispatch(self._send_local, self._send_process, self._send_remote, actor_ref,
File "mars/actors/pool/gevent_pool.pyx", line 677, in mars.actors.pool.gevent_pool.Communicator._dispatch
return redirect_func(send_to_index, *args, **kwargs)
File "mars/actors/pool/gevent_pool.pyx", line 748, in mars.actors.pool.gevent_pool.Communicator._send_process
return self.submit(message_id)
File "mars/actors/pool/gevent_pool.pyx", line 311, in mars.actors.pool.gevent_pool.AsyncHandler.submit
return ar.result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "mars/actors/pool/gevent_pool.pyx", line 977, in mars.actors.pool.gevent_pool.Communicator._on_receive_tell
actor_ctx = self.pool.get_actor_execution_ctx(message.actor_ref.uid)
File "mars/actors/pool/gevent_pool.pyx", line 259, in mars.actors.pool.gevent_pool.LocalActorPool.get_actor_execution_ctx
cpdef ActorExecutionContext get_actor_execution_ctx(self, object actor_uid):
File "mars/actors/pool/gevent_pool.pyx", line 263, in mars.actors.pool.gevent_pool.LocalActorPool.get_actor_execution_ctx
raise ActorNotExist('Actor {0} does not exist'.format(actor_uid))
mars.actors.errors.ActorNotExist: Actor w:8:mars-cpu-calc-backup-241-268bbd7e0aab350b99e6354b286f52ab-1492 does not exist
2020-08-25T07:08:35Z <Greenlet at 0x7fceb54cd830: <built-in method result of mars.actors.pool.gevent_pool.MessageContext object at 0x7fceb5b88730>> failed with ActorNotExist
|
mars.actors.errors.ActorNotExist
|
def pre_destroy(self):
super().pre_destroy()
self.unset_cluster_info_ref()
self._graph_meta_ref.destroy()
|
def pre_destroy(self):
super().pre_destroy()
self._graph_meta_ref.destroy()
|
https://github.com/mars-project/mars/issues/1524
|
Traceback (most recent call last):
File "src/gevent/greenlet.py", line 854, in gevent._gevent_cgreenlet.Greenlet.run
File "mars/actors/pool/gevent_pool.pyx", line 70, in mars.actors.pool.gevent_pool.MessageContext.result
cpdef result(self):
File "mars/actors/pool/gevent_pool.pyx", line 71, in mars.actors.pool.gevent_pool.MessageContext.result
return self.async_result.result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "mars/actors/pool/gevent_pool.pyx", line 94, in mars.actors.pool.gevent_pool.ActorExecutionContext.fire_run
res = actor.on_receive(message_ctx.message)
File "mars/actors/core.pyx", line 112, in mars.actors.core._FunctionActor.on_receive
File "mars/actors/core.pyx", line 114, in mars.actors.core._FunctionActor.on_receive
File "/home/admin/work/_public-mars-0.4.5.zip/mars/cluster_info.py", line 147, in set_schedulers
getattr(observer_ref, fun_name)(schedulers, _tell=True)
File "mars/actors/core.pyx", line 63, in mars.actors.core.ActorRef.__getattr__._mt_call
File "mars/actors/core.pyx", line 41, in mars.actors.core.ActorRef.tell
File "mars/actors/pool/gevent_pool.pyx", line 204, in mars.actors.pool.gevent_pool.ActorContext.tell
return self._comm.tell(actor_ref, message, delay=delay,
File "mars/actors/pool/gevent_pool.pyx", line 775, in mars.actors.pool.gevent_pool.Communicator.tell
cpdef tell(self, ActorRef actor_ref, object message, object delay=None,
File "mars/actors/pool/gevent_pool.pyx", line 781, in mars.actors.pool.gevent_pool.Communicator.tell
return self._send(actor_ref, message, wait_response=False, wait=wait, callback=callback)
File "mars/actors/pool/gevent_pool.pyx", line 768, in mars.actors.pool.gevent_pool.Communicator._send
return self._dispatch(self._send_local, self._send_process, self._send_remote, actor_ref,
File "mars/actors/pool/gevent_pool.pyx", line 677, in mars.actors.pool.gevent_pool.Communicator._dispatch
return redirect_func(send_to_index, *args, **kwargs)
File "mars/actors/pool/gevent_pool.pyx", line 748, in mars.actors.pool.gevent_pool.Communicator._send_process
return self.submit(message_id)
File "mars/actors/pool/gevent_pool.pyx", line 311, in mars.actors.pool.gevent_pool.AsyncHandler.submit
return ar.result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "mars/actors/pool/gevent_pool.pyx", line 977, in mars.actors.pool.gevent_pool.Communicator._on_receive_tell
actor_ctx = self.pool.get_actor_execution_ctx(message.actor_ref.uid)
File "mars/actors/pool/gevent_pool.pyx", line 259, in mars.actors.pool.gevent_pool.LocalActorPool.get_actor_execution_ctx
cpdef ActorExecutionContext get_actor_execution_ctx(self, object actor_uid):
File "mars/actors/pool/gevent_pool.pyx", line 263, in mars.actors.pool.gevent_pool.LocalActorPool.get_actor_execution_ctx
raise ActorNotExist('Actor {0} does not exist'.format(actor_uid))
mars.actors.errors.ActorNotExist: Actor w:8:mars-cpu-calc-backup-241-268bbd7e0aab350b99e6354b286f52ab-1492 does not exist
2020-08-25T07:08:35Z <Greenlet at 0x7fceb54cd830: <built-in method result of mars.actors.pool.gevent_pool.MessageContext object at 0x7fceb5b88730>> failed with ActorNotExist
|
mars.actors.errors.ActorNotExist
|
def pre_destroy(self):
self._heartbeat_ref.destroy()
self.unset_cluster_info_ref()
super().pre_destroy()
|
def pre_destroy(self):
self._heartbeat_ref.destroy()
super().pre_destroy()
|
https://github.com/mars-project/mars/issues/1524
|
Traceback (most recent call last):
File "src/gevent/greenlet.py", line 854, in gevent._gevent_cgreenlet.Greenlet.run
File "mars/actors/pool/gevent_pool.pyx", line 70, in mars.actors.pool.gevent_pool.MessageContext.result
cpdef result(self):
File "mars/actors/pool/gevent_pool.pyx", line 71, in mars.actors.pool.gevent_pool.MessageContext.result
return self.async_result.result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "mars/actors/pool/gevent_pool.pyx", line 94, in mars.actors.pool.gevent_pool.ActorExecutionContext.fire_run
res = actor.on_receive(message_ctx.message)
File "mars/actors/core.pyx", line 112, in mars.actors.core._FunctionActor.on_receive
File "mars/actors/core.pyx", line 114, in mars.actors.core._FunctionActor.on_receive
File "/home/admin/work/_public-mars-0.4.5.zip/mars/cluster_info.py", line 147, in set_schedulers
getattr(observer_ref, fun_name)(schedulers, _tell=True)
File "mars/actors/core.pyx", line 63, in mars.actors.core.ActorRef.__getattr__._mt_call
File "mars/actors/core.pyx", line 41, in mars.actors.core.ActorRef.tell
File "mars/actors/pool/gevent_pool.pyx", line 204, in mars.actors.pool.gevent_pool.ActorContext.tell
return self._comm.tell(actor_ref, message, delay=delay,
File "mars/actors/pool/gevent_pool.pyx", line 775, in mars.actors.pool.gevent_pool.Communicator.tell
cpdef tell(self, ActorRef actor_ref, object message, object delay=None,
File "mars/actors/pool/gevent_pool.pyx", line 781, in mars.actors.pool.gevent_pool.Communicator.tell
return self._send(actor_ref, message, wait_response=False, wait=wait, callback=callback)
File "mars/actors/pool/gevent_pool.pyx", line 768, in mars.actors.pool.gevent_pool.Communicator._send
return self._dispatch(self._send_local, self._send_process, self._send_remote, actor_ref,
File "mars/actors/pool/gevent_pool.pyx", line 677, in mars.actors.pool.gevent_pool.Communicator._dispatch
return redirect_func(send_to_index, *args, **kwargs)
File "mars/actors/pool/gevent_pool.pyx", line 748, in mars.actors.pool.gevent_pool.Communicator._send_process
return self.submit(message_id)
File "mars/actors/pool/gevent_pool.pyx", line 311, in mars.actors.pool.gevent_pool.AsyncHandler.submit
return ar.result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "mars/actors/pool/gevent_pool.pyx", line 977, in mars.actors.pool.gevent_pool.Communicator._on_receive_tell
actor_ctx = self.pool.get_actor_execution_ctx(message.actor_ref.uid)
File "mars/actors/pool/gevent_pool.pyx", line 259, in mars.actors.pool.gevent_pool.LocalActorPool.get_actor_execution_ctx
cpdef ActorExecutionContext get_actor_execution_ctx(self, object actor_uid):
File "mars/actors/pool/gevent_pool.pyx", line 263, in mars.actors.pool.gevent_pool.LocalActorPool.get_actor_execution_ctx
raise ActorNotExist('Actor {0} does not exist'.format(actor_uid))
mars.actors.errors.ActorNotExist: Actor w:8:mars-cpu-calc-backup-241-268bbd7e0aab350b99e6354b286f52ab-1492 does not exist
2020-08-25T07:08:35Z <Greenlet at 0x7fceb54cd830: <built-in method result of mars.actors.pool.gevent_pool.MessageContext object at 0x7fceb5b88730>> failed with ActorNotExist
|
mars.actors.errors.ActorNotExist
|
def pre_destroy(self):
super().pre_destroy()
self.unset_cluster_info_ref()
self._manager_ref.delete_session(self._session_id, _tell=True)
self.ctx.destroy_actor(self._assigner_ref)
for graph_ref in self._graph_refs.values():
self.ctx.destroy_actor(graph_ref)
for mut_tensor_ref in self._mut_tensor_refs.values():
self.ctx.destroy_actor(mut_tensor_ref)
|
def pre_destroy(self):
super().pre_destroy()
self._manager_ref.delete_session(self._session_id, _tell=True)
self.ctx.destroy_actor(self._assigner_ref)
for graph_ref in self._graph_refs.values():
self.ctx.destroy_actor(graph_ref)
for mut_tensor_ref in self._mut_tensor_refs.values():
self.ctx.destroy_actor(mut_tensor_ref)
|
https://github.com/mars-project/mars/issues/1524
|
Traceback (most recent call last):
File "src/gevent/greenlet.py", line 854, in gevent._gevent_cgreenlet.Greenlet.run
File "mars/actors/pool/gevent_pool.pyx", line 70, in mars.actors.pool.gevent_pool.MessageContext.result
cpdef result(self):
File "mars/actors/pool/gevent_pool.pyx", line 71, in mars.actors.pool.gevent_pool.MessageContext.result
return self.async_result.result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "mars/actors/pool/gevent_pool.pyx", line 94, in mars.actors.pool.gevent_pool.ActorExecutionContext.fire_run
res = actor.on_receive(message_ctx.message)
File "mars/actors/core.pyx", line 112, in mars.actors.core._FunctionActor.on_receive
File "mars/actors/core.pyx", line 114, in mars.actors.core._FunctionActor.on_receive
File "/home/admin/work/_public-mars-0.4.5.zip/mars/cluster_info.py", line 147, in set_schedulers
getattr(observer_ref, fun_name)(schedulers, _tell=True)
File "mars/actors/core.pyx", line 63, in mars.actors.core.ActorRef.__getattr__._mt_call
File "mars/actors/core.pyx", line 41, in mars.actors.core.ActorRef.tell
File "mars/actors/pool/gevent_pool.pyx", line 204, in mars.actors.pool.gevent_pool.ActorContext.tell
return self._comm.tell(actor_ref, message, delay=delay,
File "mars/actors/pool/gevent_pool.pyx", line 775, in mars.actors.pool.gevent_pool.Communicator.tell
cpdef tell(self, ActorRef actor_ref, object message, object delay=None,
File "mars/actors/pool/gevent_pool.pyx", line 781, in mars.actors.pool.gevent_pool.Communicator.tell
return self._send(actor_ref, message, wait_response=False, wait=wait, callback=callback)
File "mars/actors/pool/gevent_pool.pyx", line 768, in mars.actors.pool.gevent_pool.Communicator._send
return self._dispatch(self._send_local, self._send_process, self._send_remote, actor_ref,
File "mars/actors/pool/gevent_pool.pyx", line 677, in mars.actors.pool.gevent_pool.Communicator._dispatch
return redirect_func(send_to_index, *args, **kwargs)
File "mars/actors/pool/gevent_pool.pyx", line 748, in mars.actors.pool.gevent_pool.Communicator._send_process
return self.submit(message_id)
File "mars/actors/pool/gevent_pool.pyx", line 311, in mars.actors.pool.gevent_pool.AsyncHandler.submit
return ar.result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "mars/actors/pool/gevent_pool.pyx", line 977, in mars.actors.pool.gevent_pool.Communicator._on_receive_tell
actor_ctx = self.pool.get_actor_execution_ctx(message.actor_ref.uid)
File "mars/actors/pool/gevent_pool.pyx", line 259, in mars.actors.pool.gevent_pool.LocalActorPool.get_actor_execution_ctx
cpdef ActorExecutionContext get_actor_execution_ctx(self, object actor_uid):
File "mars/actors/pool/gevent_pool.pyx", line 263, in mars.actors.pool.gevent_pool.LocalActorPool.get_actor_execution_ctx
raise ActorNotExist('Actor {0} does not exist'.format(actor_uid))
mars.actors.errors.ActorNotExist: Actor w:8:mars-cpu-calc-backup-241-268bbd7e0aab350b99e6354b286f52ab-1492 does not exist
2020-08-25T07:08:35Z <Greenlet at 0x7fceb54cd830: <built-in method result of mars.actors.pool.gevent_pool.MessageContext object at 0x7fceb5b88730>> failed with ActorNotExist
|
mars.actors.errors.ActorNotExist
|
def post_create(self):
super().post_create()
from .status import StatusActor
self._status_ref = self.ctx.actor_ref(StatusActor.default_uid())
if not self.ctx.has_actor(self._status_ref):
self._status_ref = None
|
def post_create(self):
super().post_create()
try:
self.set_cluster_info_ref()
except ActorNotExist:
pass
from .status import StatusActor
self._status_ref = self.ctx.actor_ref(StatusActor.default_uid())
if not self.ctx.has_actor(self._status_ref):
self._status_ref = None
|
https://github.com/mars-project/mars/issues/1524
|
Traceback (most recent call last):
File "src/gevent/greenlet.py", line 854, in gevent._gevent_cgreenlet.Greenlet.run
File "mars/actors/pool/gevent_pool.pyx", line 70, in mars.actors.pool.gevent_pool.MessageContext.result
cpdef result(self):
File "mars/actors/pool/gevent_pool.pyx", line 71, in mars.actors.pool.gevent_pool.MessageContext.result
return self.async_result.result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "mars/actors/pool/gevent_pool.pyx", line 94, in mars.actors.pool.gevent_pool.ActorExecutionContext.fire_run
res = actor.on_receive(message_ctx.message)
File "mars/actors/core.pyx", line 112, in mars.actors.core._FunctionActor.on_receive
File "mars/actors/core.pyx", line 114, in mars.actors.core._FunctionActor.on_receive
File "/home/admin/work/_public-mars-0.4.5.zip/mars/cluster_info.py", line 147, in set_schedulers
getattr(observer_ref, fun_name)(schedulers, _tell=True)
File "mars/actors/core.pyx", line 63, in mars.actors.core.ActorRef.__getattr__._mt_call
File "mars/actors/core.pyx", line 41, in mars.actors.core.ActorRef.tell
File "mars/actors/pool/gevent_pool.pyx", line 204, in mars.actors.pool.gevent_pool.ActorContext.tell
return self._comm.tell(actor_ref, message, delay=delay,
File "mars/actors/pool/gevent_pool.pyx", line 775, in mars.actors.pool.gevent_pool.Communicator.tell
cpdef tell(self, ActorRef actor_ref, object message, object delay=None,
File "mars/actors/pool/gevent_pool.pyx", line 781, in mars.actors.pool.gevent_pool.Communicator.tell
return self._send(actor_ref, message, wait_response=False, wait=wait, callback=callback)
File "mars/actors/pool/gevent_pool.pyx", line 768, in mars.actors.pool.gevent_pool.Communicator._send
return self._dispatch(self._send_local, self._send_process, self._send_remote, actor_ref,
File "mars/actors/pool/gevent_pool.pyx", line 677, in mars.actors.pool.gevent_pool.Communicator._dispatch
return redirect_func(send_to_index, *args, **kwargs)
File "mars/actors/pool/gevent_pool.pyx", line 748, in mars.actors.pool.gevent_pool.Communicator._send_process
return self.submit(message_id)
File "mars/actors/pool/gevent_pool.pyx", line 311, in mars.actors.pool.gevent_pool.AsyncHandler.submit
return ar.result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "mars/actors/pool/gevent_pool.pyx", line 977, in mars.actors.pool.gevent_pool.Communicator._on_receive_tell
actor_ctx = self.pool.get_actor_execution_ctx(message.actor_ref.uid)
File "mars/actors/pool/gevent_pool.pyx", line 259, in mars.actors.pool.gevent_pool.LocalActorPool.get_actor_execution_ctx
cpdef ActorExecutionContext get_actor_execution_ctx(self, object actor_uid):
File "mars/actors/pool/gevent_pool.pyx", line 263, in mars.actors.pool.gevent_pool.LocalActorPool.get_actor_execution_ctx
raise ActorNotExist('Actor {0} does not exist'.format(actor_uid))
mars.actors.errors.ActorNotExist: Actor w:8:mars-cpu-calc-backup-241-268bbd7e0aab350b99e6354b286f52ab-1492 does not exist
2020-08-25T07:08:35Z <Greenlet at 0x7fceb54cd830: <built-in method result of mars.actors.pool.gevent_pool.MessageContext object at 0x7fceb5b88730>> failed with ActorNotExist
|
mars.actors.errors.ActorNotExist
|
def post_create(self):
from .daemon import WorkerDaemonActor
from .dispatcher import DispatchActor
from .quota import MemQuotaActor
from .status import StatusActor
super().post_create()
self._dispatch_ref = self.promise_ref(DispatchActor.default_uid())
self._mem_quota_ref = self.promise_ref(MemQuotaActor.default_uid())
self._daemon_ref = self.ctx.actor_ref(WorkerDaemonActor.default_uid())
if not self.ctx.has_actor(self._daemon_ref):
self._daemon_ref = None
else:
self.register_actors_down_handler()
self._status_ref = self.ctx.actor_ref(StatusActor.default_uid())
if not self.ctx.has_actor(self._status_ref):
self._status_ref = None
self._receiver_manager_ref = self.ctx.actor_ref(ReceiverManagerActor.default_uid())
if not self.ctx.has_actor(self._receiver_manager_ref):
self._receiver_manager_ref = None
else:
self._receiver_manager_ref = self.promise_ref(self._receiver_manager_ref)
from ..scheduler import ResourceActor
self._resource_ref = self.get_actor_ref(ResourceActor.default_uid())
self.periodical_dump()
|
def post_create(self):
from .daemon import WorkerDaemonActor
from .dispatcher import DispatchActor
from .quota import MemQuotaActor
from .status import StatusActor
super().post_create()
self.set_cluster_info_ref()
self._dispatch_ref = self.promise_ref(DispatchActor.default_uid())
self._mem_quota_ref = self.promise_ref(MemQuotaActor.default_uid())
self._daemon_ref = self.ctx.actor_ref(WorkerDaemonActor.default_uid())
if not self.ctx.has_actor(self._daemon_ref):
self._daemon_ref = None
else:
self.register_actors_down_handler()
self._status_ref = self.ctx.actor_ref(StatusActor.default_uid())
if not self.ctx.has_actor(self._status_ref):
self._status_ref = None
self._receiver_manager_ref = self.ctx.actor_ref(ReceiverManagerActor.default_uid())
if not self.ctx.has_actor(self._receiver_manager_ref):
self._receiver_manager_ref = None
else:
self._receiver_manager_ref = self.promise_ref(self._receiver_manager_ref)
from ..scheduler import ResourceActor
self._resource_ref = self.get_actor_ref(ResourceActor.default_uid())
self.periodical_dump()
|
https://github.com/mars-project/mars/issues/1524
|
Traceback (most recent call last):
File "src/gevent/greenlet.py", line 854, in gevent._gevent_cgreenlet.Greenlet.run
File "mars/actors/pool/gevent_pool.pyx", line 70, in mars.actors.pool.gevent_pool.MessageContext.result
cpdef result(self):
File "mars/actors/pool/gevent_pool.pyx", line 71, in mars.actors.pool.gevent_pool.MessageContext.result
return self.async_result.result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "mars/actors/pool/gevent_pool.pyx", line 94, in mars.actors.pool.gevent_pool.ActorExecutionContext.fire_run
res = actor.on_receive(message_ctx.message)
File "mars/actors/core.pyx", line 112, in mars.actors.core._FunctionActor.on_receive
File "mars/actors/core.pyx", line 114, in mars.actors.core._FunctionActor.on_receive
File "/home/admin/work/_public-mars-0.4.5.zip/mars/cluster_info.py", line 147, in set_schedulers
getattr(observer_ref, fun_name)(schedulers, _tell=True)
File "mars/actors/core.pyx", line 63, in mars.actors.core.ActorRef.__getattr__._mt_call
File "mars/actors/core.pyx", line 41, in mars.actors.core.ActorRef.tell
File "mars/actors/pool/gevent_pool.pyx", line 204, in mars.actors.pool.gevent_pool.ActorContext.tell
return self._comm.tell(actor_ref, message, delay=delay,
File "mars/actors/pool/gevent_pool.pyx", line 775, in mars.actors.pool.gevent_pool.Communicator.tell
cpdef tell(self, ActorRef actor_ref, object message, object delay=None,
File "mars/actors/pool/gevent_pool.pyx", line 781, in mars.actors.pool.gevent_pool.Communicator.tell
return self._send(actor_ref, message, wait_response=False, wait=wait, callback=callback)
File "mars/actors/pool/gevent_pool.pyx", line 768, in mars.actors.pool.gevent_pool.Communicator._send
return self._dispatch(self._send_local, self._send_process, self._send_remote, actor_ref,
File "mars/actors/pool/gevent_pool.pyx", line 677, in mars.actors.pool.gevent_pool.Communicator._dispatch
return redirect_func(send_to_index, *args, **kwargs)
File "mars/actors/pool/gevent_pool.pyx", line 748, in mars.actors.pool.gevent_pool.Communicator._send_process
return self.submit(message_id)
File "mars/actors/pool/gevent_pool.pyx", line 311, in mars.actors.pool.gevent_pool.AsyncHandler.submit
return ar.result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "mars/actors/pool/gevent_pool.pyx", line 977, in mars.actors.pool.gevent_pool.Communicator._on_receive_tell
actor_ctx = self.pool.get_actor_execution_ctx(message.actor_ref.uid)
File "mars/actors/pool/gevent_pool.pyx", line 259, in mars.actors.pool.gevent_pool.LocalActorPool.get_actor_execution_ctx
cpdef ActorExecutionContext get_actor_execution_ctx(self, object actor_uid):
File "mars/actors/pool/gevent_pool.pyx", line 263, in mars.actors.pool.gevent_pool.LocalActorPool.get_actor_execution_ctx
raise ActorNotExist('Actor {0} does not exist'.format(actor_uid))
mars.actors.errors.ActorNotExist: Actor w:8:mars-cpu-calc-backup-241-268bbd7e0aab350b99e6354b286f52ab-1492 does not exist
2020-08-25T07:08:35Z <Greenlet at 0x7fceb54cd830: <built-in method result of mars.actors.pool.gevent_pool.MessageContext object at 0x7fceb5b88730>> failed with ActorNotExist
|
mars.actors.errors.ActorNotExist
|
def decide_dataframe_chunk_sizes(shape, chunk_size, memory_usage):
"""
Decide how a given DataFrame can be split into chunk.
:param shape: DataFrame's shape
:param chunk_size: if dict provided, it's dimension id to chunk size;
if provided, it's the chunk size for each dimension.
:param memory_usage: pandas Series in which each column's memory usage
:type memory_usage: pandas.Series
:return: the calculated chunk size for each dimension
:rtype: tuple
"""
from ..config import options
chunk_size = dictify_chunk_size(shape, chunk_size)
average_memory_usage = memory_usage / shape[0]
nleft = len(shape) - len(chunk_size)
if nleft < 0:
raise ValueError("chunks have more than two dimensions")
if nleft == 0:
return normalize_chunk_sizes(
shape, tuple(chunk_size[j] for j in range(len(shape)))
)
max_chunk_size = options.chunk_store_limit
# for the row side, along axis 0
if 0 not in chunk_size:
row_chunk_size = []
row_left_size = shape[0]
else:
row_chunk_size = normalize_chunk_sizes((shape[0],), (chunk_size[0],))[0]
row_left_size = -1
# for the column side, along axis 1
if 1 not in chunk_size:
col_chunk_size = []
col_chunk_store = []
col_left_size = shape[1]
else:
col_chunk_size = normalize_chunk_sizes((shape[1],), (chunk_size[1],))[0]
acc = [0] + np.cumsum(col_chunk_size).tolist()
col_chunk_store = [
average_memory_usage[acc[i] : acc[i + 1]].sum()
for i in range(len(col_chunk_size))
]
col_left_size = -1
while True:
nbytes_occupied = np.prod(
[max(it) for it in (row_chunk_size, col_chunk_store) if it]
)
dim_size = np.maximum(
int(np.power(max_chunk_size / nbytes_occupied, 1 / float(nleft))), 1
)
if col_left_size == 0:
col_chunk_size.append(0)
if row_left_size == 0:
row_chunk_size.append(0)
# check col first
if col_left_size > 0:
cs = min(col_left_size, dim_size)
col_chunk_size.append(cs)
start = int(np.sum(col_chunk_size[:-1]))
col_chunk_store.append(average_memory_usage.iloc[start : start + cs].sum())
col_left_size -= cs
if row_left_size > 0:
max_col_chunk_store = max(col_chunk_store)
cs = min(row_left_size, int(max_chunk_size / max_col_chunk_store))
row_chunk_size.append(cs)
row_left_size -= cs
if col_left_size <= 0 and row_left_size <= 0:
break
return tuple(row_chunk_size), tuple(col_chunk_size)
|
def decide_dataframe_chunk_sizes(shape, chunk_size, memory_usage):
"""
Decide how a given DataFrame can be split into chunk.
:param shape: DataFrame's shape
:param chunk_size: if dict provided, it's dimension id to chunk size;
if provided, it's the chunk size for each dimension.
:param memory_usage: pandas Series in which each column's memory usage
:type memory_usage: pandas.Series
:return: the calculated chunk size for each dimension
:rtype: tuple
"""
from ..config import options
chunk_size = dictify_chunk_size(shape, chunk_size)
average_memory_usage = memory_usage / shape[0]
nleft = len(shape) - len(chunk_size)
if nleft < 0:
raise ValueError("chunks have more than two dimensions")
if nleft == 0:
return normalize_chunk_sizes(
shape, tuple(chunk_size[j] for j in range(len(shape)))
)
max_chunk_size = options.chunk_store_limit
# for the row side, along axis 0
if 0 not in chunk_size:
row_chunk_size = []
row_left_size = shape[0]
else:
row_chunk_size = normalize_chunk_sizes((shape[0],), (chunk_size[0],))[0]
row_left_size = 0
# for the column side, along axis 1
if 1 not in chunk_size:
col_chunk_size = []
col_chunk_store = []
col_left_size = shape[1]
else:
col_chunk_size = normalize_chunk_sizes((shape[1],), (chunk_size[1],))[0]
acc = [0] + np.cumsum(col_chunk_size).tolist()
col_chunk_store = [
average_memory_usage[acc[i] : acc[i + 1]].sum()
for i in range(len(col_chunk_size))
]
col_left_size = 0
while True:
nbytes_occupied = np.prod(
[max(it) for it in (row_chunk_size, col_chunk_store) if it]
)
dim_size = np.maximum(
int(np.power(max_chunk_size / nbytes_occupied, 1 / float(nleft))), 1
)
# check col first
if col_left_size > 0:
cs = min(col_left_size, dim_size)
col_chunk_size.append(cs)
start = int(np.sum(col_chunk_size[:-1]))
col_chunk_store.append(average_memory_usage.iloc[start : start + cs].sum())
col_left_size -= cs
if row_left_size > 0:
max_col_chunk_store = max(col_chunk_store)
cs = min(row_left_size, int(max_chunk_size / max_col_chunk_store))
row_chunk_size.append(cs)
row_left_size -= cs
if col_left_size == 0 and row_left_size == 0:
break
return tuple(row_chunk_size), tuple(col_chunk_size)
|
https://github.com/mars-project/mars/issues/1521
|
In [1]: import pandas as pd
In [2]: a = pd.DataFrame(columns=list('ab'))
In [3]: import mars.dataframe as md
In [4]: md.DataFrame(a).iloc[:2].execute()
---------------------------------------------------------------------------
StopIteration Traceback (most recent call last)
<ipython-input-4-426db4e594a4> in <module>
----> 1 md.DataFrame(a).iloc[:2].execute()
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
576
577 def execute(self, session=None, **kw):
--> 578 self._data.execute(session, **kw)
579 return self
580
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
364
365 # no more fetch, thus just fire run
--> 366 session.run(self, **kw)
367 # return Tileable or ExecutableTuple itself
368 return self
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
478 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
479 for t in tileables)
--> 480 result = self._sess.run(*tileables, **kw)
481
482 for t in tileables:
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
105 # set number of running cores
106 self.context.set_ncores(kw['n_parallel'])
--> 107 res = self._executor.execute_tileables(tileables, **kw)
108 return res
109
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
~/Documents/mars_dev/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
862 # build chunk graph, tile will be done during building
863 chunk_graph = chunk_graph_builder.build(
--> 864 tileables, tileable_graph=tileable_graph)
865 tileable_graph = chunk_graph_builder.prev_tileable_graph
866 temp_result_keys = set(result_keys)
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
348
349 chunk_graph = super().build(
--> 350 tileables, tileable_graph=tileable_graph)
351 self._iterative_chunk_graphs.append(chunk_graph)
352 if len(self._interrupted_ops) == 0:
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
261 # for further execution
262 partial_tiled_chunks = \
--> 263 self._on_tile_failure(tileable_data.op, exc_info)
264 if partial_tiled_chunks is not None and \
265 len(partial_tiled_chunks) > 0:
~/Documents/mars_dev/mars/mars/tiles.py in inner(op, exc_info)
300 on_tile_failure(op, exc_info)
301 else:
--> 302 raise exc_info[1].with_traceback(exc_info[2]) from None
303 return inner
304
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
241 continue
242 try:
--> 243 tiled = self._tile(tileable_data, tileable_graph)
244 tiled_op.add(tileable_data.op)
245 for t, td in zip(tileable_data.op.outputs, tiled):
~/Documents/mars_dev/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
336 if any(inp.op in self._interrupted_ops for inp in tileable_data.inputs):
337 raise TilesError('Tile fail due to failure of inputs')
--> 338 return super()._tile(tileable_data, tileable_graph)
339
340 @kernel_mode
~/Documents/mars_dev/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
199 t._nsplits = o.nsplits
200 elif on_tile is None:
--> 201 tds[0]._inplace_tile()
202 else:
203 tds = on_tile(tileable_data.op.outputs, tds)
~/Documents/mars_dev/mars/mars/core.py in _inplace_tile(self)
161
162 def _inplace_tile(self):
--> 163 return handler.inplace_tile(self)
164
165 def __getattr__(self, attr):
~/Documents/mars_dev/mars/mars/tiles.py in inplace_tile(self, to_tile)
134 if not to_tile.is_coarse():
135 return to_tile
--> 136 dispatched = self.dispatch(to_tile.op)
137 self._assign_to([d.data for d in dispatched], to_tile.op.outputs)
138 return to_tile
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/tiles.py in dispatch(self, op)
117 else:
118 try:
--> 119 tiled = op_cls.tile(op)
120 except NotImplementedError as ex:
121 cause = ex
~/Documents/mars_dev/mars/mars/dataframe/indexing/iloc.py in tile(cls, op)
339
340 handler = DataFrameIlocIndexesHandler()
--> 341 return [handler.handle(op)]
342
343 @classmethod
~/Documents/mars_dev/mars/mars/tensor/indexing/index_lib.py in handle(self, op, return_context)
896
897 self._preprocess(context, index_infos)
--> 898 self._process(context, index_infos)
899 self._postprocess(context, index_infos)
900
~/Documents/mars_dev/mars/mars/tensor/indexing/index_lib.py in _process(cls, context, index_infos)
923 index_to_shape = OrderedDict(sorted([(c.index, c.shape) for c in out_chunks],
924 key=itemgetter(0)))
--> 925 context.out_nsplits = calc_nsplits(index_to_shape)
926
927 @classmethod
~/Documents/mars_dev/mars/mars/utils.py in calc_nsplits(chunk_idx_to_shape)
664 :return: nsplits
665 """
--> 666 ndim = len(next(iter(chunk_idx_to_shape)))
667 tileable_nsplits = []
668 # for each dimension, record chunk shape whose index is zero on other dimensions
StopIteration:
|
TilesError
|
def decide_series_chunk_size(shape, chunk_size, memory_usage):
from ..config import options
chunk_size = dictify_chunk_size(shape, chunk_size)
average_memory_usage = memory_usage / shape[0] if shape[0] != 0 else memory_usage
if len(chunk_size) == len(shape):
return normalize_chunk_sizes(shape, chunk_size[0])
max_chunk_size = options.chunk_store_limit
series_chunk_size = max_chunk_size / average_memory_usage
return normalize_chunk_sizes(shape, int(series_chunk_size))
|
def decide_series_chunk_size(shape, chunk_size, memory_usage):
from ..config import options
chunk_size = dictify_chunk_size(shape, chunk_size)
average_memory_usage = memory_usage / shape[0]
if len(chunk_size) == len(shape):
return normalize_chunk_sizes(shape, chunk_size[0])
max_chunk_size = options.chunk_store_limit
series_chunk_size = max_chunk_size / average_memory_usage
return normalize_chunk_sizes(shape, int(series_chunk_size))
|
https://github.com/mars-project/mars/issues/1521
|
In [1]: import pandas as pd
In [2]: a = pd.DataFrame(columns=list('ab'))
In [3]: import mars.dataframe as md
In [4]: md.DataFrame(a).iloc[:2].execute()
---------------------------------------------------------------------------
StopIteration Traceback (most recent call last)
<ipython-input-4-426db4e594a4> in <module>
----> 1 md.DataFrame(a).iloc[:2].execute()
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
576
577 def execute(self, session=None, **kw):
--> 578 self._data.execute(session, **kw)
579 return self
580
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
364
365 # no more fetch, thus just fire run
--> 366 session.run(self, **kw)
367 # return Tileable or ExecutableTuple itself
368 return self
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
478 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
479 for t in tileables)
--> 480 result = self._sess.run(*tileables, **kw)
481
482 for t in tileables:
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
105 # set number of running cores
106 self.context.set_ncores(kw['n_parallel'])
--> 107 res = self._executor.execute_tileables(tileables, **kw)
108 return res
109
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
~/Documents/mars_dev/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
862 # build chunk graph, tile will be done during building
863 chunk_graph = chunk_graph_builder.build(
--> 864 tileables, tileable_graph=tileable_graph)
865 tileable_graph = chunk_graph_builder.prev_tileable_graph
866 temp_result_keys = set(result_keys)
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
348
349 chunk_graph = super().build(
--> 350 tileables, tileable_graph=tileable_graph)
351 self._iterative_chunk_graphs.append(chunk_graph)
352 if len(self._interrupted_ops) == 0:
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
261 # for further execution
262 partial_tiled_chunks = \
--> 263 self._on_tile_failure(tileable_data.op, exc_info)
264 if partial_tiled_chunks is not None and \
265 len(partial_tiled_chunks) > 0:
~/Documents/mars_dev/mars/mars/tiles.py in inner(op, exc_info)
300 on_tile_failure(op, exc_info)
301 else:
--> 302 raise exc_info[1].with_traceback(exc_info[2]) from None
303 return inner
304
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
241 continue
242 try:
--> 243 tiled = self._tile(tileable_data, tileable_graph)
244 tiled_op.add(tileable_data.op)
245 for t, td in zip(tileable_data.op.outputs, tiled):
~/Documents/mars_dev/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
336 if any(inp.op in self._interrupted_ops for inp in tileable_data.inputs):
337 raise TilesError('Tile fail due to failure of inputs')
--> 338 return super()._tile(tileable_data, tileable_graph)
339
340 @kernel_mode
~/Documents/mars_dev/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
199 t._nsplits = o.nsplits
200 elif on_tile is None:
--> 201 tds[0]._inplace_tile()
202 else:
203 tds = on_tile(tileable_data.op.outputs, tds)
~/Documents/mars_dev/mars/mars/core.py in _inplace_tile(self)
161
162 def _inplace_tile(self):
--> 163 return handler.inplace_tile(self)
164
165 def __getattr__(self, attr):
~/Documents/mars_dev/mars/mars/tiles.py in inplace_tile(self, to_tile)
134 if not to_tile.is_coarse():
135 return to_tile
--> 136 dispatched = self.dispatch(to_tile.op)
137 self._assign_to([d.data for d in dispatched], to_tile.op.outputs)
138 return to_tile
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/tiles.py in dispatch(self, op)
117 else:
118 try:
--> 119 tiled = op_cls.tile(op)
120 except NotImplementedError as ex:
121 cause = ex
~/Documents/mars_dev/mars/mars/dataframe/indexing/iloc.py in tile(cls, op)
339
340 handler = DataFrameIlocIndexesHandler()
--> 341 return [handler.handle(op)]
342
343 @classmethod
~/Documents/mars_dev/mars/mars/tensor/indexing/index_lib.py in handle(self, op, return_context)
896
897 self._preprocess(context, index_infos)
--> 898 self._process(context, index_infos)
899 self._postprocess(context, index_infos)
900
~/Documents/mars_dev/mars/mars/tensor/indexing/index_lib.py in _process(cls, context, index_infos)
923 index_to_shape = OrderedDict(sorted([(c.index, c.shape) for c in out_chunks],
924 key=itemgetter(0)))
--> 925 context.out_nsplits = calc_nsplits(index_to_shape)
926
927 @classmethod
~/Documents/mars_dev/mars/mars/utils.py in calc_nsplits(chunk_idx_to_shape)
664 :return: nsplits
665 """
--> 666 ndim = len(next(iter(chunk_idx_to_shape)))
667 tileable_nsplits = []
668 # for each dimension, record chunk shape whose index is zero on other dimensions
StopIteration:
|
TilesError
|
def normalize_chunk_sizes(shape, chunk_size):
shape = normalize_shape(shape)
if not isinstance(chunk_size, tuple):
if isinstance(chunk_size, Iterable):
chunk_size = tuple(chunk_size)
elif isinstance(chunk_size, int):
chunk_size = (chunk_size,) * len(shape)
if len(shape) != len(chunk_size):
raise ValueError(
"Chunks must have the same dimemsion, "
f"got shape: {shape}, chunks: {chunk_size}"
)
chunk_sizes = []
for size, chunk in zip(shape, chunk_size):
if isinstance(chunk, Iterable):
if not isinstance(chunk, tuple):
chunk = tuple(chunk)
if sum(chunk) != size:
raise ValueError(
"chunks shape should be of the same length, "
f"got shape: {size}, chunks: {chunk}"
)
chunk_sizes.append(chunk)
else:
assert isinstance(chunk, int)
if size == 0:
sizes = (0,)
else:
sizes = tuple(chunk for _ in range(int(size / chunk))) + (
tuple() if size % chunk == 0 else (size % chunk,)
)
chunk_sizes.append(sizes)
return tuple(chunk_sizes)
|
def normalize_chunk_sizes(shape, chunk_size):
shape = normalize_shape(shape)
if not isinstance(chunk_size, tuple):
if isinstance(chunk_size, Iterable):
chunk_size = tuple(chunk_size)
elif isinstance(chunk_size, int):
chunk_size = (chunk_size,) * len(shape)
if len(shape) != len(chunk_size):
raise ValueError(
"Chunks must have the same dimemsion, "
f"got shape: {shape}, chunks: {chunk_size}"
)
chunk_sizes = []
for size, chunk in zip(shape, chunk_size):
if isinstance(chunk, Iterable):
if not isinstance(chunk, tuple):
chunk = tuple(chunk)
if sum(chunk) != size:
raise ValueError(
"chunks shape should be of the same length, "
f"got shape: {size}, chunks: {chunk}"
)
chunk_sizes.append(chunk)
else:
assert isinstance(chunk, int)
sizes = tuple(chunk for _ in range(int(size / chunk))) + (
tuple() if size % chunk == 0 else (size % chunk,)
)
chunk_sizes.append(sizes)
return tuple(chunk_sizes)
|
https://github.com/mars-project/mars/issues/1521
|
In [1]: import pandas as pd
In [2]: a = pd.DataFrame(columns=list('ab'))
In [3]: import mars.dataframe as md
In [4]: md.DataFrame(a).iloc[:2].execute()
---------------------------------------------------------------------------
StopIteration Traceback (most recent call last)
<ipython-input-4-426db4e594a4> in <module>
----> 1 md.DataFrame(a).iloc[:2].execute()
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
576
577 def execute(self, session=None, **kw):
--> 578 self._data.execute(session, **kw)
579 return self
580
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
364
365 # no more fetch, thus just fire run
--> 366 session.run(self, **kw)
367 # return Tileable or ExecutableTuple itself
368 return self
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
478 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
479 for t in tileables)
--> 480 result = self._sess.run(*tileables, **kw)
481
482 for t in tileables:
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
105 # set number of running cores
106 self.context.set_ncores(kw['n_parallel'])
--> 107 res = self._executor.execute_tileables(tileables, **kw)
108 return res
109
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
~/Documents/mars_dev/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
862 # build chunk graph, tile will be done during building
863 chunk_graph = chunk_graph_builder.build(
--> 864 tileables, tileable_graph=tileable_graph)
865 tileable_graph = chunk_graph_builder.prev_tileable_graph
866 temp_result_keys = set(result_keys)
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
348
349 chunk_graph = super().build(
--> 350 tileables, tileable_graph=tileable_graph)
351 self._iterative_chunk_graphs.append(chunk_graph)
352 if len(self._interrupted_ops) == 0:
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
261 # for further execution
262 partial_tiled_chunks = \
--> 263 self._on_tile_failure(tileable_data.op, exc_info)
264 if partial_tiled_chunks is not None and \
265 len(partial_tiled_chunks) > 0:
~/Documents/mars_dev/mars/mars/tiles.py in inner(op, exc_info)
300 on_tile_failure(op, exc_info)
301 else:
--> 302 raise exc_info[1].with_traceback(exc_info[2]) from None
303 return inner
304
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
241 continue
242 try:
--> 243 tiled = self._tile(tileable_data, tileable_graph)
244 tiled_op.add(tileable_data.op)
245 for t, td in zip(tileable_data.op.outputs, tiled):
~/Documents/mars_dev/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
336 if any(inp.op in self._interrupted_ops for inp in tileable_data.inputs):
337 raise TilesError('Tile fail due to failure of inputs')
--> 338 return super()._tile(tileable_data, tileable_graph)
339
340 @kernel_mode
~/Documents/mars_dev/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
199 t._nsplits = o.nsplits
200 elif on_tile is None:
--> 201 tds[0]._inplace_tile()
202 else:
203 tds = on_tile(tileable_data.op.outputs, tds)
~/Documents/mars_dev/mars/mars/core.py in _inplace_tile(self)
161
162 def _inplace_tile(self):
--> 163 return handler.inplace_tile(self)
164
165 def __getattr__(self, attr):
~/Documents/mars_dev/mars/mars/tiles.py in inplace_tile(self, to_tile)
134 if not to_tile.is_coarse():
135 return to_tile
--> 136 dispatched = self.dispatch(to_tile.op)
137 self._assign_to([d.data for d in dispatched], to_tile.op.outputs)
138 return to_tile
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/tiles.py in dispatch(self, op)
117 else:
118 try:
--> 119 tiled = op_cls.tile(op)
120 except NotImplementedError as ex:
121 cause = ex
~/Documents/mars_dev/mars/mars/dataframe/indexing/iloc.py in tile(cls, op)
339
340 handler = DataFrameIlocIndexesHandler()
--> 341 return [handler.handle(op)]
342
343 @classmethod
~/Documents/mars_dev/mars/mars/tensor/indexing/index_lib.py in handle(self, op, return_context)
896
897 self._preprocess(context, index_infos)
--> 898 self._process(context, index_infos)
899 self._postprocess(context, index_infos)
900
~/Documents/mars_dev/mars/mars/tensor/indexing/index_lib.py in _process(cls, context, index_infos)
923 index_to_shape = OrderedDict(sorted([(c.index, c.shape) for c in out_chunks],
924 key=itemgetter(0)))
--> 925 context.out_nsplits = calc_nsplits(index_to_shape)
926
927 @classmethod
~/Documents/mars_dev/mars/mars/utils.py in calc_nsplits(chunk_idx_to_shape)
664 :return: nsplits
665 """
--> 666 ndim = len(next(iter(chunk_idx_to_shape)))
667 tileable_nsplits = []
668 # for each dimension, record chunk shape whose index is zero on other dimensions
StopIteration:
|
TilesError
|
def _is_sparse(cls, x1, x2):
if hasattr(x1, "issparse") and x1.issparse():
# if x1 is sparse, will be sparse always
return True
elif np.isscalar(x1) and x1 == 0:
# x1 == 0, return sparse if x2 is
return x2.issparse() if hasattr(x2, "issparse") else False
return False
|
def _is_sparse(cls, x1, x2):
# x2 is sparse or not does not matter
if hasattr(x1, "issparse") and x1.issparse() and np.isscalar(x2):
return True
elif x1 == 0:
return True
return False
|
https://github.com/mars-project/mars/issues/1500
|
vx = mt.dot((1,0,0),(0,1,0))
vy = mt.dot((1,0,0),(0,0,1))
t = mt.arctan2(vx, vy)
---------------------------------------------------------------------------
IndexError Traceback (most recent call last)
~/anaconda3/lib/python3.7/site-packages/mars/core.py in __len__(self)
533 try:
--> 534 return self.shape[0]
535 except IndexError:
IndexError: tuple index out of range
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-23-09c63447ea86> in <module>
----> 1 mt.arctan2(vx, vy)
~/anaconda3/lib/python3.7/site-packages/mars/tensor/utils.py in h(*tensors, **kw)
256 kw['dtype'] = dtype
257
--> 258 ret = func(*tensors, **kw)
259 if ret is NotImplemented:
260 reverse_func = getattr(inspect.getmodule(func), 'r{0}'.format(func.__name__), None) \
~/anaconda3/lib/python3.7/site-packages/mars/tensor/arithmetic/arctan2.py in arctan2(x1, x2, out, where, **kwargs)
125 """
126 op = TensorArctan2(**kwargs)
--> 127 return op(x1, x2, out=out, where=where)
~/anaconda3/lib/python3.7/site-packages/mars/tensor/arithmetic/core.py in __call__(self, x1, x2, out, where)
268
269 def __call__(self, x1, x2, out=None, where=None):
--> 270 return self._call(x1, x2, out=out, where=where)
271
272 def rcall(self, x1, x2, out=None, where=None):
~/anaconda3/lib/python3.7/site-packages/mars/tensor/arithmetic/core.py in _call(self, x1, x2, out, where)
251
252 inputs = filter_inputs([x1, x2, out, where])
--> 253 t = self.new_tensor(inputs, shape, order=order)
254
255 if out is None:
~/anaconda3/lib/python3.7/site-packages/mars/tensor/operands.py in new_tensor(self, inputs, shape, dtype, order, **kw)
77 raise TypeError('cannot new tensor with more than 1 outputs')
78
---> 79 return self.new_tensors(inputs, shape=shape, dtype=dtype, order=order, **kw)[0]
80
81 @classmethod
~/anaconda3/lib/python3.7/site-packages/mars/tensor/operands.py in new_tensors(self, inputs, shape, dtype, order, chunks, nsplits, output_limit, kws, **kw)
71 output_limit=None, kws=None, **kw):
72 return self.new_tileables(inputs, shape=shape, chunks=chunks, nsplits=nsplits,
---> 73 output_limit=output_limit, kws=kws, dtype=dtype, order=order, **kw)
74
75 def new_tensor(self, inputs, shape, dtype=None, order=None, **kw):
~/anaconda3/lib/python3.7/site-packages/mars/operands.py in new_tileables(self, inputs, kws, **kw)
352 """
353
--> 354 tileables = self._new_tileables(inputs, kws=kws, **kw)
355 if is_eager_mode():
356 ExecutableTuple(tileables).execute(fetch=False)
~/anaconda3/lib/python3.7/site-packages/mars/tensor/arithmetic/core.py in _new_tileables(self, inputs, kws, **kw)
70
71 def _new_tileables(self, inputs, kws=None, **kw):
---> 72 self._set_sparse(inputs)
73 return super()._new_tileables(
74 inputs, kws=kws, **kw)
~/anaconda3/lib/python3.7/site-packages/mars/tensor/arithmetic/core.py in _set_sparse(self, inputs)
188 x1 = self._lhs if np.isscalar(self._lhs) else next(inputs_iter)
189 x2 = self._rhs if np.isscalar(self._rhs) else next(inputs_iter)
--> 190 setattr(self, '_sparse', self._is_sparse(x1, x2))
191
192 def _set_inputs(self, inputs):
~/anaconda3/lib/python3.7/site-packages/mars/tensor/arithmetic/arctan2.py in _is_sparse(cls, x1, x2)
33 if hasattr(x1, 'issparse') and x1.issparse() and np.isscalar(x2):
34 return True
---> 35 elif x1 == 0:
36 return True
37 return False
~/anaconda3/lib/python3.7/site-packages/mars/tensor/core.py in __len__(self)
279
280 def __len__(self):
--> 281 return len(self._data)
282
283 @property
~/anaconda3/lib/python3.7/site-packages/mars/core.py in __len__(self)
536 if build_mode().is_build_mode:
537 return 0
--> 538 raise TypeError('len() of unsized object')
539
540 @property
TypeError: len() of unsized object
|
IndexError
|
def execute(cls, ctx, op):
inputs, device_id, xp = as_same_device(
[ctx[c.key] for c in op.inputs], device=op.device, ret_extra=True
)
with device(device_id):
a = op.lhs if np.isscalar(op.lhs) else inputs[0]
b = op.rhs if np.isscalar(op.rhs) else inputs[-1]
ctx[op.outputs[0].key] = xp.isclose(
a, b, atol=op.atol, rtol=op.rtol, equal_nan=op.equal_nan
)
|
def execute(cls, ctx, op):
(a, b), device_id, xp = as_same_device(
[ctx[c.key] for c in op.inputs], device=op.device, ret_extra=True
)
with device(device_id):
ctx[op.outputs[0].key] = xp.isclose(
a, b, atol=op.atol, rtol=op.rtol, equal_nan=op.equal_nan
)
|
https://github.com/mars-project/mars/issues/1497
|
In []: mt.isclose((0,0), (0,0)).execute()
Out[]: array([True, True])
In []: mt.isclose((0,0), (0,)).execute()
Out[]: arary([True, True])
In []: np.isclose((0,0), (0))
Out[]: array([True, True])
In []: mt.isclose((0,0), (0)).execute()
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-20-a3e463eb7acf> in <module>
----> 1 mt.isclose((0,0), (0)).execute()
~/anaconda3/lib/python3.7/site-packages/mars/core.py in execute(self, session, **kw)
580
581 def execute(self, session=None, **kw):
--> 582 self._data.execute(session, **kw)
583 return self
584
~/anaconda3/lib/python3.7/site-packages/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
~/anaconda3/lib/python3.7/site-packages/mars/session.py in run(self, *tileables, **kw)
460 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
461 for t in tileables)
--> 462 result = self._sess.run(*tileables, **kw)
463
464 for t in tileables:
~/anaconda3/lib/python3.7/site-packages/mars/session.py in run(self, *tileables, **kw)
105 # set number of running cores
106 self.context.set_ncores(kw['n_parallel'])
--> 107 res = self._executor.execute_tileables(tileables, **kw)
108 return res
109
~/anaconda3/lib/python3.7/site-packages/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
~/anaconda3/lib/python3.7/site-packages/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
~/anaconda3/lib/python3.7/site-packages/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
878 n_parallel=n_parallel or n_thread,
879 print_progress=print_progress, mock=mock,
--> 880 chunk_result=chunk_result)
881
882 # update shape of tileable and its chunks whatever it's successful or not
~/anaconda3/lib/python3.7/site-packages/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
688 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
689 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 690 res = graph_execution.execute(retval)
691 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
692 if mock:
~/anaconda3/lib/python3.7/site-packages/mars/executor.py in execute(self, retval)
569 # wait until all the futures completed
570 for future in executed_futures:
--> 571 future.result()
572
573 if retval:
~/anaconda3/lib/python3.7/concurrent/futures/_base.py in result(self, timeout)
426 raise CancelledError()
427 elif self._state == FINISHED:
--> 428 return self.__get_result()
429
430 self._condition.wait(timeout)
~/anaconda3/lib/python3.7/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
~/anaconda3/lib/python3.7/concurrent/futures/thread.py in run(self)
55
56 try:
---> 57 result = self.fn(*self.args, **self.kwargs)
58 except BaseException as exc:
59 self.future.set_exception(exc)
~/anaconda3/lib/python3.7/site-packages/mars/executor.py in _execute_operand(self, op)
441 # so we pass the first operand's first output to Executor.handle
442 first_op = ops[0]
--> 443 Executor.handle(first_op, results, self._mock)
444
445 # update maximal memory usage during execution
~/anaconda3/lib/python3.7/site-packages/mars/executor.py in handle(cls, op, results, mock)
639 # The `UFuncTypeError` was introduced by numpy#12593 since v1.17.0.
640 try:
--> 641 return runner(results, op)
642 except UFuncTypeError as e:
643 raise TypeError(str(e)).with_traceback(sys.exc_info()[2]) from None
~/anaconda3/lib/python3.7/site-packages/mars/tensor/arithmetic/isclose.py in execute(cls, ctx, op)
61 def execute(cls, ctx, op):
62 (a, b), device_id, xp = as_same_device(
---> 63 [ctx[c.key] for c in op.inputs], device=op.device, ret_extra=True)
64
65 with device(device_id):
ValueError: not enough values to unpack (expected 2, got 1)
|
ValueError
|
def arrow_array_to_objects(obj):
from .dataframe.arrays import ArrowDtype
if isinstance(obj, pd.DataFrame):
out_cols = dict()
for col_name, dtype in obj.dtypes.items():
if isinstance(dtype, ArrowDtype):
out_cols[col_name] = pd.Series(
obj[col_name].to_numpy(), index=obj.index
)
else:
out_cols[col_name] = obj[col_name]
obj = pd.DataFrame(out_cols, columns=list(obj.dtypes.keys()))
elif isinstance(obj, pd.Series):
if isinstance(obj.dtype, ArrowDtype):
obj = pd.Series(obj.to_numpy(), index=obj.index, name=obj.name)
return obj
|
def arrow_array_to_objects(obj):
from .dataframe.arrays import ArrowDtype
if isinstance(obj, pd.DataFrame):
out_cols = dict()
for col_name, dtype in obj.dtypes.items():
if isinstance(dtype, ArrowDtype):
out_cols[col_name] = pd.Series(
obj[col_name].to_numpy(), index=obj.index
)
else:
out_cols[col_name] = obj[col_name]
obj = pd.DataFrame(out_cols, columns=list(obj.dtypes.keys()))
elif isinstance(obj, pd.Series):
if isinstance(obj.dtype, ArrowDtype):
obj = pd.Series(obj.to_numpy())
return obj
|
https://github.com/mars-project/mars/issues/1497
|
In []: mt.isclose((0,0), (0,0)).execute()
Out[]: array([True, True])
In []: mt.isclose((0,0), (0,)).execute()
Out[]: arary([True, True])
In []: np.isclose((0,0), (0))
Out[]: array([True, True])
In []: mt.isclose((0,0), (0)).execute()
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-20-a3e463eb7acf> in <module>
----> 1 mt.isclose((0,0), (0)).execute()
~/anaconda3/lib/python3.7/site-packages/mars/core.py in execute(self, session, **kw)
580
581 def execute(self, session=None, **kw):
--> 582 self._data.execute(session, **kw)
583 return self
584
~/anaconda3/lib/python3.7/site-packages/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
~/anaconda3/lib/python3.7/site-packages/mars/session.py in run(self, *tileables, **kw)
460 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
461 for t in tileables)
--> 462 result = self._sess.run(*tileables, **kw)
463
464 for t in tileables:
~/anaconda3/lib/python3.7/site-packages/mars/session.py in run(self, *tileables, **kw)
105 # set number of running cores
106 self.context.set_ncores(kw['n_parallel'])
--> 107 res = self._executor.execute_tileables(tileables, **kw)
108 return res
109
~/anaconda3/lib/python3.7/site-packages/mars/utils.py in _wrapped(*args, **kwargs)
406 _kernel_mode.eager = False
407 _kernel_mode.eager_count = enter_eager_count + 1
--> 408 return func(*args, **kwargs)
409 finally:
410 _kernel_mode.eager_count -= 1
~/anaconda3/lib/python3.7/site-packages/mars/utils.py in inner(*args, **kwargs)
500 def inner(*args, **kwargs):
501 with build_mode():
--> 502 return func(*args, **kwargs)
503 return inner
504
~/anaconda3/lib/python3.7/site-packages/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
878 n_parallel=n_parallel or n_thread,
879 print_progress=print_progress, mock=mock,
--> 880 chunk_result=chunk_result)
881
882 # update shape of tileable and its chunks whatever it's successful or not
~/anaconda3/lib/python3.7/site-packages/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
688 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
689 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 690 res = graph_execution.execute(retval)
691 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
692 if mock:
~/anaconda3/lib/python3.7/site-packages/mars/executor.py in execute(self, retval)
569 # wait until all the futures completed
570 for future in executed_futures:
--> 571 future.result()
572
573 if retval:
~/anaconda3/lib/python3.7/concurrent/futures/_base.py in result(self, timeout)
426 raise CancelledError()
427 elif self._state == FINISHED:
--> 428 return self.__get_result()
429
430 self._condition.wait(timeout)
~/anaconda3/lib/python3.7/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
~/anaconda3/lib/python3.7/concurrent/futures/thread.py in run(self)
55
56 try:
---> 57 result = self.fn(*self.args, **self.kwargs)
58 except BaseException as exc:
59 self.future.set_exception(exc)
~/anaconda3/lib/python3.7/site-packages/mars/executor.py in _execute_operand(self, op)
441 # so we pass the first operand's first output to Executor.handle
442 first_op = ops[0]
--> 443 Executor.handle(first_op, results, self._mock)
444
445 # update maximal memory usage during execution
~/anaconda3/lib/python3.7/site-packages/mars/executor.py in handle(cls, op, results, mock)
639 # The `UFuncTypeError` was introduced by numpy#12593 since v1.17.0.
640 try:
--> 641 return runner(results, op)
642 except UFuncTypeError as e:
643 raise TypeError(str(e)).with_traceback(sys.exc_info()[2]) from None
~/anaconda3/lib/python3.7/site-packages/mars/tensor/arithmetic/isclose.py in execute(cls, ctx, op)
61 def execute(cls, ctx, op):
62 (a, b), device_id, xp = as_same_device(
---> 63 [ctx[c.key] for c in op.inputs], device=op.device, ret_extra=True)
64
65 with device(device_id):
ValueError: not enough values to unpack (expected 2, got 1)
|
ValueError
|
def fetch_chunks_data(
self,
session_id,
chunk_indexes,
chunk_keys,
nsplits,
index_obj=None,
serial=True,
serial_type=None,
compressions=None,
pickle_protocol=None,
):
chunk_index_to_key = dict(
(index, key) for index, key in zip(chunk_indexes, chunk_keys)
)
if not index_obj:
chunk_results = dict(
(idx, self.fetch_chunk_data(session_id, k))
for idx, k in zip(chunk_indexes, chunk_keys)
)
else:
chunk_results = dict()
indexes = dict()
for axis, s in enumerate(index_obj):
idx_to_slices = slice_split(s, nsplits[axis])
indexes[axis] = idx_to_slices
for chunk_index in itertools.product(*[v.keys() for v in indexes.values()]):
# slice_obj: use tuple, since numpy complains
#
# FutureWarning: Using a non-tuple sequence for multidimensional indexing is deprecated; use
# `arr[tuple(seq)]` instead of `arr[seq]`. In the future this will be interpreted as an array
# index, `arr[np.array(seq)]`, which will result either in an error or a different result.
slice_obj = tuple(
indexes[axis][chunk_idx] for axis, chunk_idx in enumerate(chunk_index)
)
chunk_key = chunk_index_to_key[chunk_index]
chunk_results[chunk_index] = self.fetch_chunk_data(
session_id, chunk_key, slice_obj
)
chunk_results = [
(idx, dataserializer.loads(f.result())) for idx, f in chunk_results.items()
]
if len(chunk_results) == 1:
ret = chunk_results[0][1]
else:
ret = merge_chunks(chunk_results)
if not serial:
return ret
compressions = (
max(compressions) if compressions else dataserializer.CompressType.NONE
)
if serial_type == dataserializer.SerialType.PICKLE:
ret = arrow_array_to_objects(ret)
return dataserializer.dumps(
ret,
serial_type=serial_type,
compress=compressions,
pickle_protocol=pickle_protocol,
)
|
def fetch_chunks_data(
self,
session_id,
chunk_indexes,
chunk_keys,
nsplits,
index_obj=None,
serial=True,
serial_type=None,
compressions=None,
pickle_protocol=None,
):
chunk_index_to_key = dict(
(index, key) for index, key in zip(chunk_indexes, chunk_keys)
)
if not index_obj:
chunk_results = dict(
(idx, self.fetch_chunk_data(session_id, k))
for idx, k in zip(chunk_indexes, chunk_keys)
)
else:
chunk_results = dict()
indexes = dict()
for axis, s in enumerate(index_obj):
idx_to_slices = slice_split(s, nsplits[axis])
indexes[axis] = idx_to_slices
for chunk_index in itertools.product(*[v.keys() for v in indexes.values()]):
# slice_obj: use tuple, since numpy complains
#
# FutureWarning: Using a non-tuple sequence for multidimensional indexing is deprecated; use
# `arr[tuple(seq)]` instead of `arr[seq]`. In the future this will be interpreted as an array
# index, `arr[np.array(seq)]`, which will result either in an error or a different result.
slice_obj = tuple(
indexes[axis][chunk_idx] for axis, chunk_idx in enumerate(chunk_index)
)
chunk_key = chunk_index_to_key[chunk_index]
chunk_results[chunk_index] = self.fetch_chunk_data(
session_id, chunk_key, slice_obj
)
chunk_results = [
(idx, dataserializer.loads(f.result())) for idx, f in chunk_results.items()
]
if len(chunk_results) == 1:
ret = chunk_results[0][1]
else:
ret = merge_chunks(chunk_results)
if not serial:
return ret
compressions = (
max(compressions) if compressions else dataserializer.CompressType.NONE
)
return dataserializer.dumps(
ret,
serial_type=serial_type,
compress=compressions,
pickle_protocol=pickle_protocol,
)
|
https://github.com/mars-project/mars/issues/1479
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 129, in __repr__
return self._data.__repr__()
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1083, in __repr__
return self._to_str(representation=True)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1053, in _to_str
self, session=self._executed_sessions[-1])
File "/Users/wenjun.swj/Code/mars/mars/dataframe/utils.py", line 773, in fetch_corner_data
return df_or_series.fetch(session=session)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 376, in fetch
return session.fetch(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 491, in fetch
result = self._sess.fetch(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/web/session.py", line 265, in fetch
result_data = dataserializer.loads(resp.content)
File "/Users/wenjun.swj/Code/mars/mars/serialize/dataserializer.py", line 259, in loads
return pickle.loads(data)
ModuleNotFoundError: No module named 'pyarrow'
|
ModuleNotFoundError
|
def parse_args(self, parser, argv, environ=None):
environ = environ or os.environ
args = parser.parse_args(argv)
args.host = args.host or environ.get("MARS_BIND_HOST")
args.port = args.port or environ.get("MARS_BIND_PORT")
args.endpoint = args.endpoint or environ.get("MARS_BIND_ENDPOINT")
args.advertise = args.advertise or environ.get("MARS_CONTAINER_IP")
load_modules = []
for mods in tuple(args.load_modules or ()) + (environ.get("MARS_LOAD_MODULES"),):
load_modules.extend(mods.split(",") if mods else [])
load_modules.extend(["mars.executor", "mars.serialize.protos"])
args.load_modules = tuple(load_modules)
if "MARS_TASK_DETAIL" in environ:
task_detail = json.loads(environ["MARS_TASK_DETAIL"])
task_type, task_index = (
task_detail["task"]["type"],
task_detail["task"]["index"],
)
args.advertise = args.advertise or task_detail["cluster"][task_type][task_index]
args.schedulers = args.schedulers or ",".join(
task_detail["cluster"]["scheduler"]
)
return args
|
def parse_args(self, parser, argv, environ=None):
environ = environ or os.environ
args = parser.parse_args(argv)
args.advertise = args.advertise or environ.get("MARS_CONTAINER_IP")
load_modules = []
for mods in tuple(args.load_modules or ()) + (environ.get("MARS_LOAD_MODULES"),):
load_modules.extend(mods.split(",") if mods else [])
load_modules.extend(["mars.executor", "mars.serialize.protos"])
args.load_modules = tuple(load_modules)
if "MARS_TASK_DETAIL" in environ:
task_detail = json.loads(environ["MARS_TASK_DETAIL"])
task_type, task_index = (
task_detail["task"]["type"],
task_detail["task"]["index"],
)
args.advertise = args.advertise or task_detail["cluster"][task_type][task_index]
args.schedulers = args.schedulers or ",".join(
task_detail["cluster"]["scheduler"]
)
return args
|
https://github.com/mars-project/mars/issues/1479
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 129, in __repr__
return self._data.__repr__()
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1083, in __repr__
return self._to_str(representation=True)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1053, in _to_str
self, session=self._executed_sessions[-1])
File "/Users/wenjun.swj/Code/mars/mars/dataframe/utils.py", line 773, in fetch_corner_data
return df_or_series.fetch(session=session)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 376, in fetch
return session.fetch(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 491, in fetch
result = self._sess.fetch(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/web/session.py", line 265, in fetch
result_data = dataserializer.loads(resp.content)
File "/Users/wenjun.swj/Code/mars/mars/serialize/dataserializer.py", line 259, in loads
return pickle.loads(data)
ModuleNotFoundError: No module named 'pyarrow'
|
ModuleNotFoundError
|
def _get_ready_pod_count(self, label_selector):
query = self._core_api.list_namespaced_pod(
namespace=self._namespace, label_selector=label_selector
).to_dict()
cnt = 0
for el in query["items"]:
if el["status"]["phase"] in ("Error", "Failed"):
logger.warning(
"Error in starting pod, message: %s", el["status"]["message"]
)
continue
if "status" not in el or "conditions" not in el["status"]:
cnt += 1
elif any(
cond["type"] == "Ready" and cond["status"] == "True"
for cond in el["status"].get("conditions") or ()
):
cnt += 1
return cnt
|
def _get_ready_pod_count(self, label_selector):
query = self._core_api.list_namespaced_pod(
namespace=self._namespace, label_selector=label_selector
).to_dict()
cnt = 0
for el in query["items"]:
if el["status"]["phase"] in ("Error", "Failed"):
raise SystemError(el["status"]["message"])
if "status" not in el or "conditions" not in el["status"]:
cnt += 1
if any(
cond["type"] == "Ready" and cond["status"] == "True"
for cond in el["status"].get("conditions") or ()
):
cnt += 1
return cnt
|
https://github.com/mars-project/mars/issues/1479
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 129, in __repr__
return self._data.__repr__()
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1083, in __repr__
return self._to_str(representation=True)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1053, in _to_str
self, session=self._executed_sessions[-1])
File "/Users/wenjun.swj/Code/mars/mars/dataframe/utils.py", line 773, in fetch_corner_data
return df_or_series.fetch(session=session)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 376, in fetch
return session.fetch(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 491, in fetch
result = self._sess.fetch(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/web/session.py", line 265, in fetch
result_data = dataserializer.loads(resp.content)
File "/Users/wenjun.swj/Code/mars/mars/serialize/dataserializer.py", line 259, in loads
return pickle.loads(data)
ModuleNotFoundError: No module named 'pyarrow'
|
ModuleNotFoundError
|
def config_args(self, parser):
super().config_args(parser)
parser.add_argument("--nproc", help="number of processes")
parser.add_argument(
"--disable-failover", action="store_true", help="disable fail-over"
)
|
def config_args(self, parser):
super().config_args(parser)
parser.add_argument("--nproc", help="number of processes")
|
https://github.com/mars-project/mars/issues/1479
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 129, in __repr__
return self._data.__repr__()
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1083, in __repr__
return self._to_str(representation=True)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1053, in _to_str
self, session=self._executed_sessions[-1])
File "/Users/wenjun.swj/Code/mars/mars/dataframe/utils.py", line 773, in fetch_corner_data
return df_or_series.fetch(session=session)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 376, in fetch
return session.fetch(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 491, in fetch
result = self._sess.fetch(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/web/session.py", line 265, in fetch
result_data = dataserializer.loads(resp.content)
File "/Users/wenjun.swj/Code/mars/mars/serialize/dataserializer.py", line 259, in loads
return pickle.loads(data)
ModuleNotFoundError: No module named 'pyarrow'
|
ModuleNotFoundError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.