after_merge
stringlengths 28
79.6k
| before_merge
stringlengths 20
79.6k
| url
stringlengths 38
71
| full_traceback
stringlengths 43
922k
| traceback_type
stringclasses 555
values |
|---|---|---|---|---|
def decide_dataframe_chunk_sizes(shape, chunk_size, memory_usage):
"""
Decide how a given DataFrame can be split into chunk.
:param shape: DataFrame's shape
:param chunk_size: if dict provided, it's dimension id to chunk size;
if provided, it's the chunk size for each dimension.
:param memory_usage: pandas Series in which each column's memory usage
:type memory_usage: pandas.Series
:return: the calculated chunk size for each dimension
:rtype: tuple
"""
from ..config import options
chunk_size = dictify_chunk_size(shape, chunk_size)
average_memory_usage = memory_usage / shape[0]
nleft = len(shape) - len(chunk_size)
if nleft < 0:
raise ValueError("chunks have more than two dimensions")
if nleft == 0:
return normalize_chunk_sizes(
shape, tuple(chunk_size[j] for j in range(len(shape)))
)
max_chunk_size = options.chunk_store_limit
# for the row side, along axis 0
if 0 not in chunk_size:
row_chunk_size = []
row_left_size = shape[0]
else:
row_chunk_size = normalize_chunk_sizes((shape[0],), (chunk_size[0],))[0]
row_left_size = -1
# for the column side, along axis 1
if 1 not in chunk_size:
col_chunk_size = []
col_chunk_store = []
col_left_size = shape[1]
else:
col_chunk_size = normalize_chunk_sizes((shape[1],), (chunk_size[1],))[0]
acc = [0] + np.cumsum(col_chunk_size).tolist()
col_chunk_store = [
average_memory_usage[acc[i] : acc[i + 1]].sum()
for i in range(len(col_chunk_size))
]
col_left_size = -1
while True:
nbytes_occupied = np.prod(
[max(it) for it in (row_chunk_size, col_chunk_store) if it]
)
dim_size = np.maximum(
int(np.power(max_chunk_size / nbytes_occupied, 1 / float(nleft))), 1
)
if col_left_size == 0:
col_chunk_size.append(0)
if row_left_size == 0:
row_chunk_size.append(0)
# check col first
if col_left_size > 0:
cs = min(col_left_size, dim_size)
col_chunk_size.append(cs)
start = int(np.sum(col_chunk_size[:-1]))
col_chunk_store.append(average_memory_usage.iloc[start : start + cs].sum())
col_left_size -= cs
if row_left_size > 0:
if col_chunk_store:
max_col_chunk_store = max(col_chunk_store)
cs = min(row_left_size, int(max_chunk_size / max_col_chunk_store))
else:
cs = row_left_size
row_chunk_size.append(cs)
row_left_size -= cs
if col_left_size <= 0 and row_left_size <= 0:
break
return tuple(row_chunk_size), tuple(col_chunk_size)
|
def decide_dataframe_chunk_sizes(shape, chunk_size, memory_usage):
"""
Decide how a given DataFrame can be split into chunk.
:param shape: DataFrame's shape
:param chunk_size: if dict provided, it's dimension id to chunk size;
if provided, it's the chunk size for each dimension.
:param memory_usage: pandas Series in which each column's memory usage
:type memory_usage: pandas.Series
:return: the calculated chunk size for each dimension
:rtype: tuple
"""
from ..config import options
chunk_size = dictify_chunk_size(shape, chunk_size)
average_memory_usage = memory_usage / shape[0]
nleft = len(shape) - len(chunk_size)
if nleft < 0:
raise ValueError("chunks have more than two dimensions")
if nleft == 0:
return normalize_chunk_sizes(
shape, tuple(chunk_size[j] for j in range(len(shape)))
)
max_chunk_size = options.chunk_store_limit
# for the row side, along axis 0
if 0 not in chunk_size:
row_chunk_size = []
row_left_size = shape[0]
else:
row_chunk_size = normalize_chunk_sizes((shape[0],), (chunk_size[0],))[0]
row_left_size = -1
# for the column side, along axis 1
if 1 not in chunk_size:
col_chunk_size = []
col_chunk_store = []
col_left_size = shape[1]
else:
col_chunk_size = normalize_chunk_sizes((shape[1],), (chunk_size[1],))[0]
acc = [0] + np.cumsum(col_chunk_size).tolist()
col_chunk_store = [
average_memory_usage[acc[i] : acc[i + 1]].sum()
for i in range(len(col_chunk_size))
]
col_left_size = -1
while True:
nbytes_occupied = np.prod(
[max(it) for it in (row_chunk_size, col_chunk_store) if it]
)
dim_size = np.maximum(
int(np.power(max_chunk_size / nbytes_occupied, 1 / float(nleft))), 1
)
if col_left_size == 0:
col_chunk_size.append(0)
if row_left_size == 0:
row_chunk_size.append(0)
# check col first
if col_left_size > 0:
cs = min(col_left_size, dim_size)
col_chunk_size.append(cs)
start = int(np.sum(col_chunk_size[:-1]))
col_chunk_store.append(average_memory_usage.iloc[start : start + cs].sum())
col_left_size -= cs
if row_left_size > 0:
max_col_chunk_store = max(col_chunk_store)
cs = min(row_left_size, int(max_chunk_size / max_col_chunk_store))
row_chunk_size.append(cs)
row_left_size -= cs
if col_left_size <= 0 and row_left_size <= 0:
break
return tuple(row_chunk_size), tuple(col_chunk_size)
|
https://github.com/mars-project/mars/issues/2018
|
In [52]: df = md.DataFrame(index=[1, 2, 3])
In [53]: df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
---------------------------------------------------------------------------
NotImplementedError Traceback (most recent call last)
<ipython-input-53-f550a59ef82c> in <module>
----> 1 df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
~/Workspace/mars/mars/dataframe/indexing/setitem.py in dataframe_setitem(df, col, value)
182 def dataframe_setitem(df, col, value):
183 op = DataFrameSetitem(target=df, indexes=col, value=value)
--> 184 return op(df, value)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
457 @functools.wraps(func)
458 def _inner(*args, **kwargs):
--> 459 with self:
460 return func(*args, **kwargs)
461
~/Workspace/mars/mars/dataframe/indexing/setitem.py in __call__(self, target, value)
83
84 if value.index_value.key != target.index_value.key: # pragma: no cover
---> 85 raise NotImplementedError('Does not support setting value '
86 'with different index for now')
87
NotImplementedError: Does not support setting value with different index for now
|
NotImplementedError
|
def tile(cls, op):
from ..indexing.slice import TensorSlice
check_chunks_unknown_shape(op.inputs, TilesError)
if len(set([inp.shape for inp in op.inputs])) != 1:
# check shape again when input has unknown shape
raise ValueError("all input tensors must have the same shape")
inputs = unify_chunks(*op.inputs)
output = op.outputs[0]
axis = op.axis
output_nsplits = (
inputs[0].nsplits[:axis] + ((1,) * len(inputs),) + inputs[0].nsplits[axis:]
)
output_idxes = itertools.product(*[range(len(nsplit)) for nsplit in output_nsplits])
out_chunks = []
for idx in output_idxes:
input_idx = idx[:axis] + idx[axis + 1 :]
i = idx[axis]
input_chunk = inputs[i].cix[input_idx]
slices = (
[slice(None)] * axis
+ [np.newaxis]
+ [slice(None)] * (len(input_idx) - axis)
)
shape = input_chunk.shape[:axis] + (1,) + input_chunk.shape[axis:]
chunk_op = TensorSlice(slices=slices, dtype=op.dtype, sparse=op.sparse)
out_chunk = chunk_op.new_chunk(
[input_chunk], shape=shape, index=idx, order=output.order
)
out_chunks.append(out_chunk)
new_op = op.copy()
return new_op.new_tensors(
op.inputs, output.shape, chunks=out_chunks, nsplits=output_nsplits
)
|
def tile(cls, op):
from ..indexing.slice import TensorSlice
inputs = unify_chunks(*op.inputs)
output = op.outputs[0]
axis = op.axis
output_nsplits = (
inputs[0].nsplits[:axis] + ((1,) * len(inputs),) + inputs[0].nsplits[axis:]
)
output_idxes = itertools.product(*[range(len(nsplit)) for nsplit in output_nsplits])
out_chunks = []
for idx in output_idxes:
input_idx = idx[:axis] + idx[axis + 1 :]
i = idx[axis]
input_chunk = inputs[i].cix[input_idx]
slices = (
[slice(None)] * axis
+ [np.newaxis]
+ [slice(None)] * (len(input_idx) - axis)
)
shape = input_chunk.shape[:axis] + (1,) + input_chunk.shape[axis:]
chunk_op = TensorSlice(slices=slices, dtype=op.dtype, sparse=op.sparse)
out_chunk = chunk_op.new_chunk(
[input_chunk], shape=shape, index=idx, order=output.order
)
out_chunks.append(out_chunk)
new_op = op.copy()
return new_op.new_tensors(
op.inputs, output.shape, chunks=out_chunks, nsplits=output_nsplits
)
|
https://github.com/mars-project/mars/issues/2018
|
In [52]: df = md.DataFrame(index=[1, 2, 3])
In [53]: df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
---------------------------------------------------------------------------
NotImplementedError Traceback (most recent call last)
<ipython-input-53-f550a59ef82c> in <module>
----> 1 df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
~/Workspace/mars/mars/dataframe/indexing/setitem.py in dataframe_setitem(df, col, value)
182 def dataframe_setitem(df, col, value):
183 op = DataFrameSetitem(target=df, indexes=col, value=value)
--> 184 return op(df, value)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
457 @functools.wraps(func)
458 def _inner(*args, **kwargs):
--> 459 with self:
460 return func(*args, **kwargs)
461
~/Workspace/mars/mars/dataframe/indexing/setitem.py in __call__(self, target, value)
83
84 if value.index_value.key != target.index_value.key: # pragma: no cover
---> 85 raise NotImplementedError('Does not support setting value '
86 'with different index for now')
87
NotImplementedError: Does not support setting value with different index for now
|
NotImplementedError
|
def stack(tensors, axis=0, out=None):
"""
Join a sequence of tensors along a new axis.
The `axis` parameter specifies the index of the new axis in the dimensions
of the result. For example, if ``axis=0`` it will be the first dimension
and if ``axis=-1`` it will be the last dimension.
Parameters
----------
tensors : sequence of array_like
Each tensor must have the same shape.
axis : int, optional
The axis in the result tensor along which the input tensors are stacked.
out : Tensor, optional
If provided, the destination to place the result. The shape must be
correct, matching that of what stack would have returned if no
out argument were specified.
Returns
-------
stacked : Tensor
The stacked tensor has one more dimension than the input tensors.
See Also
--------
concatenate : Join a sequence of tensors along an existing axis.
split : Split tensor into a list of multiple sub-tensors of equal size.
block : Assemble tensors from blocks.
Examples
--------
>>> import mars.tensor as mt
>>> arrays = [mt.random.randn(3, 4) for _ in range(10)]
>>> mt.stack(arrays, axis=0).shape
(10, 3, 4)
>>> mt.stack(arrays, axis=1).shape
(3, 10, 4)
>>> mt.stack(arrays, axis=2).shape
(3, 4, 10)
>>> a = mt.array([1, 2, 3])
>>> b = mt.array([2, 3, 4])
>>> mt.stack((a, b)).execute()
array([[1, 2, 3],
[2, 3, 4]])
>>> mt.stack((a, b), axis=-1).execute()
array([[1, 2],
[2, 3],
[3, 4]])
"""
tensors = [astensor(t) for t in tensors]
to_check_shapes = []
for t in tensors:
if not any(np.isnan(s) for s in t.shape):
to_check_shapes.append(t.shape)
if to_check_shapes and len(set(to_check_shapes)) != 1:
raise ValueError("all input tensors must have the same shape")
ndim = len(tensors[0].shape)
raw_axis = axis
if axis < 0:
axis = ndim + axis + 1
if axis > ndim or axis < 0:
raise np.AxisError(
f"axis {raw_axis} is out of bounds for tensor of dimension {ndim}"
)
dtype = np.result_type(*[t.dtype for t in tensors])
sparse = all(t.issparse() for t in tensors)
op = TensorStack(axis=axis, dtype=dtype, sparse=sparse)
return op(tensors, out=out)
|
def stack(tensors, axis=0, out=None):
"""
Join a sequence of tensors along a new axis.
The `axis` parameter specifies the index of the new axis in the dimensions
of the result. For example, if ``axis=0`` it will be the first dimension
and if ``axis=-1`` it will be the last dimension.
Parameters
----------
tensors : sequence of array_like
Each tensor must have the same shape.
axis : int, optional
The axis in the result tensor along which the input tensors are stacked.
out : Tensor, optional
If provided, the destination to place the result. The shape must be
correct, matching that of what stack would have returned if no
out argument were specified.
Returns
-------
stacked : Tensor
The stacked tensor has one more dimension than the input tensors.
See Also
--------
concatenate : Join a sequence of tensors along an existing axis.
split : Split tensor into a list of multiple sub-tensors of equal size.
block : Assemble tensors from blocks.
Examples
--------
>>> import mars.tensor as mt
>>> arrays = [mt.random.randn(3, 4) for _ in range(10)]
>>> mt.stack(arrays, axis=0).shape
(10, 3, 4)
>>> mt.stack(arrays, axis=1).shape
(3, 10, 4)
>>> mt.stack(arrays, axis=2).shape
(3, 4, 10)
>>> a = mt.array([1, 2, 3])
>>> b = mt.array([2, 3, 4])
>>> mt.stack((a, b)).execute()
array([[1, 2, 3],
[2, 3, 4]])
>>> mt.stack((a, b), axis=-1).execute()
array([[1, 2],
[2, 3],
[3, 4]])
"""
tensors = [astensor(t) for t in tensors]
if len(set(t.shape for t in tensors)) != 1:
raise ValueError("all input tensors must have the same shape")
ndim = len(tensors[0].shape)
raw_axis = axis
if axis < 0:
axis = ndim + axis + 1
if axis > ndim or axis < 0:
raise np.AxisError(
f"axis {raw_axis} is out of bounds for tensor of dimension {ndim}"
)
dtype = np.result_type(*[t.dtype for t in tensors])
sparse = all(t.issparse() for t in tensors)
op = TensorStack(axis=axis, dtype=dtype, sparse=sparse)
return op(tensors, out=out)
|
https://github.com/mars-project/mars/issues/2018
|
In [52]: df = md.DataFrame(index=[1, 2, 3])
In [53]: df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
---------------------------------------------------------------------------
NotImplementedError Traceback (most recent call last)
<ipython-input-53-f550a59ef82c> in <module>
----> 1 df['a'] = md.Series(['a', 'b', 'c'], index=[2, 3, 4])
~/Workspace/mars/mars/dataframe/indexing/setitem.py in dataframe_setitem(df, col, value)
182 def dataframe_setitem(df, col, value):
183 op = DataFrameSetitem(target=df, indexes=col, value=value)
--> 184 return op(df, value)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
457 @functools.wraps(func)
458 def _inner(*args, **kwargs):
--> 459 with self:
460 return func(*args, **kwargs)
461
~/Workspace/mars/mars/dataframe/indexing/setitem.py in __call__(self, target, value)
83
84 if value.index_value.key != target.index_value.key: # pragma: no cover
---> 85 raise NotImplementedError('Does not support setting value '
86 'with different index for now')
87
NotImplementedError: Does not support setting value with different index for now
|
NotImplementedError
|
def __call__(self, a, bins, range, weights):
if range is not None:
_check_range(range)
if isinstance(bins, str):
# string, 'auto', 'stone', ...
# shape is unknown
bin_name = bins
# if `bins` is a string for an automatic method,
# this will replace it with the number of bins calculated
if bin_name not in _hist_bin_selectors:
raise ValueError(f"{bin_name!r} is not a valid estimator for `bins`")
if weights is not None:
raise TypeError(
"Automated estimation of the number of "
"bins is not supported for weighted data"
)
if isinstance(range, tuple) and len(range) == 2:
# if `bins` is a string, e.g. 'auto', 'stone'...,
# and `range` provided as well,
# `a` should be trimmed first
first_edge, last_edge = _get_outer_edges(a, range)
a = a[(a >= first_edge) & (a <= last_edge)]
shape = (np.nan,)
elif mt.ndim(bins) == 0:
try:
n_equal_bins = operator.index(bins)
except TypeError: # pragma: no cover
raise TypeError("`bins` must be an integer, a string, or an array")
if n_equal_bins < 1:
raise ValueError("`bins` must be positive, when an integer")
shape = (bins + 1,)
elif mt.ndim(bins) == 1:
if not isinstance(bins, TENSOR_TYPE):
bins = np.asarray(bins)
if not is_asc_sorted(bins):
raise ValueError("`bins` must increase monotonically, when an array")
shape = astensor(bins).shape
else:
raise ValueError("`bins` must be 1d, when an array")
inputs = [a]
if isinstance(bins, TENSOR_TYPE):
inputs.append(bins)
if weights is not None:
inputs.append(weights)
if (
(a.size > 0 or np.isnan(a.size))
and (isinstance(bins, str) or mt.ndim(bins) == 0)
and not range
):
# for bins that is str or integer, requires min max calculated first
# dims need to be kept in case a is empty which causes errors in reduction
input_min = self._input_min = a.min(keepdims=True)
inputs.append(input_min)
input_max = self._input_max = a.max(keepdims=True)
inputs.append(input_max)
return self.new_tensor(inputs, shape=shape, order=TensorOrder.C_ORDER)
|
def __call__(self, a, bins, range, weights):
if range is not None:
_check_range(range)
if isinstance(bins, str):
# string, 'auto', 'stone', ...
# shape is unknown
bin_name = bins
# if `bins` is a string for an automatic method,
# this will replace it with the number of bins calculated
if bin_name not in _hist_bin_selectors:
raise ValueError(f"{bin_name!r} is not a valid estimator for `bins`")
if weights is not None:
raise TypeError(
"Automated estimation of the number of "
"bins is not supported for weighted data"
)
if isinstance(range, tuple) and len(range) == 2:
# if `bins` is a string, e.g. 'auto', 'stone'...,
# and `range` provided as well,
# `a` should be trimmed first
first_edge, last_edge = _get_outer_edges(a, range)
a = a[(a >= first_edge) & (a <= last_edge)]
shape = (np.nan,)
elif mt.ndim(bins) == 0:
try:
n_equal_bins = operator.index(bins)
except TypeError: # pragma: no cover
raise TypeError("`bins` must be an integer, a string, or an array")
if n_equal_bins < 1:
raise ValueError("`bins` must be positive, when an integer")
shape = (bins + 1,)
elif mt.ndim(bins) == 1:
if not isinstance(bins, TENSOR_TYPE):
bins = np.asarray(bins)
if not is_asc_sorted(bins):
raise ValueError("`bins` must increase monotonically, when an array")
shape = astensor(bins).shape
else:
raise ValueError("`bins` must be 1d, when an array")
inputs = [a]
if isinstance(bins, TENSOR_TYPE):
inputs.append(bins)
if weights is not None:
inputs.append(weights)
if (
(a.size > 0 or np.isnan(a.size))
and (isinstance(bins, str) or mt.ndim(bins) == 0)
and not range
):
# for bins that is str or integer,
# requires min max calculated first
input_min = self._input_min = a.min()
inputs.append(input_min)
input_max = self._input_max = a.max()
inputs.append(input_max)
return self.new_tensor(inputs, shape=shape, order=TensorOrder.C_ORDER)
|
https://github.com/mars-project/mars/issues/1959
|
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 479, in send
r = low_conn.getresponse(buffering=True)
TypeError: getresponse() got an unexpected keyword argument 'buffering'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 482, in send
r = low_conn.getresponse()
File "/opt/conda/lib/python3.7/http/client.py", line 1344, in getresponse
response.begin()
File "/opt/conda/lib/python3.7/http/client.py", line 306, in begin
version, status, reason = self._read_status()
File "/opt/conda/lib/python3.7/http/client.py", line 267, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/opt/conda/lib/python3.7/socket.py", line 589, in readinto
return self._sock.recv_into(b)
socket.timeout: timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 201, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 649, in handle
return runner(results, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 359, in execute
cls._execute_arrow_tunnel(ctx, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 349, in _execute_arrow_tunnel
writer.close()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 548, in close
self._flush()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 545, in _flush
self._request_callback(gen())
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/tabletunnel.py", line 315, in upload
self._client.put(url, data=data, params=params, headers=headers)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 154, in put
return self.request(url, 'put', data=data, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 133, in request
proxies=self._proxy)
File "/opt/conda/lib/python3.7/site-packages/requests/sessions.py", line 643, in send
r = adapter.send(request, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 498, in send
raise ConnectionError(err, request=request)
requests.exceptions.ConnectionError: timed out
2021-02-03T05:21:04Z (<ThreadPoolWorker at 0x7f14c2e8e7e0 thread_ident=0x7f14cfe3f700 threadpool-hub=<Hub at 0x7f14c338d1d0 thread_ident=0x7f14f8070740>>, <cyfunction GeventThreadPool._wrap_watch.<locals>.inner at 0x7f14c2ec37a0>) failed with ConnectionError
2021-02-03 13:21:04,748 mars.worker.calc 77 ERROR Unexpected exception occurred in BaseCalcActor._calc_results. graph_key=402439ba636c37f11bea13fe28ad9bfb
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 479, in send
r = low_conn.getresponse(buffering=True)
TypeError: getresponse() got an unexpected keyword argument 'buffering'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 482, in send
r = low_conn.getresponse()
File "/opt/conda/lib/python3.7/http/client.py", line 1344, in getresponse
response.begin()
File "/opt/conda/lib/python3.7/http/client.py", line 306, in begin
version, status, reason = self._read_status()
File "/opt/conda/lib/python3.7/http/client.py", line 267, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/opt/conda/lib/python3.7/socket.py", line 589, in readinto
return self._sock.recv_into(b)
socket.timeout: timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 200, in _calc_results
chunk_targets, retval=False).result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 201, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 649, in handle
return runner(results, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 359, in execute
cls._execute_arrow_tunnel(ctx, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 349, in _execute_arrow_tunnel
writer.close()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 548, in close
self._flush()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 545, in _flush
self._request_callback(gen())
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/tabletunnel.py", line 315, in upload
self._client.put(url, data=data, params=params, headers=headers)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 154, in put
return self.request(url, 'put', data=data, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 133, in request
proxies=self._proxy)
File "/opt/conda/lib/python3.7/site-packages/requests/sessions.py", line 643, in send
r = adapter.send(request, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 498, in send
raise ConnectionError(err, request=request)
requests.exceptions.ConnectionError: timed out
2021-02-03 13:21:04,749 mars.promise 77 ERROR Exception met in executing promise.
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 479, in send
r = low_conn.getresponse(buffering=True)
TypeError: getresponse() got an unexpected keyword argument 'buffering'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 482, in send
r = low_conn.getresponse()
File "/opt/conda/lib/python3.7/http/client.py", line 1344, in getresponse
response.begin()
File "/opt/conda/lib/python3.7/http/client.py", line 306, in begin
version, status, reason = self._read_status()
File "/opt/conda/lib/python3.7/http/client.py", line 267, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/opt/conda/lib/python3.7/socket.py", line 589, in readinto
return self._sock.recv_into(b)
socket.timeout: timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 301, in <lambda>
.then(lambda context_dict: _start_calc(context_dict)) \
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 276, in _start_calc
return self._calc_results(session_id, graph_key, graph, context_dict, chunk_targets)
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 200, in _calc_results
chunk_targets, retval=False).result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 201, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 649, in handle
return runner(results, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 359, in execute
cls._execute_arrow_tunnel(ctx, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 349, in _execute_arrow_tunnel
writer.close()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 548, in close
self._flush()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 545, in _flush
self._request_callback(gen())
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/tabletunnel.py", line 315, in upload
self._client.put(url, data=data, params=params, headers=headers)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 154, in put
return self.request(url, 'put', data=data, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 133, in request
proxies=self._proxy)
File "/opt/conda/lib/python3.7/site-packages/requests/sessions.py", line 643, in send
r = adapter.send(request, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 498, in send
raise ConnectionError(err, request=request)
requests.exceptions.ConnectionError: timed out
2021-02-03 13:21:04,751 mars.promise 77 ERROR Unhandled exception in promise
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 479, in send
r = low_conn.getresponse(buffering=True)
TypeError: getresponse() got an unexpected keyword argument 'buffering'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 482, in send
r = low_conn.getresponse()
File "/opt/conda/lib/python3.7/http/client.py", line 1344, in getresponse
response.begin()
File "/opt/conda/lib/python3.7/http/client.py", line 306, in begin
version, status, reason = self._read_status()
File "/opt/conda/lib/python3.7/http/client.py", line 267, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/opt/conda/lib/python3.7/socket.py", line 589, in readinto
return self._sock.recv_into(b)
socket.timeout: timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 301, in <lambda>
.then(lambda context_dict: _start_calc(context_dict)) \
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 276, in _start_calc
return self._calc_results(session_id, graph_key, graph, context_dict, chunk_targets)
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 200, in _calc_results
chunk_targets, retval=False).result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 201, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 649, in handle
return runner(results, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 359, in execute
cls._execute_arrow_tunnel(ctx, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 349, in _execute_arrow_tunnel
writer.close()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 548, in close
self._flush()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 545, in _flush
self._request_callback(gen())
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/tabletunnel.py", line 315, in upload
self._client.put(url, data=data, params=params, headers=headers)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 154, in put
return self.request(url, 'put', data=data, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 133, in request
proxies=self._proxy)
File "/opt/conda/lib/python3.7/site-packages/requests/sessions.py", line 643, in send
r = adapter.send(request, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 498, in send
raise ConnectionError(err, request=request)
requests.exceptions.ConnectionError: timed out
2021-02-03 13:21:04,753 mars.promise 77 ERROR Exception met in executing promise.
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 302, in <lambda>
.then(lambda keys: _finalize(keys, None), lambda *exc_info: _finalize(None, exc_info))
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 285, in _finalize
self.tell_promise(callback, *exc_info, _accept=False)
File "/opt/conda/lib/python3.7/site-packages/mars/promise.py", line 530, in tell_promise
return self.ctx.actor_ref(uid, address=address).tell(callback_args, wait=wait)
File "mars/actors/core.pyx", line 39, in mars.actors.core.ActorRef.tell
File "mars/actors/core.pyx", line 41, in mars.actors.core.ActorRef.tell
File "mars/actors/pool/gevent_pool.pyx", line 282, in mars.actors.pool.gevent_pool.ActorContext.tell
File "mars/actors/pool/gevent_pool.pyx", line 853, in mars.actors.pool.gevent_pool.Communicator.tell
File "mars/actors/pool/gevent_pool.pyx", line 859, in mars.actors.pool.gevent_pool.Communicator.tell
File "mars/actors/pool/gevent_pool.pyx", line 846, in mars.actors.pool.gevent_pool.Communicator._send
File "mars/actors/pool/gevent_pool.pyx", line 755, in mars.actors.pool.gevent_pool.Communicator._dispatch
File "mars/actors/pool/gevent_pool.pyx", line 819, in mars.actors.pool.gevent_pool.Communicator._send_process
File "mars/actors/pool/messages.pyx", line 570, in mars.actors.pool.messages.pack_tell_message
File "mars/actors/pool/messages.pyx", line 526, in mars.actors.pool.messages._pack_send_message
File "mars/actors/pool/messages.pyx", line 469, in mars.actors.pool.messages._pack_message
File "mars/actors/pool/messages.pyx", line 436, in mars.actors.pool.messages._pack_tuple_message
TypeError: can't pickle generator objects
2021-02-03 13:21:04,754 mars.promise 77 ERROR Unhandled exception in promise
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 302, in <lambda>
.then(lambda keys: _finalize(keys, None), lambda *exc_info: _finalize(None, exc_info))
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 285, in _finalize
self.tell_promise(callback, *exc_info, _accept=False)
File "/opt/conda/lib/python3.7/site-packages/mars/promise.py", line 530, in tell_promise
return self.ctx.actor_ref(uid, address=address).tell(callback_args, wait=wait)
File "mars/actors/core.pyx", line 39, in mars.actors.core.ActorRef.tell
File "mars/actors/core.pyx", line 41, in mars.actors.core.ActorRef.tell
File "mars/actors/pool/gevent_pool.pyx", line 282, in mars.actors.pool.gevent_pool.ActorContext.tell
File "mars/actors/pool/gevent_pool.pyx", line 853, in mars.actors.pool.gevent_pool.Communicator.tell
File "mars/actors/pool/gevent_pool.pyx", line 859, in mars.actors.pool.gevent_pool.Communicator.tell
File "mars/actors/pool/gevent_pool.pyx", line 846, in mars.actors.pool.gevent_pool.Communicator._send
File "mars/actors/pool/gevent_pool.pyx", line 755, in mars.actors.pool.gevent_pool.Communicator._dispatch
File "mars/actors/pool/gevent_pool.pyx", line 819, in mars.actors.pool.gevent_pool.Communicator._send_process
2021-02-03 13:21:02,172 mars.worker.execution 58 DEBUG Executing states: {'402439ba636c37f11bea13fe28ad9bfb': (278.46709537506104, 'CALCULATING'), '4a57865de1482157ed1f3bdd28b8d030': (75.9635705947876, 'CALCULATING')}
File "mars/actors/pool/messages.pyx", line 570, in mars.actors.pool.messages.pack_tell_message
File "mars/actors/pool/messages.pyx", line 526, in mars.actors.pool.messages._pack_send_message
File "mars/actors/pool/messages.pyx", line 469, in mars.actors.pool.messages._pack_message
File "mars/actors/pool/messages.pyx", line 436, in mars.actors.pool.messages._pack_tuple_message
TypeError: can't pickle generator objects
|
TypeError
|
def tile(cls, op):
ctx = get_context()
range_ = op.range
if isinstance(op.bins, str):
check_chunks_unknown_shape([op.input], TilesError)
if op.input_min is not None:
# check if input min and max are calculated
min_max_chunk_keys = [inp.chunks[0].key for inp in (op.input_min, op.input_max)]
metas = ctx.get_chunk_metas(min_max_chunk_keys)
if any(meta is None for meta in metas):
raise TilesError("`input_min` or `input_max` need be executed first")
range_results = ctx.get_chunk_results(min_max_chunk_keys)
# make sure returned bounds are valid
if all(x.size > 0 for x in range_results):
range_ = tuple(x[0] for x in range_results)
if isinstance(op.bins, TENSOR_TYPE):
# `bins` is a Tensor, needs to be calculated first
bins_chunk_keys = [c.key for c in op.bins.chunks]
metas = ctx.get_chunk_metas(bins_chunk_keys)
if any(meta is None for meta in metas):
raise TilesError("`bins` should be executed first if it's a tensor")
bin_datas = ctx.get_chunk_results(bins_chunk_keys)
bins = np.concatenate(bin_datas)
else:
bins = op.bins
bin_edges, _ = _get_bin_edges(op, op.input, bins, range_, op.weights)
bin_edges = bin_edges._inplace_tile()
return [bin_edges]
|
def tile(cls, op):
ctx = get_context()
range_ = op.range
if isinstance(op.bins, str):
check_chunks_unknown_shape([op.input], TilesError)
if op.input_min is not None:
# check if input min and max are calculated
min_max_chunk_keys = [inp.chunks[0].key for inp in (op.input_min, op.input_max)]
metas = ctx.get_chunk_metas(min_max_chunk_keys)
if any(meta is None for meta in metas):
raise TilesError("`input_min` or `input_max` need be executed first")
range_ = tuple(ctx.get_chunk_results(min_max_chunk_keys))
if isinstance(op.bins, TENSOR_TYPE):
# `bins` is a Tensor, needs to be calculated first
bins_chunk_keys = [c.key for c in op.bins.chunks]
metas = ctx.get_chunk_metas(bins_chunk_keys)
if any(meta is None for meta in metas):
raise TilesError("`bins` should be executed first if it's a tensor")
bin_datas = ctx.get_chunk_results(bins_chunk_keys)
bins = np.concatenate(bin_datas)
else:
bins = op.bins
bin_edges, _ = _get_bin_edges(op, op.input, bins, range_, op.weights)
bin_edges = bin_edges._inplace_tile()
return [bin_edges]
|
https://github.com/mars-project/mars/issues/1959
|
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 479, in send
r = low_conn.getresponse(buffering=True)
TypeError: getresponse() got an unexpected keyword argument 'buffering'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 482, in send
r = low_conn.getresponse()
File "/opt/conda/lib/python3.7/http/client.py", line 1344, in getresponse
response.begin()
File "/opt/conda/lib/python3.7/http/client.py", line 306, in begin
version, status, reason = self._read_status()
File "/opt/conda/lib/python3.7/http/client.py", line 267, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/opt/conda/lib/python3.7/socket.py", line 589, in readinto
return self._sock.recv_into(b)
socket.timeout: timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 201, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 649, in handle
return runner(results, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 359, in execute
cls._execute_arrow_tunnel(ctx, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 349, in _execute_arrow_tunnel
writer.close()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 548, in close
self._flush()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 545, in _flush
self._request_callback(gen())
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/tabletunnel.py", line 315, in upload
self._client.put(url, data=data, params=params, headers=headers)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 154, in put
return self.request(url, 'put', data=data, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 133, in request
proxies=self._proxy)
File "/opt/conda/lib/python3.7/site-packages/requests/sessions.py", line 643, in send
r = adapter.send(request, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 498, in send
raise ConnectionError(err, request=request)
requests.exceptions.ConnectionError: timed out
2021-02-03T05:21:04Z (<ThreadPoolWorker at 0x7f14c2e8e7e0 thread_ident=0x7f14cfe3f700 threadpool-hub=<Hub at 0x7f14c338d1d0 thread_ident=0x7f14f8070740>>, <cyfunction GeventThreadPool._wrap_watch.<locals>.inner at 0x7f14c2ec37a0>) failed with ConnectionError
2021-02-03 13:21:04,748 mars.worker.calc 77 ERROR Unexpected exception occurred in BaseCalcActor._calc_results. graph_key=402439ba636c37f11bea13fe28ad9bfb
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 479, in send
r = low_conn.getresponse(buffering=True)
TypeError: getresponse() got an unexpected keyword argument 'buffering'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 482, in send
r = low_conn.getresponse()
File "/opt/conda/lib/python3.7/http/client.py", line 1344, in getresponse
response.begin()
File "/opt/conda/lib/python3.7/http/client.py", line 306, in begin
version, status, reason = self._read_status()
File "/opt/conda/lib/python3.7/http/client.py", line 267, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/opt/conda/lib/python3.7/socket.py", line 589, in readinto
return self._sock.recv_into(b)
socket.timeout: timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 200, in _calc_results
chunk_targets, retval=False).result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 201, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 649, in handle
return runner(results, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 359, in execute
cls._execute_arrow_tunnel(ctx, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 349, in _execute_arrow_tunnel
writer.close()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 548, in close
self._flush()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 545, in _flush
self._request_callback(gen())
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/tabletunnel.py", line 315, in upload
self._client.put(url, data=data, params=params, headers=headers)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 154, in put
return self.request(url, 'put', data=data, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 133, in request
proxies=self._proxy)
File "/opt/conda/lib/python3.7/site-packages/requests/sessions.py", line 643, in send
r = adapter.send(request, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 498, in send
raise ConnectionError(err, request=request)
requests.exceptions.ConnectionError: timed out
2021-02-03 13:21:04,749 mars.promise 77 ERROR Exception met in executing promise.
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 479, in send
r = low_conn.getresponse(buffering=True)
TypeError: getresponse() got an unexpected keyword argument 'buffering'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 482, in send
r = low_conn.getresponse()
File "/opt/conda/lib/python3.7/http/client.py", line 1344, in getresponse
response.begin()
File "/opt/conda/lib/python3.7/http/client.py", line 306, in begin
version, status, reason = self._read_status()
File "/opt/conda/lib/python3.7/http/client.py", line 267, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/opt/conda/lib/python3.7/socket.py", line 589, in readinto
return self._sock.recv_into(b)
socket.timeout: timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 301, in <lambda>
.then(lambda context_dict: _start_calc(context_dict)) \
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 276, in _start_calc
return self._calc_results(session_id, graph_key, graph, context_dict, chunk_targets)
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 200, in _calc_results
chunk_targets, retval=False).result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 201, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 649, in handle
return runner(results, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 359, in execute
cls._execute_arrow_tunnel(ctx, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 349, in _execute_arrow_tunnel
writer.close()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 548, in close
self._flush()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 545, in _flush
self._request_callback(gen())
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/tabletunnel.py", line 315, in upload
self._client.put(url, data=data, params=params, headers=headers)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 154, in put
return self.request(url, 'put', data=data, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 133, in request
proxies=self._proxy)
File "/opt/conda/lib/python3.7/site-packages/requests/sessions.py", line 643, in send
r = adapter.send(request, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 498, in send
raise ConnectionError(err, request=request)
requests.exceptions.ConnectionError: timed out
2021-02-03 13:21:04,751 mars.promise 77 ERROR Unhandled exception in promise
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 479, in send
r = low_conn.getresponse(buffering=True)
TypeError: getresponse() got an unexpected keyword argument 'buffering'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 482, in send
r = low_conn.getresponse()
File "/opt/conda/lib/python3.7/http/client.py", line 1344, in getresponse
response.begin()
File "/opt/conda/lib/python3.7/http/client.py", line 306, in begin
version, status, reason = self._read_status()
File "/opt/conda/lib/python3.7/http/client.py", line 267, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/opt/conda/lib/python3.7/socket.py", line 589, in readinto
return self._sock.recv_into(b)
socket.timeout: timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 301, in <lambda>
.then(lambda context_dict: _start_calc(context_dict)) \
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 276, in _start_calc
return self._calc_results(session_id, graph_key, graph, context_dict, chunk_targets)
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 200, in _calc_results
chunk_targets, retval=False).result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 201, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 649, in handle
return runner(results, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 359, in execute
cls._execute_arrow_tunnel(ctx, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 349, in _execute_arrow_tunnel
writer.close()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 548, in close
self._flush()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 545, in _flush
self._request_callback(gen())
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/tabletunnel.py", line 315, in upload
self._client.put(url, data=data, params=params, headers=headers)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 154, in put
return self.request(url, 'put', data=data, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 133, in request
proxies=self._proxy)
File "/opt/conda/lib/python3.7/site-packages/requests/sessions.py", line 643, in send
r = adapter.send(request, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 498, in send
raise ConnectionError(err, request=request)
requests.exceptions.ConnectionError: timed out
2021-02-03 13:21:04,753 mars.promise 77 ERROR Exception met in executing promise.
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 302, in <lambda>
.then(lambda keys: _finalize(keys, None), lambda *exc_info: _finalize(None, exc_info))
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 285, in _finalize
self.tell_promise(callback, *exc_info, _accept=False)
File "/opt/conda/lib/python3.7/site-packages/mars/promise.py", line 530, in tell_promise
return self.ctx.actor_ref(uid, address=address).tell(callback_args, wait=wait)
File "mars/actors/core.pyx", line 39, in mars.actors.core.ActorRef.tell
File "mars/actors/core.pyx", line 41, in mars.actors.core.ActorRef.tell
File "mars/actors/pool/gevent_pool.pyx", line 282, in mars.actors.pool.gevent_pool.ActorContext.tell
File "mars/actors/pool/gevent_pool.pyx", line 853, in mars.actors.pool.gevent_pool.Communicator.tell
File "mars/actors/pool/gevent_pool.pyx", line 859, in mars.actors.pool.gevent_pool.Communicator.tell
File "mars/actors/pool/gevent_pool.pyx", line 846, in mars.actors.pool.gevent_pool.Communicator._send
File "mars/actors/pool/gevent_pool.pyx", line 755, in mars.actors.pool.gevent_pool.Communicator._dispatch
File "mars/actors/pool/gevent_pool.pyx", line 819, in mars.actors.pool.gevent_pool.Communicator._send_process
File "mars/actors/pool/messages.pyx", line 570, in mars.actors.pool.messages.pack_tell_message
File "mars/actors/pool/messages.pyx", line 526, in mars.actors.pool.messages._pack_send_message
File "mars/actors/pool/messages.pyx", line 469, in mars.actors.pool.messages._pack_message
File "mars/actors/pool/messages.pyx", line 436, in mars.actors.pool.messages._pack_tuple_message
TypeError: can't pickle generator objects
2021-02-03 13:21:04,754 mars.promise 77 ERROR Unhandled exception in promise
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 302, in <lambda>
.then(lambda keys: _finalize(keys, None), lambda *exc_info: _finalize(None, exc_info))
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 285, in _finalize
self.tell_promise(callback, *exc_info, _accept=False)
File "/opt/conda/lib/python3.7/site-packages/mars/promise.py", line 530, in tell_promise
return self.ctx.actor_ref(uid, address=address).tell(callback_args, wait=wait)
File "mars/actors/core.pyx", line 39, in mars.actors.core.ActorRef.tell
File "mars/actors/core.pyx", line 41, in mars.actors.core.ActorRef.tell
File "mars/actors/pool/gevent_pool.pyx", line 282, in mars.actors.pool.gevent_pool.ActorContext.tell
File "mars/actors/pool/gevent_pool.pyx", line 853, in mars.actors.pool.gevent_pool.Communicator.tell
File "mars/actors/pool/gevent_pool.pyx", line 859, in mars.actors.pool.gevent_pool.Communicator.tell
File "mars/actors/pool/gevent_pool.pyx", line 846, in mars.actors.pool.gevent_pool.Communicator._send
File "mars/actors/pool/gevent_pool.pyx", line 755, in mars.actors.pool.gevent_pool.Communicator._dispatch
File "mars/actors/pool/gevent_pool.pyx", line 819, in mars.actors.pool.gevent_pool.Communicator._send_process
2021-02-03 13:21:02,172 mars.worker.execution 58 DEBUG Executing states: {'402439ba636c37f11bea13fe28ad9bfb': (278.46709537506104, 'CALCULATING'), '4a57865de1482157ed1f3bdd28b8d030': (75.9635705947876, 'CALCULATING')}
File "mars/actors/pool/messages.pyx", line 570, in mars.actors.pool.messages.pack_tell_message
File "mars/actors/pool/messages.pyx", line 526, in mars.actors.pool.messages._pack_send_message
File "mars/actors/pool/messages.pyx", line 469, in mars.actors.pool.messages._pack_message
File "mars/actors/pool/messages.pyx", line 436, in mars.actors.pool.messages._pack_tuple_message
TypeError: can't pickle generator objects
|
TypeError
|
def calc(self, session_id, graph_key, ser_graph, chunk_targets, callback):
"""
Do actual calculation. This method should be called when all data
is available (i.e., either in shared cache or in memory)
:param session_id: session id
:param graph_key: key of executable graph
:param ser_graph: serialized executable graph
:param chunk_targets: keys of target chunks
:param callback: promise callback, returns the uid of InProcessCacheActor
"""
self._executing_set.add(graph_key)
graph = deserialize_graph(ser_graph)
chunk_targets = set(chunk_targets)
keys_to_fetch = self._get_keys_to_fetch(graph)
self._make_quotas_local(
session_id, graph_key, keys_to_fetch + list(chunk_targets), process_quota=True
)
def _start_calc(context_dict):
return self._calc_results(
session_id, graph_key, graph, context_dict, chunk_targets
)
def _finalize(keys, exc_info):
if not self._marked_as_destroy:
self._dispatch_ref.register_free_slot(
self.uid, self._slot_name, _tell=True, _wait=False
)
if not exc_info:
self.tell_promise(callback, keys)
else:
try:
self.tell_promise(callback, *exc_info, _accept=False)
except:
self.tell_promise(
callback,
*build_exc_info(
SystemError,
f"Failed to send errors to scheduler, type: {exc_info[0].__name__}, "
f"message: {str(exc_info[1])}",
),
_accept=False,
)
raise
keys_to_release = [
k for k in keys_to_fetch if get_chunk_key(k) not in chunk_targets
]
if exc_info:
keys_to_release.extend(chunk_targets)
if self._remove_intermediate:
keys_to_delete = keys_to_release
else:
keys_to_delete = []
if keys_to_delete:
self.storage_client.delete(
session_id, keys_to_delete, [self._calc_intermediate_device]
)
logger.debug("Finish calculating operand %s.", graph_key)
return (
self._fetch_keys_to_process(session_id, keys_to_fetch)
.then(lambda context_dict: _start_calc(context_dict))
.then(
lambda keys: _finalize(keys, None),
lambda *exc_info: _finalize(None, exc_info),
)
)
|
def calc(self, session_id, graph_key, ser_graph, chunk_targets, callback):
"""
Do actual calculation. This method should be called when all data
is available (i.e., either in shared cache or in memory)
:param session_id: session id
:param graph_key: key of executable graph
:param ser_graph: serialized executable graph
:param chunk_targets: keys of target chunks
:param callback: promise callback, returns the uid of InProcessCacheActor
"""
self._executing_set.add(graph_key)
graph = deserialize_graph(ser_graph)
chunk_targets = set(chunk_targets)
keys_to_fetch = self._get_keys_to_fetch(graph)
self._make_quotas_local(
session_id, graph_key, keys_to_fetch + list(chunk_targets), process_quota=True
)
def _start_calc(context_dict):
return self._calc_results(
session_id, graph_key, graph, context_dict, chunk_targets
)
def _finalize(keys, exc_info):
if not self._marked_as_destroy:
self._dispatch_ref.register_free_slot(
self.uid, self._slot_name, _tell=True, _wait=False
)
if not exc_info:
self.tell_promise(callback, keys)
else:
self.tell_promise(callback, *exc_info, _accept=False)
keys_to_release = [
k for k in keys_to_fetch if get_chunk_key(k) not in chunk_targets
]
if exc_info:
keys_to_release.extend(chunk_targets)
if self._remove_intermediate:
keys_to_delete = keys_to_release
else:
keys_to_delete = []
if keys_to_delete:
self.storage_client.delete(
session_id, keys_to_delete, [self._calc_intermediate_device]
)
logger.debug("Finish calculating operand %s.", graph_key)
return (
self._fetch_keys_to_process(session_id, keys_to_fetch)
.then(lambda context_dict: _start_calc(context_dict))
.then(
lambda keys: _finalize(keys, None),
lambda *exc_info: _finalize(None, exc_info),
)
)
|
https://github.com/mars-project/mars/issues/1959
|
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 479, in send
r = low_conn.getresponse(buffering=True)
TypeError: getresponse() got an unexpected keyword argument 'buffering'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 482, in send
r = low_conn.getresponse()
File "/opt/conda/lib/python3.7/http/client.py", line 1344, in getresponse
response.begin()
File "/opt/conda/lib/python3.7/http/client.py", line 306, in begin
version, status, reason = self._read_status()
File "/opt/conda/lib/python3.7/http/client.py", line 267, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/opt/conda/lib/python3.7/socket.py", line 589, in readinto
return self._sock.recv_into(b)
socket.timeout: timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 201, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 649, in handle
return runner(results, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 359, in execute
cls._execute_arrow_tunnel(ctx, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 349, in _execute_arrow_tunnel
writer.close()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 548, in close
self._flush()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 545, in _flush
self._request_callback(gen())
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/tabletunnel.py", line 315, in upload
self._client.put(url, data=data, params=params, headers=headers)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 154, in put
return self.request(url, 'put', data=data, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 133, in request
proxies=self._proxy)
File "/opt/conda/lib/python3.7/site-packages/requests/sessions.py", line 643, in send
r = adapter.send(request, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 498, in send
raise ConnectionError(err, request=request)
requests.exceptions.ConnectionError: timed out
2021-02-03T05:21:04Z (<ThreadPoolWorker at 0x7f14c2e8e7e0 thread_ident=0x7f14cfe3f700 threadpool-hub=<Hub at 0x7f14c338d1d0 thread_ident=0x7f14f8070740>>, <cyfunction GeventThreadPool._wrap_watch.<locals>.inner at 0x7f14c2ec37a0>) failed with ConnectionError
2021-02-03 13:21:04,748 mars.worker.calc 77 ERROR Unexpected exception occurred in BaseCalcActor._calc_results. graph_key=402439ba636c37f11bea13fe28ad9bfb
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 479, in send
r = low_conn.getresponse(buffering=True)
TypeError: getresponse() got an unexpected keyword argument 'buffering'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 482, in send
r = low_conn.getresponse()
File "/opt/conda/lib/python3.7/http/client.py", line 1344, in getresponse
response.begin()
File "/opt/conda/lib/python3.7/http/client.py", line 306, in begin
version, status, reason = self._read_status()
File "/opt/conda/lib/python3.7/http/client.py", line 267, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/opt/conda/lib/python3.7/socket.py", line 589, in readinto
return self._sock.recv_into(b)
socket.timeout: timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 200, in _calc_results
chunk_targets, retval=False).result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 201, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 649, in handle
return runner(results, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 359, in execute
cls._execute_arrow_tunnel(ctx, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 349, in _execute_arrow_tunnel
writer.close()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 548, in close
self._flush()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 545, in _flush
self._request_callback(gen())
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/tabletunnel.py", line 315, in upload
self._client.put(url, data=data, params=params, headers=headers)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 154, in put
return self.request(url, 'put', data=data, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 133, in request
proxies=self._proxy)
File "/opt/conda/lib/python3.7/site-packages/requests/sessions.py", line 643, in send
r = adapter.send(request, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 498, in send
raise ConnectionError(err, request=request)
requests.exceptions.ConnectionError: timed out
2021-02-03 13:21:04,749 mars.promise 77 ERROR Exception met in executing promise.
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 479, in send
r = low_conn.getresponse(buffering=True)
TypeError: getresponse() got an unexpected keyword argument 'buffering'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 482, in send
r = low_conn.getresponse()
File "/opt/conda/lib/python3.7/http/client.py", line 1344, in getresponse
response.begin()
File "/opt/conda/lib/python3.7/http/client.py", line 306, in begin
version, status, reason = self._read_status()
File "/opt/conda/lib/python3.7/http/client.py", line 267, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/opt/conda/lib/python3.7/socket.py", line 589, in readinto
return self._sock.recv_into(b)
socket.timeout: timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 301, in <lambda>
.then(lambda context_dict: _start_calc(context_dict)) \
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 276, in _start_calc
return self._calc_results(session_id, graph_key, graph, context_dict, chunk_targets)
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 200, in _calc_results
chunk_targets, retval=False).result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 201, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 649, in handle
return runner(results, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 359, in execute
cls._execute_arrow_tunnel(ctx, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 349, in _execute_arrow_tunnel
writer.close()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 548, in close
self._flush()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 545, in _flush
self._request_callback(gen())
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/tabletunnel.py", line 315, in upload
self._client.put(url, data=data, params=params, headers=headers)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 154, in put
return self.request(url, 'put', data=data, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 133, in request
proxies=self._proxy)
File "/opt/conda/lib/python3.7/site-packages/requests/sessions.py", line 643, in send
r = adapter.send(request, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 498, in send
raise ConnectionError(err, request=request)
requests.exceptions.ConnectionError: timed out
2021-02-03 13:21:04,751 mars.promise 77 ERROR Unhandled exception in promise
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 479, in send
r = low_conn.getresponse(buffering=True)
TypeError: getresponse() got an unexpected keyword argument 'buffering'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 482, in send
r = low_conn.getresponse()
File "/opt/conda/lib/python3.7/http/client.py", line 1344, in getresponse
response.begin()
File "/opt/conda/lib/python3.7/http/client.py", line 306, in begin
version, status, reason = self._read_status()
File "/opt/conda/lib/python3.7/http/client.py", line 267, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/opt/conda/lib/python3.7/socket.py", line 589, in readinto
return self._sock.recv_into(b)
socket.timeout: timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 301, in <lambda>
.then(lambda context_dict: _start_calc(context_dict)) \
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 276, in _start_calc
return self._calc_results(session_id, graph_key, graph, context_dict, chunk_targets)
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 200, in _calc_results
chunk_targets, retval=False).result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 201, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 649, in handle
return runner(results, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 359, in execute
cls._execute_arrow_tunnel(ctx, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 349, in _execute_arrow_tunnel
writer.close()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 548, in close
self._flush()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 545, in _flush
self._request_callback(gen())
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/tabletunnel.py", line 315, in upload
self._client.put(url, data=data, params=params, headers=headers)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 154, in put
return self.request(url, 'put', data=data, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 133, in request
proxies=self._proxy)
File "/opt/conda/lib/python3.7/site-packages/requests/sessions.py", line 643, in send
r = adapter.send(request, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 498, in send
raise ConnectionError(err, request=request)
requests.exceptions.ConnectionError: timed out
2021-02-03 13:21:04,753 mars.promise 77 ERROR Exception met in executing promise.
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 302, in <lambda>
.then(lambda keys: _finalize(keys, None), lambda *exc_info: _finalize(None, exc_info))
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 285, in _finalize
self.tell_promise(callback, *exc_info, _accept=False)
File "/opt/conda/lib/python3.7/site-packages/mars/promise.py", line 530, in tell_promise
return self.ctx.actor_ref(uid, address=address).tell(callback_args, wait=wait)
File "mars/actors/core.pyx", line 39, in mars.actors.core.ActorRef.tell
File "mars/actors/core.pyx", line 41, in mars.actors.core.ActorRef.tell
File "mars/actors/pool/gevent_pool.pyx", line 282, in mars.actors.pool.gevent_pool.ActorContext.tell
File "mars/actors/pool/gevent_pool.pyx", line 853, in mars.actors.pool.gevent_pool.Communicator.tell
File "mars/actors/pool/gevent_pool.pyx", line 859, in mars.actors.pool.gevent_pool.Communicator.tell
File "mars/actors/pool/gevent_pool.pyx", line 846, in mars.actors.pool.gevent_pool.Communicator._send
File "mars/actors/pool/gevent_pool.pyx", line 755, in mars.actors.pool.gevent_pool.Communicator._dispatch
File "mars/actors/pool/gevent_pool.pyx", line 819, in mars.actors.pool.gevent_pool.Communicator._send_process
File "mars/actors/pool/messages.pyx", line 570, in mars.actors.pool.messages.pack_tell_message
File "mars/actors/pool/messages.pyx", line 526, in mars.actors.pool.messages._pack_send_message
File "mars/actors/pool/messages.pyx", line 469, in mars.actors.pool.messages._pack_message
File "mars/actors/pool/messages.pyx", line 436, in mars.actors.pool.messages._pack_tuple_message
TypeError: can't pickle generator objects
2021-02-03 13:21:04,754 mars.promise 77 ERROR Unhandled exception in promise
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 302, in <lambda>
.then(lambda keys: _finalize(keys, None), lambda *exc_info: _finalize(None, exc_info))
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 285, in _finalize
self.tell_promise(callback, *exc_info, _accept=False)
File "/opt/conda/lib/python3.7/site-packages/mars/promise.py", line 530, in tell_promise
return self.ctx.actor_ref(uid, address=address).tell(callback_args, wait=wait)
File "mars/actors/core.pyx", line 39, in mars.actors.core.ActorRef.tell
File "mars/actors/core.pyx", line 41, in mars.actors.core.ActorRef.tell
File "mars/actors/pool/gevent_pool.pyx", line 282, in mars.actors.pool.gevent_pool.ActorContext.tell
File "mars/actors/pool/gevent_pool.pyx", line 853, in mars.actors.pool.gevent_pool.Communicator.tell
File "mars/actors/pool/gevent_pool.pyx", line 859, in mars.actors.pool.gevent_pool.Communicator.tell
File "mars/actors/pool/gevent_pool.pyx", line 846, in mars.actors.pool.gevent_pool.Communicator._send
File "mars/actors/pool/gevent_pool.pyx", line 755, in mars.actors.pool.gevent_pool.Communicator._dispatch
File "mars/actors/pool/gevent_pool.pyx", line 819, in mars.actors.pool.gevent_pool.Communicator._send_process
2021-02-03 13:21:02,172 mars.worker.execution 58 DEBUG Executing states: {'402439ba636c37f11bea13fe28ad9bfb': (278.46709537506104, 'CALCULATING'), '4a57865de1482157ed1f3bdd28b8d030': (75.9635705947876, 'CALCULATING')}
File "mars/actors/pool/messages.pyx", line 570, in mars.actors.pool.messages.pack_tell_message
File "mars/actors/pool/messages.pyx", line 526, in mars.actors.pool.messages._pack_send_message
File "mars/actors/pool/messages.pyx", line 469, in mars.actors.pool.messages._pack_message
File "mars/actors/pool/messages.pyx", line 436, in mars.actors.pool.messages._pack_tuple_message
TypeError: can't pickle generator objects
|
TypeError
|
def _finalize(keys, exc_info):
if not self._marked_as_destroy:
self._dispatch_ref.register_free_slot(
self.uid, self._slot_name, _tell=True, _wait=False
)
if not exc_info:
self.tell_promise(callback, keys)
else:
try:
self.tell_promise(callback, *exc_info, _accept=False)
except:
self.tell_promise(
callback,
*build_exc_info(
SystemError,
f"Failed to send errors to scheduler, type: {exc_info[0].__name__}, "
f"message: {str(exc_info[1])}",
),
_accept=False,
)
raise
keys_to_release = [
k for k in keys_to_fetch if get_chunk_key(k) not in chunk_targets
]
if exc_info:
keys_to_release.extend(chunk_targets)
if self._remove_intermediate:
keys_to_delete = keys_to_release
else:
keys_to_delete = []
if keys_to_delete:
self.storage_client.delete(
session_id, keys_to_delete, [self._calc_intermediate_device]
)
logger.debug("Finish calculating operand %s.", graph_key)
|
def _finalize(keys, exc_info):
if not self._marked_as_destroy:
self._dispatch_ref.register_free_slot(
self.uid, self._slot_name, _tell=True, _wait=False
)
if not exc_info:
self.tell_promise(callback, keys)
else:
self.tell_promise(callback, *exc_info, _accept=False)
keys_to_release = [
k for k in keys_to_fetch if get_chunk_key(k) not in chunk_targets
]
if exc_info:
keys_to_release.extend(chunk_targets)
if self._remove_intermediate:
keys_to_delete = keys_to_release
else:
keys_to_delete = []
if keys_to_delete:
self.storage_client.delete(
session_id, keys_to_delete, [self._calc_intermediate_device]
)
logger.debug("Finish calculating operand %s.", graph_key)
|
https://github.com/mars-project/mars/issues/1959
|
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 479, in send
r = low_conn.getresponse(buffering=True)
TypeError: getresponse() got an unexpected keyword argument 'buffering'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 482, in send
r = low_conn.getresponse()
File "/opt/conda/lib/python3.7/http/client.py", line 1344, in getresponse
response.begin()
File "/opt/conda/lib/python3.7/http/client.py", line 306, in begin
version, status, reason = self._read_status()
File "/opt/conda/lib/python3.7/http/client.py", line 267, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/opt/conda/lib/python3.7/socket.py", line 589, in readinto
return self._sock.recv_into(b)
socket.timeout: timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 201, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 649, in handle
return runner(results, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 359, in execute
cls._execute_arrow_tunnel(ctx, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 349, in _execute_arrow_tunnel
writer.close()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 548, in close
self._flush()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 545, in _flush
self._request_callback(gen())
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/tabletunnel.py", line 315, in upload
self._client.put(url, data=data, params=params, headers=headers)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 154, in put
return self.request(url, 'put', data=data, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 133, in request
proxies=self._proxy)
File "/opt/conda/lib/python3.7/site-packages/requests/sessions.py", line 643, in send
r = adapter.send(request, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 498, in send
raise ConnectionError(err, request=request)
requests.exceptions.ConnectionError: timed out
2021-02-03T05:21:04Z (<ThreadPoolWorker at 0x7f14c2e8e7e0 thread_ident=0x7f14cfe3f700 threadpool-hub=<Hub at 0x7f14c338d1d0 thread_ident=0x7f14f8070740>>, <cyfunction GeventThreadPool._wrap_watch.<locals>.inner at 0x7f14c2ec37a0>) failed with ConnectionError
2021-02-03 13:21:04,748 mars.worker.calc 77 ERROR Unexpected exception occurred in BaseCalcActor._calc_results. graph_key=402439ba636c37f11bea13fe28ad9bfb
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 479, in send
r = low_conn.getresponse(buffering=True)
TypeError: getresponse() got an unexpected keyword argument 'buffering'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 482, in send
r = low_conn.getresponse()
File "/opt/conda/lib/python3.7/http/client.py", line 1344, in getresponse
response.begin()
File "/opt/conda/lib/python3.7/http/client.py", line 306, in begin
version, status, reason = self._read_status()
File "/opt/conda/lib/python3.7/http/client.py", line 267, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/opt/conda/lib/python3.7/socket.py", line 589, in readinto
return self._sock.recv_into(b)
socket.timeout: timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 200, in _calc_results
chunk_targets, retval=False).result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 201, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 649, in handle
return runner(results, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 359, in execute
cls._execute_arrow_tunnel(ctx, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 349, in _execute_arrow_tunnel
writer.close()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 548, in close
self._flush()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 545, in _flush
self._request_callback(gen())
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/tabletunnel.py", line 315, in upload
self._client.put(url, data=data, params=params, headers=headers)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 154, in put
return self.request(url, 'put', data=data, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 133, in request
proxies=self._proxy)
File "/opt/conda/lib/python3.7/site-packages/requests/sessions.py", line 643, in send
r = adapter.send(request, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 498, in send
raise ConnectionError(err, request=request)
requests.exceptions.ConnectionError: timed out
2021-02-03 13:21:04,749 mars.promise 77 ERROR Exception met in executing promise.
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 479, in send
r = low_conn.getresponse(buffering=True)
TypeError: getresponse() got an unexpected keyword argument 'buffering'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 482, in send
r = low_conn.getresponse()
File "/opt/conda/lib/python3.7/http/client.py", line 1344, in getresponse
response.begin()
File "/opt/conda/lib/python3.7/http/client.py", line 306, in begin
version, status, reason = self._read_status()
File "/opt/conda/lib/python3.7/http/client.py", line 267, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/opt/conda/lib/python3.7/socket.py", line 589, in readinto
return self._sock.recv_into(b)
socket.timeout: timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 301, in <lambda>
.then(lambda context_dict: _start_calc(context_dict)) \
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 276, in _start_calc
return self._calc_results(session_id, graph_key, graph, context_dict, chunk_targets)
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 200, in _calc_results
chunk_targets, retval=False).result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 201, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 649, in handle
return runner(results, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 359, in execute
cls._execute_arrow_tunnel(ctx, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 349, in _execute_arrow_tunnel
writer.close()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 548, in close
self._flush()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 545, in _flush
self._request_callback(gen())
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/tabletunnel.py", line 315, in upload
self._client.put(url, data=data, params=params, headers=headers)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 154, in put
return self.request(url, 'put', data=data, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 133, in request
proxies=self._proxy)
File "/opt/conda/lib/python3.7/site-packages/requests/sessions.py", line 643, in send
r = adapter.send(request, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 498, in send
raise ConnectionError(err, request=request)
requests.exceptions.ConnectionError: timed out
2021-02-03 13:21:04,751 mars.promise 77 ERROR Unhandled exception in promise
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 479, in send
r = low_conn.getresponse(buffering=True)
TypeError: getresponse() got an unexpected keyword argument 'buffering'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 482, in send
r = low_conn.getresponse()
File "/opt/conda/lib/python3.7/http/client.py", line 1344, in getresponse
response.begin()
File "/opt/conda/lib/python3.7/http/client.py", line 306, in begin
version, status, reason = self._read_status()
File "/opt/conda/lib/python3.7/http/client.py", line 267, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/opt/conda/lib/python3.7/socket.py", line 589, in readinto
return self._sock.recv_into(b)
socket.timeout: timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 301, in <lambda>
.then(lambda context_dict: _start_calc(context_dict)) \
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 276, in _start_calc
return self._calc_results(session_id, graph_key, graph, context_dict, chunk_targets)
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 200, in _calc_results
chunk_targets, retval=False).result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 201, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/opt/conda/lib/python3.7/site-packages/mars/executor.py", line 649, in handle
return runner(results, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 359, in execute
cls._execute_arrow_tunnel(ctx, op)
File "/opt/conda/lib/python3.7/site-packages/odps/mars_extension/dataframe/datastore.py", line 349, in _execute_arrow_tunnel
writer.close()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 548, in close
self._flush()
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/io/writer.py", line 545, in _flush
self._request_callback(gen())
File "/opt/conda/lib/python3.7/site-packages/odps/tunnel/tabletunnel.py", line 315, in upload
self._client.put(url, data=data, params=params, headers=headers)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 154, in put
return self.request(url, 'put', data=data, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/odps/rest.py", line 133, in request
proxies=self._proxy)
File "/opt/conda/lib/python3.7/site-packages/requests/sessions.py", line 643, in send
r = adapter.send(request, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/requests/adapters.py", line 498, in send
raise ConnectionError(err, request=request)
requests.exceptions.ConnectionError: timed out
2021-02-03 13:21:04,753 mars.promise 77 ERROR Exception met in executing promise.
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 302, in <lambda>
.then(lambda keys: _finalize(keys, None), lambda *exc_info: _finalize(None, exc_info))
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 285, in _finalize
self.tell_promise(callback, *exc_info, _accept=False)
File "/opt/conda/lib/python3.7/site-packages/mars/promise.py", line 530, in tell_promise
return self.ctx.actor_ref(uid, address=address).tell(callback_args, wait=wait)
File "mars/actors/core.pyx", line 39, in mars.actors.core.ActorRef.tell
File "mars/actors/core.pyx", line 41, in mars.actors.core.ActorRef.tell
File "mars/actors/pool/gevent_pool.pyx", line 282, in mars.actors.pool.gevent_pool.ActorContext.tell
File "mars/actors/pool/gevent_pool.pyx", line 853, in mars.actors.pool.gevent_pool.Communicator.tell
File "mars/actors/pool/gevent_pool.pyx", line 859, in mars.actors.pool.gevent_pool.Communicator.tell
File "mars/actors/pool/gevent_pool.pyx", line 846, in mars.actors.pool.gevent_pool.Communicator._send
File "mars/actors/pool/gevent_pool.pyx", line 755, in mars.actors.pool.gevent_pool.Communicator._dispatch
File "mars/actors/pool/gevent_pool.pyx", line 819, in mars.actors.pool.gevent_pool.Communicator._send_process
File "mars/actors/pool/messages.pyx", line 570, in mars.actors.pool.messages.pack_tell_message
File "mars/actors/pool/messages.pyx", line 526, in mars.actors.pool.messages._pack_send_message
File "mars/actors/pool/messages.pyx", line 469, in mars.actors.pool.messages._pack_message
File "mars/actors/pool/messages.pyx", line 436, in mars.actors.pool.messages._pack_tuple_message
TypeError: can't pickle generator objects
2021-02-03 13:21:04,754 mars.promise 77 ERROR Unhandled exception in promise
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 302, in <lambda>
.then(lambda keys: _finalize(keys, None), lambda *exc_info: _finalize(None, exc_info))
File "/opt/conda/lib/python3.7/site-packages/mars/worker/calc.py", line 285, in _finalize
self.tell_promise(callback, *exc_info, _accept=False)
File "/opt/conda/lib/python3.7/site-packages/mars/promise.py", line 530, in tell_promise
return self.ctx.actor_ref(uid, address=address).tell(callback_args, wait=wait)
File "mars/actors/core.pyx", line 39, in mars.actors.core.ActorRef.tell
File "mars/actors/core.pyx", line 41, in mars.actors.core.ActorRef.tell
File "mars/actors/pool/gevent_pool.pyx", line 282, in mars.actors.pool.gevent_pool.ActorContext.tell
File "mars/actors/pool/gevent_pool.pyx", line 853, in mars.actors.pool.gevent_pool.Communicator.tell
File "mars/actors/pool/gevent_pool.pyx", line 859, in mars.actors.pool.gevent_pool.Communicator.tell
File "mars/actors/pool/gevent_pool.pyx", line 846, in mars.actors.pool.gevent_pool.Communicator._send
File "mars/actors/pool/gevent_pool.pyx", line 755, in mars.actors.pool.gevent_pool.Communicator._dispatch
File "mars/actors/pool/gevent_pool.pyx", line 819, in mars.actors.pool.gevent_pool.Communicator._send_process
2021-02-03 13:21:02,172 mars.worker.execution 58 DEBUG Executing states: {'402439ba636c37f11bea13fe28ad9bfb': (278.46709537506104, 'CALCULATING'), '4a57865de1482157ed1f3bdd28b8d030': (75.9635705947876, 'CALCULATING')}
File "mars/actors/pool/messages.pyx", line 570, in mars.actors.pool.messages.pack_tell_message
File "mars/actors/pool/messages.pyx", line 526, in mars.actors.pool.messages._pack_send_message
File "mars/actors/pool/messages.pyx", line 469, in mars.actors.pool.messages._pack_message
File "mars/actors/pool/messages.pyx", line 436, in mars.actors.pool.messages._pack_tuple_message
TypeError: can't pickle generator objects
|
TypeError
|
def execute_agg(cls, ctx, op):
(input_chunk,), device_id, xp = as_same_device(
[ctx[c.key] for c in op.inputs], device=op.device, ret_extra=True
)
axis = cls.get_axis(op.axis)
func_name = getattr(cls, "_func_name", None)
reduce_func = getattr(xp, func_name)
out = op.outputs[0]
with device(device_id):
if input_chunk.size == 0 and op.keepdims:
# input chunk is empty, when keepdims is True, return itself
ret = input_chunk
elif "dtype" in inspect.getfullargspec(reduce_func).args:
ret = reduce_func(
input_chunk, axis=axis, dtype=op.dtype, keepdims=bool(op.keepdims)
)
else:
ret = reduce_func(input_chunk, axis=axis, keepdims=bool(op.keepdims))
if hasattr(ret, "astype"):
# for non-object dtype
ret = ret.astype(op.dtype, order=out.order.value, copy=False)
ctx[out.key] = ret
|
def execute_agg(cls, ctx, op):
(input_chunk,), device_id, xp = as_same_device(
[ctx[c.key] for c in op.inputs], device=op.device, ret_extra=True
)
axis = cls.get_axis(op.axis)
func_name = getattr(cls, "_func_name", None)
reduce_func = getattr(xp, func_name)
out = op.outputs[0]
with device(device_id):
if "dtype" in inspect.getfullargspec(reduce_func).args:
ret = reduce_func(
input_chunk, axis=axis, dtype=op.dtype, keepdims=bool(op.keepdims)
)
else:
ret = reduce_func(input_chunk, axis=axis, keepdims=bool(op.keepdims))
if hasattr(ret, "astype"):
# for non-object dtype
ret = ret.astype(op.dtype, order=out.order.value, copy=False)
ctx[out.key] = ret
|
https://github.com/mars-project/mars/issues/1977
|
Traceback (most recent call last):
File "miniconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 3417, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-26-c79b5d2b7d19>", line 1, in <module>
histogram(vt).execute()
File "Code/mars/mars/core.py", line 764, in execute
return super().execute(session=session, **kw)
File "Code/mars/mars/core.py", line 379, in execute
return run()
File "Code/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "Code/mars/mars/session.py", line 506, in run
result = self._sess.run(*tileables, **kw)
File "Code/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "Code/mars/mars/utils.py", line 459, in _inner
return func(*args, **kwargs)
File "Code/mars/mars/executor.py", line 883, in execute_tileables
self.execute_graph(chunk_graph, list(temp_result_keys),
File "Code/mars/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "Code/mars/mars/executor.py", line 579, in execute
future.result()
File "miniconda3/lib/python3.8/concurrent/futures/_base.py", line 432, in result
return self.__get_result()
File "miniconda3/lib/python3.8/concurrent/futures/_base.py", line 388, in __get_result
raise self._exception
File "miniconda3/lib/python3.8/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "Code/mars/mars/utils.py", line 459, in _inner
return func(*args, **kwargs)
File "Code/mars/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "Code/mars/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "Code/mars/mars/executor.py", line 649, in handle
return runner(results, op)
File "Code/mars/mars/tensor/reduction/core.py", line 291, in execute
return cls.execute_agg(ctx, op)
File "Code/mars/mars/tensor/reduction/core.py", line 272, in execute_agg
ret = reduce_func(input_chunk, axis=axis,
File "<__array_function__ internals>", line 5, in amin
File "miniconda3/lib/python3.8/site-packages/numpy/core/fromnumeric.py", line 2830, in amin
return _wrapreduction(a, np.minimum, 'min', axis, None, out,
File "miniconda3/lib/python3.8/site-packages/numpy/core/fromnumeric.py", line 87, in _wrapreduction
return ufunc.reduce(obj, axis, dtype, out, **passkwargs)
ValueError: zero-size array to reduction operation minimum which has no identity
|
ValueError
|
def execute(cls, ctx, op):
(a,), device_id, xp = as_same_device(
[ctx[c.key] for c in op.inputs], device=op.device, ret_extra=True
)
if len(a) == 0:
# when chunk is empty, return the empty chunk itself
ctx[op.outputs[0].key] = ctx[op.outputs[-1].key] = a
return
with device(device_id):
n = op.n_partition
w = a.shape[op.axis] * 1.0 / (n + 1)
if not op.return_indices:
if op.kind is not None:
# sort
res = ctx[op.outputs[0].key] = _sort(a, op, xp)
else:
# do not sort, prepare for sample by `xp.partition`
kth = xp.linspace(
max(w - 1, 0), a.shape[op.axis] - 1, num=n, endpoint=False
).astype(int)
ctx[op.outputs[0].key] = res = xp.partition(
a, kth, axis=op.axis, order=op.order
)
else:
if op.kind is not None:
# argsort
indices = _argsort(a, op, xp)
else:
# do not sort, use `xp.argpartition`
kth = xp.linspace(
max(w - 1, 0), a.shape[op.axis] - 1, num=n, endpoint=False
).astype(int)
indices = xp.argpartition(a, kth, axis=op.axis, order=op.order)
ctx[op.outputs[0].key] = res = xp.take_along_axis(a, indices, op.axis)
ctx[op.outputs[1].key] = op.axis_offset + indices
# do regular sample
if op.order is not None:
res = res[op.order]
slc = xp.linspace(
max(w - 1, 0), a.shape[op.axis] - 1, num=n, endpoint=False
).astype(int)
slc = (slice(None),) * op.axis + (slc,)
ctx[op.outputs[-1].key] = res[slc]
|
def execute(cls, ctx, op):
(a,), device_id, xp = as_same_device(
[ctx[c.key] for c in op.inputs], device=op.device, ret_extra=True
)
with device(device_id):
n = op.n_partition
w = a.shape[op.axis] * 1.0 / (n + 1)
if not op.return_indices:
if op.kind is not None:
# sort
res = ctx[op.outputs[0].key] = _sort(a, op, xp)
else:
# do not sort, prepare for sample by `xp.partition`
kth = xp.linspace(
max(w - 1, 0), a.shape[op.axis] - 1, num=n, endpoint=False
).astype(int)
ctx[op.outputs[0].key] = res = xp.partition(
a, kth, axis=op.axis, order=op.order
)
else:
if op.kind is not None:
# argsort
indices = _argsort(a, op, xp)
else:
# do not sort, use `xp.argpartition`
kth = xp.linspace(
max(w - 1, 0), a.shape[op.axis] - 1, num=n, endpoint=False
).astype(int)
indices = xp.argpartition(a, kth, axis=op.axis, order=op.order)
ctx[op.outputs[0].key] = res = xp.take_along_axis(a, indices, op.axis)
ctx[op.outputs[1].key] = op.axis_offset + indices
# do regular sample
if op.order is not None:
res = res[op.order]
slc = xp.linspace(
max(w - 1, 0), a.shape[op.axis] - 1, num=n, endpoint=False
).astype(int)
slc = (slice(None),) * op.axis + (slc,)
ctx[op.outputs[-1].key] = res[slc]
|
https://github.com/mars-project/mars/issues/1960
|
In [59]: a = mt.random.randint(0, 2, size=(1000,), chunk_size=300)
In [60]: mt.sort(mt.sort(a)).execute()
---------------------------------------------------------------------------
IndexError Traceback (most recent call last)
<ipython-input-60-6c95dab88665> in <module>
----> 1 mt.sort(mt.sort(a)).execute()
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
644
645 if wait:
--> 646 return run()
647 else:
648 thread_executor = ThreadPoolExecutor(1)
~/Documents/mars_dev/mars/mars/core.py in run()
640
641 def run():
--> 642 self.data.execute(session, **kw)
643 return self
644
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
377
378 if wait:
--> 379 return run()
380 else:
381 # leverage ThreadPoolExecutor to submit task,
~/Documents/mars_dev/mars/mars/core.py in run()
372 def run():
373 # no more fetch, thus just fire run
--> 374 session.run(self, **kw)
375 # return Tileable or ExecutableTuple itself
376 return self
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
503 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
504 for t in tileables)
--> 505 result = self._sess.run(*tileables, **kw)
506
507 for t in tileables:
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
109 # set number of running cores
110 self.context.set_ncores(kw['n_parallel'])
--> 111 res = self._executor.execute_tileables(tileables, **kw)
112 return res
113
~/Documents/mars_dev/mars/mars/utils.py in _inner(*args, **kwargs)
456 def _inner(*args, **kwargs):
457 with self:
--> 458 return func(*args, **kwargs)
459
460 return _inner
~/Documents/mars_dev/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
884 n_parallel=n_parallel or n_thread,
885 print_progress=print_progress, mock=mock,
--> 886 chunk_result=chunk_result)
887
888 # update shape of tileable and its chunks whatever it's successful or not
~/Documents/mars_dev/mars/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
696 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
697 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 698 res = graph_execution.execute(retval)
699 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
700 if mock:
~/Documents/mars_dev/mars/mars/executor.py in execute(self, retval)
577 # wait until all the futures completed
578 for future in executed_futures:
--> 579 future.result()
580
581 if retval:
~/miniconda3/envs/py3.7.2/lib/python3.7/concurrent/futures/_base.py in result(self, timeout)
423 raise CancelledError()
424 elif self._state == FINISHED:
--> 425 return self.__get_result()
426
427 self._condition.wait(timeout)
~/miniconda3/envs/py3.7.2/lib/python3.7/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
~/miniconda3/envs/py3.7.2/lib/python3.7/concurrent/futures/thread.py in run(self)
55
56 try:
---> 57 result = self.fn(*self.args, **self.kwargs)
58 except BaseException as exc:
59 self.future.set_exception(exc)
~/Documents/mars_dev/mars/mars/utils.py in _inner(*args, **kwargs)
456 def _inner(*args, **kwargs):
457 with self:
--> 458 return func(*args, **kwargs)
459
460 return _inner
~/Documents/mars_dev/mars/mars/executor.py in _execute_operand(self, op)
444 # so we pass the first operand's first output to Executor.handle
445 first_op = ops[0]
--> 446 self.handle_op(first_op, results, self._mock)
447
448 # update maximal memory usage during execution
~/Documents/mars_dev/mars/mars/executor.py in handle_op(self, *args, **kw)
376
377 def handle_op(self, *args, **kw):
--> 378 return Executor.handle(*args, **kw)
379
380 def _order_starts(self):
~/Documents/mars_dev/mars/mars/executor.py in handle(cls, op, results, mock)
647 # The `UFuncTypeError` was introduced by numpy#12593 since v1.17.0.
648 try:
--> 649 return runner(results, op)
650 except UFuncTypeError as e:
651 raise TypeError(str(e)).with_traceback(sys.exc_info()[2]) from None
~/Documents/mars_dev/mars/mars/tensor/base/psrs.py in execute(cls, ctx, op)
426 num=n, endpoint=False).astype(int)
427 slc = (slice(None),) * op.axis + (slc,)
--> 428 ctx[op.outputs[-1].key] = res[slc]
429
430
IndexError: index 0 is out of bounds for axis 0 with size 0
|
IndexError
|
def execute(cls, ctx, op):
inputs, device_id, xp = as_same_device(
[ctx[c.key] for c in op.inputs if len(ctx[c.key]) > 0],
device=op.device,
ret_extra=True,
)
with device(device_id):
a = xp.concatenate(inputs, axis=op.axis)
p = len(inputs)
assert a.shape[op.axis] == p * len(op.inputs)
if op.kind is not None:
# sort
_sort(a, op, xp, inplace=True)
else:
# prepare for sampling via `partition`
kth = xp.linspace(
p - 1, a.shape[op.axis] - 1, num=p - 1, endpoint=False
).astype(int)
a.partition(kth, axis=op.axis)
select = xp.linspace(
p - 1, a.shape[op.axis] - 1, num=len(op.inputs) - 1, endpoint=False
).astype(int)
slc = (slice(None),) * op.axis + (select,)
ctx[op.outputs[0].key] = a[slc]
|
def execute(cls, ctx, op):
inputs, device_id, xp = as_same_device(
[ctx[c.key] for c in op.inputs], device=op.device, ret_extra=True
)
with device(device_id):
a = xp.concatenate(inputs, axis=op.axis)
p = len(inputs)
assert a.shape[op.axis] == p**2
if op.kind is not None:
# sort
_sort(a, op, xp, inplace=True)
else:
# prepare for sampling via `partition`
kth = xp.linspace(
p - 1, a.shape[op.axis] - 1, num=p - 1, endpoint=False
).astype(int)
a.partition(kth, axis=op.axis)
select = xp.linspace(
p - 1, a.shape[op.axis] - 1, num=p - 1, endpoint=False
).astype(int)
slc = (slice(None),) * op.axis + (select,)
ctx[op.outputs[0].key] = result = a[slc]
assert result.shape[op.axis] == p - 1
|
https://github.com/mars-project/mars/issues/1960
|
In [59]: a = mt.random.randint(0, 2, size=(1000,), chunk_size=300)
In [60]: mt.sort(mt.sort(a)).execute()
---------------------------------------------------------------------------
IndexError Traceback (most recent call last)
<ipython-input-60-6c95dab88665> in <module>
----> 1 mt.sort(mt.sort(a)).execute()
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
644
645 if wait:
--> 646 return run()
647 else:
648 thread_executor = ThreadPoolExecutor(1)
~/Documents/mars_dev/mars/mars/core.py in run()
640
641 def run():
--> 642 self.data.execute(session, **kw)
643 return self
644
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
377
378 if wait:
--> 379 return run()
380 else:
381 # leverage ThreadPoolExecutor to submit task,
~/Documents/mars_dev/mars/mars/core.py in run()
372 def run():
373 # no more fetch, thus just fire run
--> 374 session.run(self, **kw)
375 # return Tileable or ExecutableTuple itself
376 return self
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
503 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
504 for t in tileables)
--> 505 result = self._sess.run(*tileables, **kw)
506
507 for t in tileables:
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
109 # set number of running cores
110 self.context.set_ncores(kw['n_parallel'])
--> 111 res = self._executor.execute_tileables(tileables, **kw)
112 return res
113
~/Documents/mars_dev/mars/mars/utils.py in _inner(*args, **kwargs)
456 def _inner(*args, **kwargs):
457 with self:
--> 458 return func(*args, **kwargs)
459
460 return _inner
~/Documents/mars_dev/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
884 n_parallel=n_parallel or n_thread,
885 print_progress=print_progress, mock=mock,
--> 886 chunk_result=chunk_result)
887
888 # update shape of tileable and its chunks whatever it's successful or not
~/Documents/mars_dev/mars/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
696 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
697 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 698 res = graph_execution.execute(retval)
699 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
700 if mock:
~/Documents/mars_dev/mars/mars/executor.py in execute(self, retval)
577 # wait until all the futures completed
578 for future in executed_futures:
--> 579 future.result()
580
581 if retval:
~/miniconda3/envs/py3.7.2/lib/python3.7/concurrent/futures/_base.py in result(self, timeout)
423 raise CancelledError()
424 elif self._state == FINISHED:
--> 425 return self.__get_result()
426
427 self._condition.wait(timeout)
~/miniconda3/envs/py3.7.2/lib/python3.7/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
~/miniconda3/envs/py3.7.2/lib/python3.7/concurrent/futures/thread.py in run(self)
55
56 try:
---> 57 result = self.fn(*self.args, **self.kwargs)
58 except BaseException as exc:
59 self.future.set_exception(exc)
~/Documents/mars_dev/mars/mars/utils.py in _inner(*args, **kwargs)
456 def _inner(*args, **kwargs):
457 with self:
--> 458 return func(*args, **kwargs)
459
460 return _inner
~/Documents/mars_dev/mars/mars/executor.py in _execute_operand(self, op)
444 # so we pass the first operand's first output to Executor.handle
445 first_op = ops[0]
--> 446 self.handle_op(first_op, results, self._mock)
447
448 # update maximal memory usage during execution
~/Documents/mars_dev/mars/mars/executor.py in handle_op(self, *args, **kw)
376
377 def handle_op(self, *args, **kw):
--> 378 return Executor.handle(*args, **kw)
379
380 def _order_starts(self):
~/Documents/mars_dev/mars/mars/executor.py in handle(cls, op, results, mock)
647 # The `UFuncTypeError` was introduced by numpy#12593 since v1.17.0.
648 try:
--> 649 return runner(results, op)
650 except UFuncTypeError as e:
651 raise TypeError(str(e)).with_traceback(sys.exc_info()[2]) from None
~/Documents/mars_dev/mars/mars/tensor/base/psrs.py in execute(cls, ctx, op)
426 num=n, endpoint=False).astype(int)
427 slc = (slice(None),) * op.axis + (slc,)
--> 428 ctx[op.outputs[-1].key] = res[slc]
429
430
IndexError: index 0 is out of bounds for axis 0 with size 0
|
IndexError
|
def _check_response_finished(self, graph_url, timeout=None):
import requests
try:
resp = self._req_session.get(graph_url, params={"wait_timeout": timeout})
except requests.ConnectionError as ex:
err_msg = str(ex)
if (
"ConnectionResetError" in err_msg
or "Connection refused" in err_msg
or "Connection aborted" in err_msg
):
return False
raise
if resp.status_code == 504:
logging.debug("Gateway Time-out, try again")
return False
if resp.status_code >= 400:
raise SystemError(
f"Failed to obtain execution status. Code: {resp.status_code}, "
f"Reason: {resp.reason}, Content:\n{resp.text}"
)
resp_json = self._handle_json_response(resp, raises=False)
if resp_json["state"] == "succeeded":
return True
elif resp_json["state"] in ("running", "preparing"):
return False
elif resp_json["state"] in ("cancelled", "cancelling"):
raise ExecutionInterrupted
elif resp_json["state"] == "failed":
if "exc_info" in resp_json:
exc_info = pickle.loads(base64.b64decode(resp_json["exc_info"]))
exc = exc_info[1].with_traceback(exc_info[2])
raise ExecutionFailed("Graph execution failed.") from exc
else:
raise ExecutionFailed("Graph execution failed with unknown reason.")
raise ExecutionStateUnknown("Unknown graph execution state " + resp_json["state"])
|
def _check_response_finished(self, graph_url, timeout=None):
import requests
try:
resp = self._req_session.get(graph_url, params={"wait_timeout": timeout})
except requests.ConnectionError as ex:
err_msg = str(ex)
if "ConnectionResetError" in err_msg or "Connection refused" in err_msg:
return False
raise
if resp.status_code == 504:
logging.debug("Gateway Time-out, try again")
return False
if resp.status_code >= 400:
raise SystemError(
f"Failed to obtain execution status. Code: {resp.status_code}, "
f"Reason: {resp.reason}, Content:\n{resp.text}"
)
resp_json = self._handle_json_response(resp, raises=False)
if resp_json["state"] == "succeeded":
return True
elif resp_json["state"] in ("running", "preparing"):
return False
elif resp_json["state"] in ("cancelled", "cancelling"):
raise ExecutionInterrupted
elif resp_json["state"] == "failed":
if "exc_info" in resp_json:
exc_info = pickle.loads(base64.b64decode(resp_json["exc_info"]))
exc = exc_info[1].with_traceback(exc_info[2])
raise ExecutionFailed("Graph execution failed.") from exc
else:
raise ExecutionFailed("Graph execution failed with unknown reason.")
raise ExecutionStateUnknown("Unknown graph execution state " + resp_json["state"])
|
https://github.com/mars-project/mars/issues/1960
|
In [59]: a = mt.random.randint(0, 2, size=(1000,), chunk_size=300)
In [60]: mt.sort(mt.sort(a)).execute()
---------------------------------------------------------------------------
IndexError Traceback (most recent call last)
<ipython-input-60-6c95dab88665> in <module>
----> 1 mt.sort(mt.sort(a)).execute()
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
644
645 if wait:
--> 646 return run()
647 else:
648 thread_executor = ThreadPoolExecutor(1)
~/Documents/mars_dev/mars/mars/core.py in run()
640
641 def run():
--> 642 self.data.execute(session, **kw)
643 return self
644
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
377
378 if wait:
--> 379 return run()
380 else:
381 # leverage ThreadPoolExecutor to submit task,
~/Documents/mars_dev/mars/mars/core.py in run()
372 def run():
373 # no more fetch, thus just fire run
--> 374 session.run(self, **kw)
375 # return Tileable or ExecutableTuple itself
376 return self
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
503 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
504 for t in tileables)
--> 505 result = self._sess.run(*tileables, **kw)
506
507 for t in tileables:
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
109 # set number of running cores
110 self.context.set_ncores(kw['n_parallel'])
--> 111 res = self._executor.execute_tileables(tileables, **kw)
112 return res
113
~/Documents/mars_dev/mars/mars/utils.py in _inner(*args, **kwargs)
456 def _inner(*args, **kwargs):
457 with self:
--> 458 return func(*args, **kwargs)
459
460 return _inner
~/Documents/mars_dev/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
884 n_parallel=n_parallel or n_thread,
885 print_progress=print_progress, mock=mock,
--> 886 chunk_result=chunk_result)
887
888 # update shape of tileable and its chunks whatever it's successful or not
~/Documents/mars_dev/mars/mars/executor.py in execute_graph(self, graph, keys, n_parallel, print_progress, mock, no_intermediate, compose, retval, chunk_result)
696 print_progress=print_progress, mock=mock, mock_max_memory=self._mock_max_memory,
697 fetch_keys=fetch_keys, no_intermediate=no_intermediate)
--> 698 res = graph_execution.execute(retval)
699 self._mock_max_memory = max(self._mock_max_memory, graph_execution._mock_max_memory)
700 if mock:
~/Documents/mars_dev/mars/mars/executor.py in execute(self, retval)
577 # wait until all the futures completed
578 for future in executed_futures:
--> 579 future.result()
580
581 if retval:
~/miniconda3/envs/py3.7.2/lib/python3.7/concurrent/futures/_base.py in result(self, timeout)
423 raise CancelledError()
424 elif self._state == FINISHED:
--> 425 return self.__get_result()
426
427 self._condition.wait(timeout)
~/miniconda3/envs/py3.7.2/lib/python3.7/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
~/miniconda3/envs/py3.7.2/lib/python3.7/concurrent/futures/thread.py in run(self)
55
56 try:
---> 57 result = self.fn(*self.args, **self.kwargs)
58 except BaseException as exc:
59 self.future.set_exception(exc)
~/Documents/mars_dev/mars/mars/utils.py in _inner(*args, **kwargs)
456 def _inner(*args, **kwargs):
457 with self:
--> 458 return func(*args, **kwargs)
459
460 return _inner
~/Documents/mars_dev/mars/mars/executor.py in _execute_operand(self, op)
444 # so we pass the first operand's first output to Executor.handle
445 first_op = ops[0]
--> 446 self.handle_op(first_op, results, self._mock)
447
448 # update maximal memory usage during execution
~/Documents/mars_dev/mars/mars/executor.py in handle_op(self, *args, **kw)
376
377 def handle_op(self, *args, **kw):
--> 378 return Executor.handle(*args, **kw)
379
380 def _order_starts(self):
~/Documents/mars_dev/mars/mars/executor.py in handle(cls, op, results, mock)
647 # The `UFuncTypeError` was introduced by numpy#12593 since v1.17.0.
648 try:
--> 649 return runner(results, op)
650 except UFuncTypeError as e:
651 raise TypeError(str(e)).with_traceback(sys.exc_info()[2]) from None
~/Documents/mars_dev/mars/mars/tensor/base/psrs.py in execute(cls, ctx, op)
426 num=n, endpoint=False).astype(int)
427 slc = (slice(None),) * op.axis + (slc,)
--> 428 ctx[op.outputs[-1].key] = res[slc]
429
430
IndexError: index 0 is out of bounds for axis 0 with size 0
|
IndexError
|
def rechunk(
a, chunk_size, threshold=None, chunk_size_limit=None, reassign_worker=False
):
if not any(pd.isna(s) for s in a.shape) and not a.is_coarse():
try:
check_chunks_unknown_shape([a], ValueError)
except ValueError:
# due to reason that tileable has unknown chunk shape,
# just ignore to hand over to operand
pass
else:
# do client check only when no unknown shape,
# real nsplits will be recalculated inside `tile`
chunk_size = _get_chunk_size(a, chunk_size)
if chunk_size == a.nsplits:
return a
op = DataFrameRechunk(
chunk_size=chunk_size,
threshold=threshold,
chunk_size_limit=chunk_size_limit,
reassign_worker=reassign_worker,
)
return op(a)
|
def rechunk(
a, chunk_size, threshold=None, chunk_size_limit=None, reassign_worker=False
):
if not any(pd.isna(s) for s in a.shape) and not a.is_coarse():
# do client check only when no unknown shape,
# real nsplits will be recalculated inside `tile`
chunk_size = _get_chunk_size(a, chunk_size)
if chunk_size == a.nsplits:
return a
op = DataFrameRechunk(
chunk_size=chunk_size,
threshold=threshold,
chunk_size_limit=chunk_size_limit,
reassign_worker=reassign_worker,
)
return op(a)
|
https://github.com/mars-project/mars/issues/1963
|
In [1]: import mars.dataframe as md
In [2]: import numpy as np
In [3]: df = md.DataFrame({'a': np.random.randint(100, size=10), 'b':np.random.rand(10), 'label': np.random.randint(2, size=10)}, chunk_size=4)
In [4]: df = df[df.a > 0]
In [5]: df._shape = (10, 3)
In [6]: data = df[['a', 'b']]
In [7]: label = df['label']
In [8]: from mars.learn.contrib import xgboost as xgb
In [9]: xgb.MarsDMatrix(data=data, label=label).execute()
Out[9]:
Traceback (most recent call last):
File "/Users/qinxuye/miniconda3/lib/python3.7/site-packages/IPython/core/interactiveshell.py", line 3331, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-13-43292b123748>", line 1, in <module>
xgb.MarsDMatrix(data=data, label=label).execute()
File "/Users/qinxuye/Workspace/mars/mars/learn/contrib/xgboost/dmatrix.py", line 327, in to_dmatrix
outs.execute(session=session, **(run_kwargs or dict()))
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 736, in execute
return super().execute(session=session, **kw)
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 379, in execute
return run()
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/qinxuye/Workspace/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/qinxuye/Workspace/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/executor.py", line 866, in execute_tileables
tileables, tileable_graph=tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 348, in build
tileables, tileable_graph=tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 201, in _tile
tds[0]._inplace_tile()
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 168, in _inplace_tile
return handler.inplace_tile(self)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 136, in inplace_tile
dispatched = self.dispatch(to_tile.op)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/qinxuye/Workspace/mars/mars/learn/contrib/xgboost/dmatrix.py", line 255, in tile
return cls._tile_multi_output(op)
File "/Users/qinxuye/Workspace/mars/mars/learn/contrib/xgboost/dmatrix.py", line 139, in _tile_multi_output
label = label.rechunk({0: nsplit})._inplace_tile()
File "/Users/qinxuye/Workspace/mars/mars/tensor/rechunk/rechunk.py", line 97, in rechunk
chunk_size = get_nsplits(tensor, chunk_size, tensor.dtype.itemsize)
File "/Users/qinxuye/Workspace/mars/mars/tensor/rechunk/core.py", line 38, in get_nsplits
return decide_chunk_sizes(tileable.shape, chunk_size, itemsize)
File "/Users/qinxuye/Workspace/mars/mars/tensor/utils.py", line 551, in decide_chunk_sizes
return normalize_chunk_sizes(shape, tuple(chunk_size[j] for j in range(len(shape))))
File "/Users/qinxuye/Workspace/mars/mars/tensor/utils.py", line 66, in normalize_chunk_sizes
raise ValueError('chunks shape should be of the same length, '
ValueError: chunks shape should be of the same length, got shape: 10, chunks: (nan, nan, nan)
|
ValueError
|
def predict(self, data, **kw):
session = kw.pop("session", None)
run_kwargs = kw.pop("run_kwargs", dict())
run = kw.pop("run", True)
prob = predict(self.get_booster(), data, run=False, **kw)
if prob.ndim > 1:
prediction = mt.argmax(prob, axis=1)
else:
prediction = (prob > 0.5).astype(mt.int64)
if run:
prediction.execute(session=session, **run_kwargs)
return prediction
|
def predict(self, data, **kw):
session = kw.pop("session", None)
run_kwargs = kw.pop("run_kwargs", dict())
run = kw.pop("run", True)
if kw:
raise TypeError(
f"predict got an unexpected keyword argument '{next(iter(kw))}'"
)
prob = predict(self.get_booster(), data, run=False)
if prob.ndim > 1:
prediction = mt.argmax(prob, axis=1)
else:
prediction = (prob > 0.5).astype(mt.int64)
if run:
prediction.execute(session=session, **run_kwargs)
return prediction
|
https://github.com/mars-project/mars/issues/1963
|
In [1]: import mars.dataframe as md
In [2]: import numpy as np
In [3]: df = md.DataFrame({'a': np.random.randint(100, size=10), 'b':np.random.rand(10), 'label': np.random.randint(2, size=10)}, chunk_size=4)
In [4]: df = df[df.a > 0]
In [5]: df._shape = (10, 3)
In [6]: data = df[['a', 'b']]
In [7]: label = df['label']
In [8]: from mars.learn.contrib import xgboost as xgb
In [9]: xgb.MarsDMatrix(data=data, label=label).execute()
Out[9]:
Traceback (most recent call last):
File "/Users/qinxuye/miniconda3/lib/python3.7/site-packages/IPython/core/interactiveshell.py", line 3331, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-13-43292b123748>", line 1, in <module>
xgb.MarsDMatrix(data=data, label=label).execute()
File "/Users/qinxuye/Workspace/mars/mars/learn/contrib/xgboost/dmatrix.py", line 327, in to_dmatrix
outs.execute(session=session, **(run_kwargs or dict()))
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 736, in execute
return super().execute(session=session, **kw)
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 379, in execute
return run()
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/qinxuye/Workspace/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/qinxuye/Workspace/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/executor.py", line 866, in execute_tileables
tileables, tileable_graph=tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 348, in build
tileables, tileable_graph=tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 201, in _tile
tds[0]._inplace_tile()
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 168, in _inplace_tile
return handler.inplace_tile(self)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 136, in inplace_tile
dispatched = self.dispatch(to_tile.op)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/qinxuye/Workspace/mars/mars/learn/contrib/xgboost/dmatrix.py", line 255, in tile
return cls._tile_multi_output(op)
File "/Users/qinxuye/Workspace/mars/mars/learn/contrib/xgboost/dmatrix.py", line 139, in _tile_multi_output
label = label.rechunk({0: nsplit})._inplace_tile()
File "/Users/qinxuye/Workspace/mars/mars/tensor/rechunk/rechunk.py", line 97, in rechunk
chunk_size = get_nsplits(tensor, chunk_size, tensor.dtype.itemsize)
File "/Users/qinxuye/Workspace/mars/mars/tensor/rechunk/core.py", line 38, in get_nsplits
return decide_chunk_sizes(tileable.shape, chunk_size, itemsize)
File "/Users/qinxuye/Workspace/mars/mars/tensor/utils.py", line 551, in decide_chunk_sizes
return normalize_chunk_sizes(shape, tuple(chunk_size[j] for j in range(len(shape))))
File "/Users/qinxuye/Workspace/mars/mars/tensor/utils.py", line 66, in normalize_chunk_sizes
raise ValueError('chunks shape should be of the same length, '
ValueError: chunks shape should be of the same length, got shape: 10, chunks: (nan, nan, nan)
|
ValueError
|
def __init__(
self, data=None, model=None, kwargs=None, output_types=None, gpu=None, **kw
):
super().__init__(
_data=data,
_model=model,
_kwargs=kwargs,
_gpu=gpu,
_output_types=output_types,
**kw,
)
|
def __init__(self, data=None, model=None, output_types=None, gpu=None, **kw):
super().__init__(
_data=data, _model=model, _gpu=gpu, _output_types=output_types, **kw
)
|
https://github.com/mars-project/mars/issues/1963
|
In [1]: import mars.dataframe as md
In [2]: import numpy as np
In [3]: df = md.DataFrame({'a': np.random.randint(100, size=10), 'b':np.random.rand(10), 'label': np.random.randint(2, size=10)}, chunk_size=4)
In [4]: df = df[df.a > 0]
In [5]: df._shape = (10, 3)
In [6]: data = df[['a', 'b']]
In [7]: label = df['label']
In [8]: from mars.learn.contrib import xgboost as xgb
In [9]: xgb.MarsDMatrix(data=data, label=label).execute()
Out[9]:
Traceback (most recent call last):
File "/Users/qinxuye/miniconda3/lib/python3.7/site-packages/IPython/core/interactiveshell.py", line 3331, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-13-43292b123748>", line 1, in <module>
xgb.MarsDMatrix(data=data, label=label).execute()
File "/Users/qinxuye/Workspace/mars/mars/learn/contrib/xgboost/dmatrix.py", line 327, in to_dmatrix
outs.execute(session=session, **(run_kwargs or dict()))
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 736, in execute
return super().execute(session=session, **kw)
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 379, in execute
return run()
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/qinxuye/Workspace/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/qinxuye/Workspace/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/executor.py", line 866, in execute_tileables
tileables, tileable_graph=tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 348, in build
tileables, tileable_graph=tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 201, in _tile
tds[0]._inplace_tile()
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 168, in _inplace_tile
return handler.inplace_tile(self)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 136, in inplace_tile
dispatched = self.dispatch(to_tile.op)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/qinxuye/Workspace/mars/mars/learn/contrib/xgboost/dmatrix.py", line 255, in tile
return cls._tile_multi_output(op)
File "/Users/qinxuye/Workspace/mars/mars/learn/contrib/xgboost/dmatrix.py", line 139, in _tile_multi_output
label = label.rechunk({0: nsplit})._inplace_tile()
File "/Users/qinxuye/Workspace/mars/mars/tensor/rechunk/rechunk.py", line 97, in rechunk
chunk_size = get_nsplits(tensor, chunk_size, tensor.dtype.itemsize)
File "/Users/qinxuye/Workspace/mars/mars/tensor/rechunk/core.py", line 38, in get_nsplits
return decide_chunk_sizes(tileable.shape, chunk_size, itemsize)
File "/Users/qinxuye/Workspace/mars/mars/tensor/utils.py", line 551, in decide_chunk_sizes
return normalize_chunk_sizes(shape, tuple(chunk_size[j] for j in range(len(shape))))
File "/Users/qinxuye/Workspace/mars/mars/tensor/utils.py", line 66, in normalize_chunk_sizes
raise ValueError('chunks shape should be of the same length, '
ValueError: chunks shape should be of the same length, got shape: 10, chunks: (nan, nan, nan)
|
ValueError
|
def __call__(self):
num_class = self._model.attr("num_class")
if num_class is not None:
num_class = int(num_class)
if num_class is not None:
shape = (self._data.shape[0], num_class)
else:
shape = (self._data.shape[0],)
inputs = [self._data]
if self.output_types[0] == OutputType.tensor:
# tensor
return self.new_tileable(
inputs, shape=shape, dtype=np.dtype(np.float32), order=TensorOrder.C_ORDER
)
elif self.output_types[0] == OutputType.dataframe:
# dataframe
dtypes = pd.DataFrame(np.random.rand(0, num_class), dtype=np.float32).dtypes
return self.new_tileable(
inputs,
shape=shape,
dtypes=dtypes,
columns_value=parse_index(dtypes.index),
index_value=self._data.index_value,
)
else:
# series
return self.new_tileable(
inputs,
shape=shape,
index_value=self._data.index_value,
name="predictions",
dtype=np.dtype(np.float32),
)
|
def __call__(self):
num_class = self._model.attr("num_class")
if num_class is not None:
num_class = int(num_class)
if num_class is not None:
shape = (self._data.shape[0], num_class)
else:
shape = (self._data.shape[0],)
if self.output_types[0] == OutputType.tensor:
# tensor
return self.new_tileable(
[self._data],
shape=shape,
dtype=np.dtype(np.float32),
order=TensorOrder.C_ORDER,
)
elif self.output_types[0] == OutputType.dataframe:
# dataframe
dtypes = pd.DataFrame(np.random.rand(0, num_class), dtype=np.float32).dtypes
return self.new_tileable(
[self._data],
shape=shape,
dtypes=dtypes,
columns_value=parse_index(dtypes.index),
index_value=self._data.index_value,
)
else:
# series
return self.new_tileable(
[self._data],
shape=shape,
index_value=self._data.index_value,
name="predictions",
dtype=np.dtype(np.float32),
)
|
https://github.com/mars-project/mars/issues/1963
|
In [1]: import mars.dataframe as md
In [2]: import numpy as np
In [3]: df = md.DataFrame({'a': np.random.randint(100, size=10), 'b':np.random.rand(10), 'label': np.random.randint(2, size=10)}, chunk_size=4)
In [4]: df = df[df.a > 0]
In [5]: df._shape = (10, 3)
In [6]: data = df[['a', 'b']]
In [7]: label = df['label']
In [8]: from mars.learn.contrib import xgboost as xgb
In [9]: xgb.MarsDMatrix(data=data, label=label).execute()
Out[9]:
Traceback (most recent call last):
File "/Users/qinxuye/miniconda3/lib/python3.7/site-packages/IPython/core/interactiveshell.py", line 3331, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-13-43292b123748>", line 1, in <module>
xgb.MarsDMatrix(data=data, label=label).execute()
File "/Users/qinxuye/Workspace/mars/mars/learn/contrib/xgboost/dmatrix.py", line 327, in to_dmatrix
outs.execute(session=session, **(run_kwargs or dict()))
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 736, in execute
return super().execute(session=session, **kw)
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 379, in execute
return run()
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/qinxuye/Workspace/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/qinxuye/Workspace/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/executor.py", line 866, in execute_tileables
tileables, tileable_graph=tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 348, in build
tileables, tileable_graph=tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 201, in _tile
tds[0]._inplace_tile()
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 168, in _inplace_tile
return handler.inplace_tile(self)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 136, in inplace_tile
dispatched = self.dispatch(to_tile.op)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/qinxuye/Workspace/mars/mars/learn/contrib/xgboost/dmatrix.py", line 255, in tile
return cls._tile_multi_output(op)
File "/Users/qinxuye/Workspace/mars/mars/learn/contrib/xgboost/dmatrix.py", line 139, in _tile_multi_output
label = label.rechunk({0: nsplit})._inplace_tile()
File "/Users/qinxuye/Workspace/mars/mars/tensor/rechunk/rechunk.py", line 97, in rechunk
chunk_size = get_nsplits(tensor, chunk_size, tensor.dtype.itemsize)
File "/Users/qinxuye/Workspace/mars/mars/tensor/rechunk/core.py", line 38, in get_nsplits
return decide_chunk_sizes(tileable.shape, chunk_size, itemsize)
File "/Users/qinxuye/Workspace/mars/mars/tensor/utils.py", line 551, in decide_chunk_sizes
return normalize_chunk_sizes(shape, tuple(chunk_size[j] for j in range(len(shape))))
File "/Users/qinxuye/Workspace/mars/mars/tensor/utils.py", line 66, in normalize_chunk_sizes
raise ValueError('chunks shape should be of the same length, '
ValueError: chunks shape should be of the same length, got shape: 10, chunks: (nan, nan, nan)
|
ValueError
|
def execute(cls, ctx, op):
from xgboost import DMatrix
raw_data = data = ctx[op.data.key]
if isinstance(data, tuple):
data = ToDMatrix.get_xgb_dmatrix(data)
else:
data = data.spmatrix if hasattr(data, "spmatrix") else data
data = DMatrix(data)
# do not pass arguments that are None
kwargs = dict((k, v) for k, v in op.kwargs.items() if v is not None)
result = op.model.predict(data, **kwargs)
if isinstance(op.outputs[0], DATAFRAME_CHUNK_TYPE):
result = pd.DataFrame(result, index=raw_data.index)
elif isinstance(op.outputs[0], SERIES_CHUNK_TYPE):
result = pd.Series(result, index=raw_data.index, name="predictions")
ctx[op.outputs[0].key] = result
|
def execute(cls, ctx, op):
from xgboost import DMatrix
raw_data = data = ctx[op.data.key]
if isinstance(data, tuple):
data = ToDMatrix.get_xgb_dmatrix(data)
else:
data = data.spmatrix if hasattr(data, "spmatrix") else data
data = DMatrix(data)
result = op.model.predict(data)
if isinstance(op.outputs[0], DATAFRAME_CHUNK_TYPE):
result = pd.DataFrame(result, index=raw_data.index)
elif isinstance(op.outputs[0], SERIES_CHUNK_TYPE):
result = pd.Series(result, index=raw_data.index, name="predictions")
ctx[op.outputs[0].key] = result
|
https://github.com/mars-project/mars/issues/1963
|
In [1]: import mars.dataframe as md
In [2]: import numpy as np
In [3]: df = md.DataFrame({'a': np.random.randint(100, size=10), 'b':np.random.rand(10), 'label': np.random.randint(2, size=10)}, chunk_size=4)
In [4]: df = df[df.a > 0]
In [5]: df._shape = (10, 3)
In [6]: data = df[['a', 'b']]
In [7]: label = df['label']
In [8]: from mars.learn.contrib import xgboost as xgb
In [9]: xgb.MarsDMatrix(data=data, label=label).execute()
Out[9]:
Traceback (most recent call last):
File "/Users/qinxuye/miniconda3/lib/python3.7/site-packages/IPython/core/interactiveshell.py", line 3331, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-13-43292b123748>", line 1, in <module>
xgb.MarsDMatrix(data=data, label=label).execute()
File "/Users/qinxuye/Workspace/mars/mars/learn/contrib/xgboost/dmatrix.py", line 327, in to_dmatrix
outs.execute(session=session, **(run_kwargs or dict()))
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 736, in execute
return super().execute(session=session, **kw)
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 379, in execute
return run()
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/qinxuye/Workspace/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/qinxuye/Workspace/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/executor.py", line 866, in execute_tileables
tileables, tileable_graph=tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 348, in build
tileables, tileable_graph=tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 201, in _tile
tds[0]._inplace_tile()
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 168, in _inplace_tile
return handler.inplace_tile(self)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 136, in inplace_tile
dispatched = self.dispatch(to_tile.op)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/qinxuye/Workspace/mars/mars/learn/contrib/xgboost/dmatrix.py", line 255, in tile
return cls._tile_multi_output(op)
File "/Users/qinxuye/Workspace/mars/mars/learn/contrib/xgboost/dmatrix.py", line 139, in _tile_multi_output
label = label.rechunk({0: nsplit})._inplace_tile()
File "/Users/qinxuye/Workspace/mars/mars/tensor/rechunk/rechunk.py", line 97, in rechunk
chunk_size = get_nsplits(tensor, chunk_size, tensor.dtype.itemsize)
File "/Users/qinxuye/Workspace/mars/mars/tensor/rechunk/core.py", line 38, in get_nsplits
return decide_chunk_sizes(tileable.shape, chunk_size, itemsize)
File "/Users/qinxuye/Workspace/mars/mars/tensor/utils.py", line 551, in decide_chunk_sizes
return normalize_chunk_sizes(shape, tuple(chunk_size[j] for j in range(len(shape))))
File "/Users/qinxuye/Workspace/mars/mars/tensor/utils.py", line 66, in normalize_chunk_sizes
raise ValueError('chunks shape should be of the same length, '
ValueError: chunks shape should be of the same length, got shape: 10, chunks: (nan, nan, nan)
|
ValueError
|
def predict(
model,
data,
output_margin=False,
ntree_limit=None,
validate_features=True,
base_margin=None,
session=None,
run_kwargs=None,
run=True,
):
from xgboost import Booster
data = check_data(data)
if not isinstance(model, Booster):
raise TypeError(f"model has to be a xgboost.Booster, got {type(model)} instead")
num_class = model.attr("num_class")
if isinstance(data, TENSOR_TYPE):
output_types = [OutputType.tensor]
elif num_class is not None:
output_types = [OutputType.dataframe]
else:
output_types = [OutputType.series]
kwargs = {
"output_margin": output_margin,
"ntree_limit": ntree_limit,
"validate_features": validate_features,
"base_margin": base_margin,
}
op = XGBPredict(
data=data,
model=model,
kwargs=kwargs,
gpu=data.op.gpu,
output_types=output_types,
)
result = op()
if run:
result.execute(session=session, **(run_kwargs or dict()))
return result
|
def predict(model, data, session=None, run_kwargs=None, run=True):
from xgboost import Booster
data = check_data(data)
if not isinstance(model, Booster):
raise TypeError(f"model has to be a xgboost.Booster, got {type(model)} instead")
num_class = model.attr("num_class")
if isinstance(data, TENSOR_TYPE):
output_types = [OutputType.tensor]
elif num_class is not None:
output_types = [OutputType.dataframe]
else:
output_types = [OutputType.series]
op = XGBPredict(data=data, model=model, gpu=data.op.gpu, output_types=output_types)
result = op()
if run:
result.execute(session=session, **(run_kwargs or dict()))
return result
|
https://github.com/mars-project/mars/issues/1963
|
In [1]: import mars.dataframe as md
In [2]: import numpy as np
In [3]: df = md.DataFrame({'a': np.random.randint(100, size=10), 'b':np.random.rand(10), 'label': np.random.randint(2, size=10)}, chunk_size=4)
In [4]: df = df[df.a > 0]
In [5]: df._shape = (10, 3)
In [6]: data = df[['a', 'b']]
In [7]: label = df['label']
In [8]: from mars.learn.contrib import xgboost as xgb
In [9]: xgb.MarsDMatrix(data=data, label=label).execute()
Out[9]:
Traceback (most recent call last):
File "/Users/qinxuye/miniconda3/lib/python3.7/site-packages/IPython/core/interactiveshell.py", line 3331, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-13-43292b123748>", line 1, in <module>
xgb.MarsDMatrix(data=data, label=label).execute()
File "/Users/qinxuye/Workspace/mars/mars/learn/contrib/xgboost/dmatrix.py", line 327, in to_dmatrix
outs.execute(session=session, **(run_kwargs or dict()))
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 736, in execute
return super().execute(session=session, **kw)
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 379, in execute
return run()
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/qinxuye/Workspace/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/qinxuye/Workspace/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/executor.py", line 866, in execute_tileables
tileables, tileable_graph=tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 348, in build
tileables, tileable_graph=tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 201, in _tile
tds[0]._inplace_tile()
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 168, in _inplace_tile
return handler.inplace_tile(self)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 136, in inplace_tile
dispatched = self.dispatch(to_tile.op)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/qinxuye/Workspace/mars/mars/learn/contrib/xgboost/dmatrix.py", line 255, in tile
return cls._tile_multi_output(op)
File "/Users/qinxuye/Workspace/mars/mars/learn/contrib/xgboost/dmatrix.py", line 139, in _tile_multi_output
label = label.rechunk({0: nsplit})._inplace_tile()
File "/Users/qinxuye/Workspace/mars/mars/tensor/rechunk/rechunk.py", line 97, in rechunk
chunk_size = get_nsplits(tensor, chunk_size, tensor.dtype.itemsize)
File "/Users/qinxuye/Workspace/mars/mars/tensor/rechunk/core.py", line 38, in get_nsplits
return decide_chunk_sizes(tileable.shape, chunk_size, itemsize)
File "/Users/qinxuye/Workspace/mars/mars/tensor/utils.py", line 551, in decide_chunk_sizes
return normalize_chunk_sizes(shape, tuple(chunk_size[j] for j in range(len(shape))))
File "/Users/qinxuye/Workspace/mars/mars/tensor/utils.py", line 66, in normalize_chunk_sizes
raise ValueError('chunks shape should be of the same length, '
ValueError: chunks shape should be of the same length, got shape: 10, chunks: (nan, nan, nan)
|
ValueError
|
def predict(self, data, **kw):
session = kw.pop("session", None)
run_kwargs = kw.pop("run_kwargs", None)
return predict(
self.get_booster(), data, session=session, run_kwargs=run_kwargs, **kw
)
|
def predict(self, data, **kw):
session = kw.pop("session", None)
run_kwargs = kw.pop("run_kwargs", None)
if kw:
raise TypeError(
f"predict got an unexpected keyword argument '{next(iter(kw))}'"
)
return predict(self.get_booster(), data, session=session, run_kwargs=run_kwargs)
|
https://github.com/mars-project/mars/issues/1963
|
In [1]: import mars.dataframe as md
In [2]: import numpy as np
In [3]: df = md.DataFrame({'a': np.random.randint(100, size=10), 'b':np.random.rand(10), 'label': np.random.randint(2, size=10)}, chunk_size=4)
In [4]: df = df[df.a > 0]
In [5]: df._shape = (10, 3)
In [6]: data = df[['a', 'b']]
In [7]: label = df['label']
In [8]: from mars.learn.contrib import xgboost as xgb
In [9]: xgb.MarsDMatrix(data=data, label=label).execute()
Out[9]:
Traceback (most recent call last):
File "/Users/qinxuye/miniconda3/lib/python3.7/site-packages/IPython/core/interactiveshell.py", line 3331, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-13-43292b123748>", line 1, in <module>
xgb.MarsDMatrix(data=data, label=label).execute()
File "/Users/qinxuye/Workspace/mars/mars/learn/contrib/xgboost/dmatrix.py", line 327, in to_dmatrix
outs.execute(session=session, **(run_kwargs or dict()))
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 736, in execute
return super().execute(session=session, **kw)
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 379, in execute
return run()
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/qinxuye/Workspace/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/qinxuye/Workspace/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/executor.py", line 866, in execute_tileables
tileables, tileable_graph=tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 348, in build
tileables, tileable_graph=tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 201, in _tile
tds[0]._inplace_tile()
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 168, in _inplace_tile
return handler.inplace_tile(self)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 136, in inplace_tile
dispatched = self.dispatch(to_tile.op)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/qinxuye/Workspace/mars/mars/learn/contrib/xgboost/dmatrix.py", line 255, in tile
return cls._tile_multi_output(op)
File "/Users/qinxuye/Workspace/mars/mars/learn/contrib/xgboost/dmatrix.py", line 139, in _tile_multi_output
label = label.rechunk({0: nsplit})._inplace_tile()
File "/Users/qinxuye/Workspace/mars/mars/tensor/rechunk/rechunk.py", line 97, in rechunk
chunk_size = get_nsplits(tensor, chunk_size, tensor.dtype.itemsize)
File "/Users/qinxuye/Workspace/mars/mars/tensor/rechunk/core.py", line 38, in get_nsplits
return decide_chunk_sizes(tileable.shape, chunk_size, itemsize)
File "/Users/qinxuye/Workspace/mars/mars/tensor/utils.py", line 551, in decide_chunk_sizes
return normalize_chunk_sizes(shape, tuple(chunk_size[j] for j in range(len(shape))))
File "/Users/qinxuye/Workspace/mars/mars/tensor/utils.py", line 66, in normalize_chunk_sizes
raise ValueError('chunks shape should be of the same length, '
ValueError: chunks shape should be of the same length, got shape: 10, chunks: (nan, nan, nan)
|
ValueError
|
def rechunk(
tensor, chunk_size, threshold=None, chunk_size_limit=None, reassign_worker=False
):
if not any(np.isnan(s) for s in tensor.shape) and not tensor.is_coarse():
try:
check_chunks_unknown_shape([tensor], ValueError)
except ValueError:
# due to reason that tensor has unknown chunk shape,
# just ignore to hand over to operand
pass
else:
# do client check only when tensor has no unknown shape,
# otherwise, recalculate chunk_size in `tile`
chunk_size = get_nsplits(tensor, chunk_size, tensor.dtype.itemsize)
if chunk_size == tensor.nsplits:
return tensor
op = TensorRechunk(
chunk_size,
threshold,
chunk_size_limit,
reassign_worker=reassign_worker,
dtype=tensor.dtype,
sparse=tensor.issparse(),
)
return op(tensor)
|
def rechunk(
tensor, chunk_size, threshold=None, chunk_size_limit=None, reassign_worker=False
):
if not any(np.isnan(s) for s in tensor.shape) and not tensor.is_coarse():
# do client check only when tensor has no unknown shape,
# otherwise, recalculate chunk_size in `tile`
chunk_size = get_nsplits(tensor, chunk_size, tensor.dtype.itemsize)
if chunk_size == tensor.nsplits:
return tensor
op = TensorRechunk(
chunk_size,
threshold,
chunk_size_limit,
reassign_worker=reassign_worker,
dtype=tensor.dtype,
sparse=tensor.issparse(),
)
return op(tensor)
|
https://github.com/mars-project/mars/issues/1963
|
In [1]: import mars.dataframe as md
In [2]: import numpy as np
In [3]: df = md.DataFrame({'a': np.random.randint(100, size=10), 'b':np.random.rand(10), 'label': np.random.randint(2, size=10)}, chunk_size=4)
In [4]: df = df[df.a > 0]
In [5]: df._shape = (10, 3)
In [6]: data = df[['a', 'b']]
In [7]: label = df['label']
In [8]: from mars.learn.contrib import xgboost as xgb
In [9]: xgb.MarsDMatrix(data=data, label=label).execute()
Out[9]:
Traceback (most recent call last):
File "/Users/qinxuye/miniconda3/lib/python3.7/site-packages/IPython/core/interactiveshell.py", line 3331, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-13-43292b123748>", line 1, in <module>
xgb.MarsDMatrix(data=data, label=label).execute()
File "/Users/qinxuye/Workspace/mars/mars/learn/contrib/xgboost/dmatrix.py", line 327, in to_dmatrix
outs.execute(session=session, **(run_kwargs or dict()))
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 736, in execute
return super().execute(session=session, **kw)
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 379, in execute
return run()
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/qinxuye/Workspace/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/qinxuye/Workspace/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/executor.py", line 866, in execute_tileables
tileables, tileable_graph=tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 348, in build
tileables, tileable_graph=tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 201, in _tile
tds[0]._inplace_tile()
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 168, in _inplace_tile
return handler.inplace_tile(self)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 136, in inplace_tile
dispatched = self.dispatch(to_tile.op)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 458, in _inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/qinxuye/Workspace/mars/mars/learn/contrib/xgboost/dmatrix.py", line 255, in tile
return cls._tile_multi_output(op)
File "/Users/qinxuye/Workspace/mars/mars/learn/contrib/xgboost/dmatrix.py", line 139, in _tile_multi_output
label = label.rechunk({0: nsplit})._inplace_tile()
File "/Users/qinxuye/Workspace/mars/mars/tensor/rechunk/rechunk.py", line 97, in rechunk
chunk_size = get_nsplits(tensor, chunk_size, tensor.dtype.itemsize)
File "/Users/qinxuye/Workspace/mars/mars/tensor/rechunk/core.py", line 38, in get_nsplits
return decide_chunk_sizes(tileable.shape, chunk_size, itemsize)
File "/Users/qinxuye/Workspace/mars/mars/tensor/utils.py", line 551, in decide_chunk_sizes
return normalize_chunk_sizes(shape, tuple(chunk_size[j] for j in range(len(shape))))
File "/Users/qinxuye/Workspace/mars/mars/tensor/utils.py", line 66, in normalize_chunk_sizes
raise ValueError('chunks shape should be of the same length, '
ValueError: chunks shape should be of the same length, got shape: 10, chunks: (nan, nan, nan)
|
ValueError
|
def __mars_tensor__(self, dtype=None, order="K"):
return self._data.__mars_tensor__(dtype=dtype, order=order)
|
def __mars_tensor__(self, dtype=None, order="K"):
return self._to_mars_tensor(dtype=dtype, order=order)
|
https://github.com/mars-project/mars/issues/1943
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-52-f2912f2e33a0> in <module>
1 from mars.learn.metrics import accuracy_score
----> 2 accuracy_score(y_test,y_pred).execute()
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_classification.py in accuracy_score(y_true, y_pred, normalize, sample_weight, session, run_kwargs)
178 op = AccuracyScore(y_true=y_true, y_pred=y_pred, normalize=normalize,
179 sample_weight=sample_weight)
--> 180 score = op(y_true, y_pred)
181 return score.execute(session=session, **(run_kwargs or dict()))
/home/tops/lib/python3.6/site-packages/mars/utils.py in _inner(*args, **kwargs)
455 def _inner(*args, **kwargs):
456 with self:
--> 457 return func(*args, **kwargs)
458
459 return _inner
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_classification.py in __call__(self, y_true, y_pred)
76
77 def __call__(self, y_true, y_pred):
---> 78 type_true, y_true, y_pred = _check_targets(y_true, y_pred)
79 self._type_true = type_true
80 inputs = [y_true, y_pred, type_true]
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_check_targets.py in _check_targets(y_true, y_pred)
193 """
194 op = CheckTargets(y_true=y_true, y_pred=y_pred)
--> 195 return op(y_true, y_pred)
/home/tops/lib/python3.6/site-packages/mars/utils.py in _inner(*args, **kwargs)
455 def _inner(*args, **kwargs):
456 with self:
--> 457 return func(*args, **kwargs)
458
459 return _inner
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_check_targets.py in __call__(self, y_true, y_pred)
82 if isinstance(y_pred, (Base, Entity)):
83 inputs.append(y_pred)
---> 84 self._type_true = type_of_target(y_true)
85 self._type_pred = type_of_target(y_pred)
86 inputs.extend([self._type_true, self._type_pred])
/home/tops/lib/python3.6/site-packages/mars/learn/utils/multiclass.py in type_of_target(y)
403
404 if not valid:
--> 405 raise ValueError(f'Expected array-like (array or non-string sequence), got {y}')
406
407 sparse_pandas = (y.__class__.__name__ in ['SparseSeries', 'SparseArray'])
ValueError: Expected array-like (array or non-string sequence), got 159615 0
134010 0
199100 0
137756 0
200050 0
..
219880 0
82326 0
184439 0
198518 0
109959 0
Name: label_bigint, Length: 1146788, dtype: int64
|
ValueError
|
def to_frame(self, index: bool = True, name=None):
"""
Create a DataFrame with a column containing the Index.
Parameters
----------
index : bool, default True
Set the index of the returned DataFrame as the original Index.
name : object, default None
The passed name should substitute for the index name (if it has
one).
Returns
-------
DataFrame
DataFrame containing the original Index data.
See Also
--------
Index.to_series : Convert an Index to a Series.
Series.to_frame : Convert Series to DataFrame.
Examples
--------
>>> import mars.dataframe as md
>>> idx = md.Index(['Ant', 'Bear', 'Cow'], name='animal')
>>> idx.to_frame().execute()
animal
animal
Ant Ant
Bear Bear
Cow Cow
By default, the original Index is reused. To enforce a new Index:
>>> idx.to_frame(index=False).execute()
animal
0 Ant
1 Bear
2 Cow
To override the name of the resulting column, specify `name`:
>>> idx.to_frame(index=False, name='zoo').execute()
zoo
0 Ant
1 Bear
2 Cow
"""
from . import dataframe_from_tensor
if isinstance(self.index_value.value, IndexValue.MultiIndex):
old_names = self.index_value.value.names
if name is not None and not isinstance(name, Iterable) or isinstance(name, str):
raise TypeError("'name' must be a list / sequence of column names.")
name = list(name if name is not None else old_names)
if len(name) != len(old_names):
raise ValueError(
"'name' should have same length as number of levels on index."
)
columns = [
old or new or idx for idx, (old, new) in enumerate(zip(old_names, name))
]
else:
columns = [name or self.name or 0]
index_ = self if index else None
return dataframe_from_tensor(
self._data._to_mars_tensor(self, extract_multi_index=True),
index=index_,
columns=columns,
)
|
def to_frame(self, index: bool = True, name=None):
"""
Create a DataFrame with a column containing the Index.
Parameters
----------
index : bool, default True
Set the index of the returned DataFrame as the original Index.
name : object, default None
The passed name should substitute for the index name (if it has
one).
Returns
-------
DataFrame
DataFrame containing the original Index data.
See Also
--------
Index.to_series : Convert an Index to a Series.
Series.to_frame : Convert Series to DataFrame.
Examples
--------
>>> import mars.dataframe as md
>>> idx = md.Index(['Ant', 'Bear', 'Cow'], name='animal')
>>> idx.to_frame().execute()
animal
animal
Ant Ant
Bear Bear
Cow Cow
By default, the original Index is reused. To enforce a new Index:
>>> idx.to_frame(index=False).execute()
animal
0 Ant
1 Bear
2 Cow
To override the name of the resulting column, specify `name`:
>>> idx.to_frame(index=False, name='zoo').execute()
zoo
0 Ant
1 Bear
2 Cow
"""
from . import dataframe_from_tensor
if isinstance(self.index_value.value, IndexValue.MultiIndex):
old_names = self.index_value.value.names
if name is not None and not isinstance(name, Iterable) or isinstance(name, str):
raise TypeError("'name' must be a list / sequence of column names.")
name = list(name if name is not None else old_names)
if len(name) != len(old_names):
raise ValueError(
"'name' should have same length as number of levels on index."
)
columns = [
old or new or idx for idx, (old, new) in enumerate(zip(old_names, name))
]
else:
columns = [name or self.name or 0]
index_ = self if index else None
return dataframe_from_tensor(
self._to_mars_tensor(self, extract_multi_index=True),
index=index_,
columns=columns,
)
|
https://github.com/mars-project/mars/issues/1943
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-52-f2912f2e33a0> in <module>
1 from mars.learn.metrics import accuracy_score
----> 2 accuracy_score(y_test,y_pred).execute()
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_classification.py in accuracy_score(y_true, y_pred, normalize, sample_weight, session, run_kwargs)
178 op = AccuracyScore(y_true=y_true, y_pred=y_pred, normalize=normalize,
179 sample_weight=sample_weight)
--> 180 score = op(y_true, y_pred)
181 return score.execute(session=session, **(run_kwargs or dict()))
/home/tops/lib/python3.6/site-packages/mars/utils.py in _inner(*args, **kwargs)
455 def _inner(*args, **kwargs):
456 with self:
--> 457 return func(*args, **kwargs)
458
459 return _inner
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_classification.py in __call__(self, y_true, y_pred)
76
77 def __call__(self, y_true, y_pred):
---> 78 type_true, y_true, y_pred = _check_targets(y_true, y_pred)
79 self._type_true = type_true
80 inputs = [y_true, y_pred, type_true]
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_check_targets.py in _check_targets(y_true, y_pred)
193 """
194 op = CheckTargets(y_true=y_true, y_pred=y_pred)
--> 195 return op(y_true, y_pred)
/home/tops/lib/python3.6/site-packages/mars/utils.py in _inner(*args, **kwargs)
455 def _inner(*args, **kwargs):
456 with self:
--> 457 return func(*args, **kwargs)
458
459 return _inner
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_check_targets.py in __call__(self, y_true, y_pred)
82 if isinstance(y_pred, (Base, Entity)):
83 inputs.append(y_pred)
---> 84 self._type_true = type_of_target(y_true)
85 self._type_pred = type_of_target(y_pred)
86 inputs.extend([self._type_true, self._type_pred])
/home/tops/lib/python3.6/site-packages/mars/learn/utils/multiclass.py in type_of_target(y)
403
404 if not valid:
--> 405 raise ValueError(f'Expected array-like (array or non-string sequence), got {y}')
406
407 sparse_pandas = (y.__class__.__name__ in ['SparseSeries', 'SparseArray'])
ValueError: Expected array-like (array or non-string sequence), got 159615 0
134010 0
199100 0
137756 0
200050 0
..
219880 0
82326 0
184439 0
198518 0
109959 0
Name: label_bigint, Length: 1146788, dtype: int64
|
ValueError
|
def __mars_tensor__(self, dtype=None, order="K"):
return self._data.__mars_tensor__(dtype=dtype, order=order)
|
def __mars_tensor__(self, dtype=None, order="K"):
tensor = self._data.to_tensor()
dtype = dtype if dtype is not None else tensor.dtype
return tensor.astype(dtype=dtype, order=order, copy=False)
|
https://github.com/mars-project/mars/issues/1943
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-52-f2912f2e33a0> in <module>
1 from mars.learn.metrics import accuracy_score
----> 2 accuracy_score(y_test,y_pred).execute()
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_classification.py in accuracy_score(y_true, y_pred, normalize, sample_weight, session, run_kwargs)
178 op = AccuracyScore(y_true=y_true, y_pred=y_pred, normalize=normalize,
179 sample_weight=sample_weight)
--> 180 score = op(y_true, y_pred)
181 return score.execute(session=session, **(run_kwargs or dict()))
/home/tops/lib/python3.6/site-packages/mars/utils.py in _inner(*args, **kwargs)
455 def _inner(*args, **kwargs):
456 with self:
--> 457 return func(*args, **kwargs)
458
459 return _inner
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_classification.py in __call__(self, y_true, y_pred)
76
77 def __call__(self, y_true, y_pred):
---> 78 type_true, y_true, y_pred = _check_targets(y_true, y_pred)
79 self._type_true = type_true
80 inputs = [y_true, y_pred, type_true]
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_check_targets.py in _check_targets(y_true, y_pred)
193 """
194 op = CheckTargets(y_true=y_true, y_pred=y_pred)
--> 195 return op(y_true, y_pred)
/home/tops/lib/python3.6/site-packages/mars/utils.py in _inner(*args, **kwargs)
455 def _inner(*args, **kwargs):
456 with self:
--> 457 return func(*args, **kwargs)
458
459 return _inner
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_check_targets.py in __call__(self, y_true, y_pred)
82 if isinstance(y_pred, (Base, Entity)):
83 inputs.append(y_pred)
---> 84 self._type_true = type_of_target(y_true)
85 self._type_pred = type_of_target(y_pred)
86 inputs.extend([self._type_true, self._type_pred])
/home/tops/lib/python3.6/site-packages/mars/learn/utils/multiclass.py in type_of_target(y)
403
404 if not valid:
--> 405 raise ValueError(f'Expected array-like (array or non-string sequence), got {y}')
406
407 sparse_pandas = (y.__class__.__name__ in ['SparseSeries', 'SparseArray'])
ValueError: Expected array-like (array or non-string sequence), got 159615 0
134010 0
199100 0
137756 0
200050 0
..
219880 0
82326 0
184439 0
198518 0
109959 0
Name: label_bigint, Length: 1146788, dtype: int64
|
ValueError
|
def __mars_tensor__(self, dtype=None, order="K"):
return self._data.__mars_tensor__(dtype=dtype, order=order)
|
def __mars_tensor__(self, dtype=None, order="K"):
return self._data.to_tensor().astype(dtype=dtype, order=order, copy=False)
|
https://github.com/mars-project/mars/issues/1943
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-52-f2912f2e33a0> in <module>
1 from mars.learn.metrics import accuracy_score
----> 2 accuracy_score(y_test,y_pred).execute()
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_classification.py in accuracy_score(y_true, y_pred, normalize, sample_weight, session, run_kwargs)
178 op = AccuracyScore(y_true=y_true, y_pred=y_pred, normalize=normalize,
179 sample_weight=sample_weight)
--> 180 score = op(y_true, y_pred)
181 return score.execute(session=session, **(run_kwargs or dict()))
/home/tops/lib/python3.6/site-packages/mars/utils.py in _inner(*args, **kwargs)
455 def _inner(*args, **kwargs):
456 with self:
--> 457 return func(*args, **kwargs)
458
459 return _inner
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_classification.py in __call__(self, y_true, y_pred)
76
77 def __call__(self, y_true, y_pred):
---> 78 type_true, y_true, y_pred = _check_targets(y_true, y_pred)
79 self._type_true = type_true
80 inputs = [y_true, y_pred, type_true]
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_check_targets.py in _check_targets(y_true, y_pred)
193 """
194 op = CheckTargets(y_true=y_true, y_pred=y_pred)
--> 195 return op(y_true, y_pred)
/home/tops/lib/python3.6/site-packages/mars/utils.py in _inner(*args, **kwargs)
455 def _inner(*args, **kwargs):
456 with self:
--> 457 return func(*args, **kwargs)
458
459 return _inner
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_check_targets.py in __call__(self, y_true, y_pred)
82 if isinstance(y_pred, (Base, Entity)):
83 inputs.append(y_pred)
---> 84 self._type_true = type_of_target(y_true)
85 self._type_pred = type_of_target(y_pred)
86 inputs.extend([self._type_true, self._type_pred])
/home/tops/lib/python3.6/site-packages/mars/learn/utils/multiclass.py in type_of_target(y)
403
404 if not valid:
--> 405 raise ValueError(f'Expected array-like (array or non-string sequence), got {y}')
406
407 sparse_pandas = (y.__class__.__name__ in ['SparseSeries', 'SparseArray'])
ValueError: Expected array-like (array or non-string sequence), got 159615 0
134010 0
199100 0
137756 0
200050 0
..
219880 0
82326 0
184439 0
198518 0
109959 0
Name: label_bigint, Length: 1146788, dtype: int64
|
ValueError
|
def _to_mars_tensor(self, dtype=None, order="K", extract_multi_index=False):
tensor = self.to_tensor(extract_multi_index=extract_multi_index)
dtype = dtype if dtype is not None else tensor.dtype
return tensor.astype(dtype=dtype, order=order, copy=False)
|
def _to_mars_tensor(self, dtype=None, order="K", extract_multi_index=False):
tensor = self._data.to_tensor(extract_multi_index=extract_multi_index)
dtype = dtype if dtype is not None else tensor.dtype
return tensor.astype(dtype=dtype, order=order, copy=False)
|
https://github.com/mars-project/mars/issues/1943
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-52-f2912f2e33a0> in <module>
1 from mars.learn.metrics import accuracy_score
----> 2 accuracy_score(y_test,y_pred).execute()
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_classification.py in accuracy_score(y_true, y_pred, normalize, sample_weight, session, run_kwargs)
178 op = AccuracyScore(y_true=y_true, y_pred=y_pred, normalize=normalize,
179 sample_weight=sample_weight)
--> 180 score = op(y_true, y_pred)
181 return score.execute(session=session, **(run_kwargs or dict()))
/home/tops/lib/python3.6/site-packages/mars/utils.py in _inner(*args, **kwargs)
455 def _inner(*args, **kwargs):
456 with self:
--> 457 return func(*args, **kwargs)
458
459 return _inner
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_classification.py in __call__(self, y_true, y_pred)
76
77 def __call__(self, y_true, y_pred):
---> 78 type_true, y_true, y_pred = _check_targets(y_true, y_pred)
79 self._type_true = type_true
80 inputs = [y_true, y_pred, type_true]
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_check_targets.py in _check_targets(y_true, y_pred)
193 """
194 op = CheckTargets(y_true=y_true, y_pred=y_pred)
--> 195 return op(y_true, y_pred)
/home/tops/lib/python3.6/site-packages/mars/utils.py in _inner(*args, **kwargs)
455 def _inner(*args, **kwargs):
456 with self:
--> 457 return func(*args, **kwargs)
458
459 return _inner
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_check_targets.py in __call__(self, y_true, y_pred)
82 if isinstance(y_pred, (Base, Entity)):
83 inputs.append(y_pred)
---> 84 self._type_true = type_of_target(y_true)
85 self._type_pred = type_of_target(y_pred)
86 inputs.extend([self._type_true, self._type_pred])
/home/tops/lib/python3.6/site-packages/mars/learn/utils/multiclass.py in type_of_target(y)
403
404 if not valid:
--> 405 raise ValueError(f'Expected array-like (array or non-string sequence), got {y}')
406
407 sparse_pandas = (y.__class__.__name__ in ['SparseSeries', 'SparseArray'])
ValueError: Expected array-like (array or non-string sequence), got 159615 0
134010 0
199100 0
137756 0
200050 0
..
219880 0
82326 0
184439 0
198518 0
109959 0
Name: label_bigint, Length: 1146788, dtype: int64
|
ValueError
|
def __init__(self, y_true=None, y_pred=None, type_true=None, type_pred=None, **kw):
super().__init__(
_y_true=y_true, _y_pred=y_pred, _type_true=type_true, _type_pred=type_pred, **kw
)
# scalar(y_type), y_true, y_pred
self.output_types = [OutputType.tensor] * 3
|
def __init__(self, y_true=None, y_pred=None, type_true=None, type_pred=None, **kw):
super().__init__(
_y_true=y_true, _y_pred=y_pred, _type_true=type_true, _type_pred=type_pred, **kw
)
# scalar(y_type), y_true, y_pred
self.output_types = [OutputType.tensor] + get_output_types(
*[y_true, y_pred], unknown_as=OutputType.tensor
)
|
https://github.com/mars-project/mars/issues/1943
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-52-f2912f2e33a0> in <module>
1 from mars.learn.metrics import accuracy_score
----> 2 accuracy_score(y_test,y_pred).execute()
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_classification.py in accuracy_score(y_true, y_pred, normalize, sample_weight, session, run_kwargs)
178 op = AccuracyScore(y_true=y_true, y_pred=y_pred, normalize=normalize,
179 sample_weight=sample_weight)
--> 180 score = op(y_true, y_pred)
181 return score.execute(session=session, **(run_kwargs or dict()))
/home/tops/lib/python3.6/site-packages/mars/utils.py in _inner(*args, **kwargs)
455 def _inner(*args, **kwargs):
456 with self:
--> 457 return func(*args, **kwargs)
458
459 return _inner
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_classification.py in __call__(self, y_true, y_pred)
76
77 def __call__(self, y_true, y_pred):
---> 78 type_true, y_true, y_pred = _check_targets(y_true, y_pred)
79 self._type_true = type_true
80 inputs = [y_true, y_pred, type_true]
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_check_targets.py in _check_targets(y_true, y_pred)
193 """
194 op = CheckTargets(y_true=y_true, y_pred=y_pred)
--> 195 return op(y_true, y_pred)
/home/tops/lib/python3.6/site-packages/mars/utils.py in _inner(*args, **kwargs)
455 def _inner(*args, **kwargs):
456 with self:
--> 457 return func(*args, **kwargs)
458
459 return _inner
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_check_targets.py in __call__(self, y_true, y_pred)
82 if isinstance(y_pred, (Base, Entity)):
83 inputs.append(y_pred)
---> 84 self._type_true = type_of_target(y_true)
85 self._type_pred = type_of_target(y_pred)
86 inputs.extend([self._type_true, self._type_pred])
/home/tops/lib/python3.6/site-packages/mars/learn/utils/multiclass.py in type_of_target(y)
403
404 if not valid:
--> 405 raise ValueError(f'Expected array-like (array or non-string sequence), got {y}')
406
407 sparse_pandas = (y.__class__.__name__ in ['SparseSeries', 'SparseArray'])
ValueError: Expected array-like (array or non-string sequence), got 159615 0
134010 0
199100 0
137756 0
200050 0
..
219880 0
82326 0
184439 0
198518 0
109959 0
Name: label_bigint, Length: 1146788, dtype: int64
|
ValueError
|
def tile(cls, op):
y_true, y_pred = op.y_true, op.y_pred
for y in (op.y_true, op.y_pred):
if isinstance(y, (Base, Entity)):
if np.isnan(y.size): # pragma: no cover
raise TilesError("input has unknown shape")
check_consistent_length(y_true, y_pred)
ctx = get_context()
try:
type_true, type_pred = ctx.get_chunk_results(
[op.type_true.chunks[0].key, op.type_pred.chunks[0].key]
)
except (KeyError, AttributeError):
raise TilesError("type_true and type_pred needs to be executed first")
y_type = {type_true, type_pred}
if y_type == {"binary", "multiclass"}:
y_type = {"multiclass"}
if len(y_type) > 1:
raise ValueError(
f"Classification metrics can't handle a mix of {type_true} "
f"and {type_pred} targets"
)
# We can't have more than one value on y_type => The set is no more needed
y_type = y_type.pop()
# No metrics support "multiclass-multioutput" format
if y_type not in ["binary", "multiclass", "multilabel-indicator"]:
raise ValueError(f"{y_type} is not supported")
if y_type in ["binary", "multiclass"]:
y_true = column_or_1d(y_true)
y_pred = column_or_1d(y_pred)
if y_type == "binary":
unique_values = mt.union1d(y_true, y_pred)
y_type = mt.where(mt.count_nonzero(unique_values) > 2, "multiclass", y_type)
elif y_type.startswith("multilabel"):
y_true = mt.tensor(y_true).tosparse()
y_pred = mt.tensor(y_pred).tosparse()
y_type = "multilabel-indicator"
if not isinstance(y_true, (Base, Entity)):
y_true = mt.tensor(y_true)
if not isinstance(y_pred, (Base, Entity)):
y_pred = mt.tensor(y_pred)
if not isinstance(y_type, TENSOR_TYPE):
y_type = mt.tensor(y_type, dtype=object)
y_type = recursive_tile(y_type)
y_true = recursive_tile(y_true)
y_pred = recursive_tile(y_pred)
kws = [out.params for out in op.outputs]
kws[0].update(dict(nsplits=(), chunks=[y_type.chunks[0]]))
kws[1].update(
dict(
nsplits=y_true.nsplits,
chunks=y_true.chunks,
shape=tuple(sum(sp) for sp in y_true.nsplits),
)
)
kws[2].update(
dict(
nsplits=y_pred.nsplits,
chunks=y_pred.chunks,
shape=tuple(sum(sp) for sp in y_pred.nsplits),
)
)
new_op = op.copy()
return new_op.new_tileables(op.inputs, kws=kws)
|
def tile(cls, op):
y_true, y_pred = op.y_true, op.y_pred
for y in (op.y_true, op.y_pred):
if isinstance(y, (Base, Entity)):
if np.isnan(y.size): # pragma: no cover
raise TilesError("input has unknown shape")
check_consistent_length(y_true, y_pred)
ctx = get_context()
try:
type_true, type_pred = ctx.get_chunk_results(
[op.type_true.chunks[0].key, op.type_pred.chunks[0].key]
)
except KeyError:
raise TilesError("type_true and type_pred needs to be executed first")
y_type = {type_true, type_pred}
if y_type == {"binary", "multiclass"}:
y_type = {"multiclass"}
if len(y_type) > 1:
raise ValueError(
f"Classification metrics can't handle a mix of {type_true} "
f"and {type_pred} targets"
)
# We can't have more than one value on y_type => The set is no more needed
y_type = y_type.pop()
# No metrics support "multiclass-multioutput" format
if y_type not in ["binary", "multiclass", "multilabel-indicator"]:
raise ValueError(f"{y_type} is not supported")
if y_type in ["binary", "multiclass"]:
y_true = column_or_1d(y_true)
y_pred = column_or_1d(y_pred)
if y_type == "binary":
unique_values = mt.union1d(y_true, y_pred)
y_type = mt.where(mt.count_nonzero(unique_values) > 2, "multiclass", y_type)
elif y_type.startswith("multilabel"):
y_true = mt.tensor(y_true).tosparse()
y_pred = mt.tensor(y_pred).tosparse()
y_type = "multilabel-indicator"
if not isinstance(y_true, (Base, Entity)):
y_true = mt.tensor(y_true)
if not isinstance(y_pred, (Base, Entity)):
y_pred = mt.tensor(y_pred)
if not isinstance(y_type, TENSOR_TYPE):
y_type = mt.tensor(y_type, dtype=object)
y_type = recursive_tile(y_type)
y_true = recursive_tile(y_true)
y_pred = recursive_tile(y_pred)
kws = [out.params for out in op.outputs]
kws[0].update(dict(nsplits=(), chunks=[y_type.chunks[0]]))
kws[1].update(
dict(
nsplits=y_true.nsplits,
chunks=y_true.chunks,
shape=tuple(sum(sp) for sp in y_true.nsplits),
)
)
kws[2].update(
dict(
nsplits=y_pred.nsplits,
chunks=y_pred.chunks,
shape=tuple(sum(sp) for sp in y_pred.nsplits),
)
)
new_op = op.copy()
return new_op.new_tileables(op.inputs, kws=kws)
|
https://github.com/mars-project/mars/issues/1943
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-52-f2912f2e33a0> in <module>
1 from mars.learn.metrics import accuracy_score
----> 2 accuracy_score(y_test,y_pred).execute()
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_classification.py in accuracy_score(y_true, y_pred, normalize, sample_weight, session, run_kwargs)
178 op = AccuracyScore(y_true=y_true, y_pred=y_pred, normalize=normalize,
179 sample_weight=sample_weight)
--> 180 score = op(y_true, y_pred)
181 return score.execute(session=session, **(run_kwargs or dict()))
/home/tops/lib/python3.6/site-packages/mars/utils.py in _inner(*args, **kwargs)
455 def _inner(*args, **kwargs):
456 with self:
--> 457 return func(*args, **kwargs)
458
459 return _inner
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_classification.py in __call__(self, y_true, y_pred)
76
77 def __call__(self, y_true, y_pred):
---> 78 type_true, y_true, y_pred = _check_targets(y_true, y_pred)
79 self._type_true = type_true
80 inputs = [y_true, y_pred, type_true]
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_check_targets.py in _check_targets(y_true, y_pred)
193 """
194 op = CheckTargets(y_true=y_true, y_pred=y_pred)
--> 195 return op(y_true, y_pred)
/home/tops/lib/python3.6/site-packages/mars/utils.py in _inner(*args, **kwargs)
455 def _inner(*args, **kwargs):
456 with self:
--> 457 return func(*args, **kwargs)
458
459 return _inner
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_check_targets.py in __call__(self, y_true, y_pred)
82 if isinstance(y_pred, (Base, Entity)):
83 inputs.append(y_pred)
---> 84 self._type_true = type_of_target(y_true)
85 self._type_pred = type_of_target(y_pred)
86 inputs.extend([self._type_true, self._type_pred])
/home/tops/lib/python3.6/site-packages/mars/learn/utils/multiclass.py in type_of_target(y)
403
404 if not valid:
--> 405 raise ValueError(f'Expected array-like (array or non-string sequence), got {y}')
406
407 sparse_pandas = (y.__class__.__name__ in ['SparseSeries', 'SparseArray'])
ValueError: Expected array-like (array or non-string sequence), got 159615 0
134010 0
199100 0
137756 0
200050 0
..
219880 0
82326 0
184439 0
198518 0
109959 0
Name: label_bigint, Length: 1146788, dtype: int64
|
ValueError
|
def tile(cls, op):
ctx = get_context()
try:
type_true = ctx.get_chunk_results([op.type_true.chunks[0].key])[0]
except (KeyError, AttributeError):
raise TilesError("type_true needed to be executed first")
y_true, y_pred = op.y_true, op.y_pred
if type_true.item().startswith("multilabel"):
differing_labels = mt.count_nonzero(y_true - y_pred, axis=1)
score = mt.equal(differing_labels, 0)
else:
score = mt.equal(y_true, y_pred)
result = _weighted_sum(score, op.sample_weight, op.normalize)
return [recursive_tile(result)]
|
def tile(cls, op):
ctx = get_context()
try:
type_true = ctx.get_chunk_results([op.type_true.chunks[0].key])[0]
except KeyError:
raise TilesError("type_true needed to be executed first")
y_true, y_pred = op.y_true, op.y_pred
if type_true.item().startswith("multilabel"):
differing_labels = mt.count_nonzero(y_true - y_pred, axis=1)
score = mt.equal(differing_labels, 0)
else:
score = mt.equal(y_true, y_pred)
result = _weighted_sum(score, op.sample_weight, op.normalize)
return [recursive_tile(result)]
|
https://github.com/mars-project/mars/issues/1943
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-52-f2912f2e33a0> in <module>
1 from mars.learn.metrics import accuracy_score
----> 2 accuracy_score(y_test,y_pred).execute()
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_classification.py in accuracy_score(y_true, y_pred, normalize, sample_weight, session, run_kwargs)
178 op = AccuracyScore(y_true=y_true, y_pred=y_pred, normalize=normalize,
179 sample_weight=sample_weight)
--> 180 score = op(y_true, y_pred)
181 return score.execute(session=session, **(run_kwargs or dict()))
/home/tops/lib/python3.6/site-packages/mars/utils.py in _inner(*args, **kwargs)
455 def _inner(*args, **kwargs):
456 with self:
--> 457 return func(*args, **kwargs)
458
459 return _inner
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_classification.py in __call__(self, y_true, y_pred)
76
77 def __call__(self, y_true, y_pred):
---> 78 type_true, y_true, y_pred = _check_targets(y_true, y_pred)
79 self._type_true = type_true
80 inputs = [y_true, y_pred, type_true]
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_check_targets.py in _check_targets(y_true, y_pred)
193 """
194 op = CheckTargets(y_true=y_true, y_pred=y_pred)
--> 195 return op(y_true, y_pred)
/home/tops/lib/python3.6/site-packages/mars/utils.py in _inner(*args, **kwargs)
455 def _inner(*args, **kwargs):
456 with self:
--> 457 return func(*args, **kwargs)
458
459 return _inner
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_check_targets.py in __call__(self, y_true, y_pred)
82 if isinstance(y_pred, (Base, Entity)):
83 inputs.append(y_pred)
---> 84 self._type_true = type_of_target(y_true)
85 self._type_pred = type_of_target(y_pred)
86 inputs.extend([self._type_true, self._type_pred])
/home/tops/lib/python3.6/site-packages/mars/learn/utils/multiclass.py in type_of_target(y)
403
404 if not valid:
--> 405 raise ValueError(f'Expected array-like (array or non-string sequence), got {y}')
406
407 sparse_pandas = (y.__class__.__name__ in ['SparseSeries', 'SparseArray'])
ValueError: Expected array-like (array or non-string sequence), got 159615 0
134010 0
199100 0
137756 0
200050 0
..
219880 0
82326 0
184439 0
198518 0
109959 0
Name: label_bigint, Length: 1146788, dtype: int64
|
ValueError
|
def type_of_target(y):
"""Determine the type of data indicated by the target.
Note that this type is the most specific type that can be inferred.
For example:
* ``binary`` is more specific but compatible with ``multiclass``.
* ``multiclass`` of integers is more specific but compatible with
``continuous``.
* ``multilabel-indicator`` is more specific but compatible with
``multiclass-multioutput``.
Parameters
----------
y : array-like
Returns
-------
target_type : string
One of:
* 'continuous': `y` is an array-like of floats that are not all
integers, and is 1d or a column vector.
* 'continuous-multioutput': `y` is a 2d tensor of floats that are
not all integers, and both dimensions are of size > 1.
* 'binary': `y` contains <= 2 discrete values and is 1d or a column
vector.
* 'multiclass': `y` contains more than two discrete values, is not a
sequence of sequences, and is 1d or a column vector.
* 'multiclass-multioutput': `y` is a 2d tensor that contains more
than two discrete values, is not a sequence of sequences, and both
dimensions are of size > 1.
* 'multilabel-indicator': `y` is a label indicator matrix, a tensor
of two dimensions with at least two columns, and at most 2 unique
values.
* 'unknown': `y` is array-like but none of the above, such as a 3d
tensor, sequence of sequences, or a tensor of non-sequence objects.
Examples
--------
>>> import mars.tensor as mt
>>> from mars.learn.utils.multiclass import type_of_target
>>> type_of_target([0.1, 0.6]).execute()
'continuous'
>>> type_of_target([1, -1, -1, 1]).execute()
'binary'
>>> type_of_target(['a', 'b', 'a']).execute()
'binary'
>>> type_of_target([1.0, 2.0]).execute()
'binary'
>>> type_of_target([1, 0, 2]).execute()
'multiclass'
>>> type_of_target([1.0, 0.0, 3.0]).execute()
'multiclass'
>>> type_of_target(['a', 'b', 'c']).execute()
'multiclass'
>>> type_of_target(mt.array([[1, 2], [3, 1]])).execute()
'multiclass-multioutput'
>>> type_of_target([[1, 2]]).execute()
'multiclass-multioutput'
>>> type_of_target(mt.array([[1.5, 2.0], [3.0, 1.6]])).execute()
'continuous-multioutput'
>>> type_of_target(mt.array([[0, 1], [1, 1]])).execute()
'multilabel-indicator'
"""
valid_types = (Sequence, spmatrix) if spmatrix is not None else (Sequence,)
valid = (
isinstance(y, valid_types)
or hasattr(y, "__array__")
or hasattr(y, "__mars_tensor__")
) and not isinstance(y, str)
if not valid:
raise ValueError(f"Expected array-like (array or non-string sequence), got {y}")
sparse_pandas = y.__class__.__name__ in ["SparseSeries", "SparseArray"]
if sparse_pandas: # pragma: no cover
raise ValueError("y cannot be class 'SparseSeries' or 'SparseArray'")
if isinstance(y, (Base, Entity)):
y = mt.tensor(y)
op = TypeOfTarget(y=y)
return op(y)
|
def type_of_target(y):
"""Determine the type of data indicated by the target.
Note that this type is the most specific type that can be inferred.
For example:
* ``binary`` is more specific but compatible with ``multiclass``.
* ``multiclass`` of integers is more specific but compatible with
``continuous``.
* ``multilabel-indicator`` is more specific but compatible with
``multiclass-multioutput``.
Parameters
----------
y : array-like
Returns
-------
target_type : string
One of:
* 'continuous': `y` is an array-like of floats that are not all
integers, and is 1d or a column vector.
* 'continuous-multioutput': `y` is a 2d tensor of floats that are
not all integers, and both dimensions are of size > 1.
* 'binary': `y` contains <= 2 discrete values and is 1d or a column
vector.
* 'multiclass': `y` contains more than two discrete values, is not a
sequence of sequences, and is 1d or a column vector.
* 'multiclass-multioutput': `y` is a 2d tensor that contains more
than two discrete values, is not a sequence of sequences, and both
dimensions are of size > 1.
* 'multilabel-indicator': `y` is a label indicator matrix, a tensor
of two dimensions with at least two columns, and at most 2 unique
values.
* 'unknown': `y` is array-like but none of the above, such as a 3d
tensor, sequence of sequences, or a tensor of non-sequence objects.
Examples
--------
>>> import mars.tensor as mt
>>> from mars.learn.utils.multiclass import type_of_target
>>> type_of_target([0.1, 0.6]).execute()
'continuous'
>>> type_of_target([1, -1, -1, 1]).execute()
'binary'
>>> type_of_target(['a', 'b', 'a']).execute()
'binary'
>>> type_of_target([1.0, 2.0]).execute()
'binary'
>>> type_of_target([1, 0, 2]).execute()
'multiclass'
>>> type_of_target([1.0, 0.0, 3.0]).execute()
'multiclass'
>>> type_of_target(['a', 'b', 'c']).execute()
'multiclass'
>>> type_of_target(mt.array([[1, 2], [3, 1]])).execute()
'multiclass-multioutput'
>>> type_of_target([[1, 2]]).execute()
'multiclass-multioutput'
>>> type_of_target(mt.array([[1.5, 2.0], [3.0, 1.6]])).execute()
'continuous-multioutput'
>>> type_of_target(mt.array([[0, 1], [1, 1]])).execute()
'multilabel-indicator'
"""
valid_types = (Sequence, spmatrix) if spmatrix is not None else (Sequence,)
valid = (isinstance(y, valid_types) or hasattr(y, "__array__")) and not isinstance(
y, str
)
if not valid:
raise ValueError(f"Expected array-like (array or non-string sequence), got {y}")
sparse_pandas = y.__class__.__name__ in ["SparseSeries", "SparseArray"]
if sparse_pandas: # pragma: no cover
raise ValueError("y cannot be class 'SparseSeries' or 'SparseArray'")
if isinstance(y, (Base, Entity)):
y = mt.tensor(y)
op = TypeOfTarget(y=y)
return op(y)
|
https://github.com/mars-project/mars/issues/1943
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-52-f2912f2e33a0> in <module>
1 from mars.learn.metrics import accuracy_score
----> 2 accuracy_score(y_test,y_pred).execute()
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_classification.py in accuracy_score(y_true, y_pred, normalize, sample_weight, session, run_kwargs)
178 op = AccuracyScore(y_true=y_true, y_pred=y_pred, normalize=normalize,
179 sample_weight=sample_weight)
--> 180 score = op(y_true, y_pred)
181 return score.execute(session=session, **(run_kwargs or dict()))
/home/tops/lib/python3.6/site-packages/mars/utils.py in _inner(*args, **kwargs)
455 def _inner(*args, **kwargs):
456 with self:
--> 457 return func(*args, **kwargs)
458
459 return _inner
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_classification.py in __call__(self, y_true, y_pred)
76
77 def __call__(self, y_true, y_pred):
---> 78 type_true, y_true, y_pred = _check_targets(y_true, y_pred)
79 self._type_true = type_true
80 inputs = [y_true, y_pred, type_true]
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_check_targets.py in _check_targets(y_true, y_pred)
193 """
194 op = CheckTargets(y_true=y_true, y_pred=y_pred)
--> 195 return op(y_true, y_pred)
/home/tops/lib/python3.6/site-packages/mars/utils.py in _inner(*args, **kwargs)
455 def _inner(*args, **kwargs):
456 with self:
--> 457 return func(*args, **kwargs)
458
459 return _inner
/home/tops/lib/python3.6/site-packages/mars/learn/metrics/_check_targets.py in __call__(self, y_true, y_pred)
82 if isinstance(y_pred, (Base, Entity)):
83 inputs.append(y_pred)
---> 84 self._type_true = type_of_target(y_true)
85 self._type_pred = type_of_target(y_pred)
86 inputs.extend([self._type_true, self._type_pred])
/home/tops/lib/python3.6/site-packages/mars/learn/utils/multiclass.py in type_of_target(y)
403
404 if not valid:
--> 405 raise ValueError(f'Expected array-like (array or non-string sequence), got {y}')
406
407 sparse_pandas = (y.__class__.__name__ in ['SparseSeries', 'SparseArray'])
ValueError: Expected array-like (array or non-string sequence), got 159615 0
134010 0
199100 0
137756 0
200050 0
..
219880 0
82326 0
184439 0
198518 0
109959 0
Name: label_bigint, Length: 1146788, dtype: int64
|
ValueError
|
def __init__(self, n_workers=None, output_types=None, pure_depends=None, **kw):
super().__init__(
_n_workers=n_workers,
_output_types=output_types,
_pure_depends=pure_depends,
**kw,
)
if self.output_types is None:
self.output_types = [OutputType.object]
|
def __init__(self, n_workers=None, output_types=None, **kw):
super().__init__(_n_workers=n_workers, _output_types=output_types, **kw)
if self.output_types is None:
self.output_types = [OutputType.object]
|
https://github.com/mars-project/mars/issues/1932
|
2021-01-26 15:39:43,801 mars.scheduler.operands.base 474 DEBUG Operand e3036e9fb2b4f84911306f8fa9cf3e03(StartTracker) state from OperandState.UNSCHEDULED to OperandState.READY.
2021-01-26 15:39:43,802 mars.scheduler.assigner 476 DEBUG Operand e3036e9fb2b4f84911306f8fa9cf3e03 enqueued
2021-01-26 15:49:43,403 mars.scheduler.assigner 473 ERROR Unexpected exception occurred in AssignEvaluationActor._allocate_resource. op_key=e3036e9fb2b4f84911306f8fa9cf3e03
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 377, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/scheduler/assigner.py", line 414, in _allocate_resource
raise TimeoutError(f'Assign resources to operand {op_key} timed out')
TimeoutError: Assign resources to operand e3036e9fb2b4f84911306f8fa9cf3e03 timed out
2021-01-26 15:49:43,404 mars.scheduler.assigner 473 ERROR Unexpected error occurred in s:0:AssignEvaluationActor$eb2a35c62f8cfb405cc15245e0d1cf02
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.6.2.zip/mars/scheduler/assigner.py", line 315, in allocate_top_resources
reject_workers=reject_workers)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 377, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/scheduler/assigner.py", line 414, in _allocate_resource
raise TimeoutError(f'Assign resources to operand {op_key} timed out')
TimeoutError: Assign resources to operand e3036e9fb2b4f84911306f8fa9cf3e03 timed out
2021-01-26 15:49:43,407 mars.scheduler.operands.common 474 ERROR Attempt 1: Unexpected error TimeoutError occurred in executing operand e3036e9fb2b4f84911306f8fa9cf3e03 in None
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.6.2.zip/mars/scheduler/assigner.py", line 315, in allocate_top_resources
reject_workers=reject_workers)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 377, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/scheduler/assigner.py", line 414, in _allocate_resource
raise TimeoutError(f'Assign resources to operand {op_key} timed out')
TimeoutError: Assign resources to operand e3036e9fb2b4f84911306f8fa9cf3e03 timed out
|
TimeoutError
|
def tile(cls, op):
ctx = get_context()
if ctx.running_mode != RunningMode.distributed:
assert all(len(inp.chunks) == 1 for inp in op.inputs)
chunk_op = op.copy().reset_key()
out_chunk = chunk_op.new_chunk(
[inp.chunks[0] for inp in op.inputs], shape=(1,), index=(0,)
)
new_op = op.copy()
return new_op.new_tileables(op.inputs, chunks=[out_chunk], nsplits=((1,),))
else:
inp = op.inputs[0]
in_chunks = inp.chunks
workers = cls._get_dmatrix_chunks_workers(ctx, inp)
n_chunk = len(in_chunks)
tracker_chunk = StartTracker(
n_workers=n_chunk, pure_depends=[True] * n_chunk
).new_chunk(in_chunks, shape=())
out_chunks = []
worker_to_evals = defaultdict(list)
if op.evals is not None:
for dm, ev in op.evals:
worker_to_chunk = cls._get_dmatrix_worker_to_chunk(dm, workers, ctx)
for worker, chunk in worker_to_chunk.items():
worker_to_evals[worker].append((chunk, ev))
for in_chunk, worker in zip(in_chunks, workers):
chunk_op = op.copy().reset_key()
chunk_op._expect_worker = worker
chunk_op._tracker = tracker_chunk
chunk_evals = list(worker_to_evals.get(worker, list()))
chunk_op._evals = chunk_evals
input_chunks = (
[in_chunk] + [pair[0] for pair in chunk_evals] + [tracker_chunk]
)
out_chunk = chunk_op.new_chunk(
input_chunks, shape=(np.nan,), index=in_chunk.index[:1]
)
out_chunks.append(out_chunk)
new_op = op.copy()
return new_op.new_tileables(
op.inputs, chunks=out_chunks, nsplits=((np.nan for _ in out_chunks),)
)
|
def tile(cls, op):
ctx = get_context()
if ctx.running_mode != RunningMode.distributed:
assert all(len(inp.chunks) == 1 for inp in op.inputs)
chunk_op = op.copy().reset_key()
out_chunk = chunk_op.new_chunk(
[inp.chunks[0] for inp in op.inputs], shape=(1,), index=(0,)
)
new_op = op.copy()
return new_op.new_tileables(op.inputs, chunks=[out_chunk], nsplits=((1,),))
else:
inp = op.inputs[0]
in_chunks = inp.chunks
workers = cls._get_dmatrix_chunks_workers(ctx, inp)
tracker_chunk = StartTracker(n_workers=len(in_chunks)).new_chunk(
in_chunks, shape=()
)
out_chunks = []
worker_to_evals = defaultdict(list)
if op.evals is not None:
for dm, ev in op.evals:
worker_to_chunk = cls._get_dmatrix_worker_to_chunk(dm, workers, ctx)
for worker, chunk in worker_to_chunk.items():
worker_to_evals[worker].append((chunk, ev))
for in_chunk, worker in zip(in_chunks, workers):
chunk_op = op.copy().reset_key()
chunk_op._expect_worker = worker
chunk_op._tracker = tracker_chunk
chunk_evals = list(worker_to_evals.get(worker, list()))
chunk_op._evals = chunk_evals
input_chunks = (
[in_chunk] + [pair[0] for pair in chunk_evals] + [tracker_chunk]
)
out_chunk = chunk_op.new_chunk(
input_chunks, shape=(np.nan,), index=in_chunk.index[:1]
)
out_chunks.append(out_chunk)
new_op = op.copy()
return new_op.new_tileables(
op.inputs, chunks=out_chunks, nsplits=((np.nan for _ in out_chunks),)
)
|
https://github.com/mars-project/mars/issues/1932
|
2021-01-26 15:39:43,801 mars.scheduler.operands.base 474 DEBUG Operand e3036e9fb2b4f84911306f8fa9cf3e03(StartTracker) state from OperandState.UNSCHEDULED to OperandState.READY.
2021-01-26 15:39:43,802 mars.scheduler.assigner 476 DEBUG Operand e3036e9fb2b4f84911306f8fa9cf3e03 enqueued
2021-01-26 15:49:43,403 mars.scheduler.assigner 473 ERROR Unexpected exception occurred in AssignEvaluationActor._allocate_resource. op_key=e3036e9fb2b4f84911306f8fa9cf3e03
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 377, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/scheduler/assigner.py", line 414, in _allocate_resource
raise TimeoutError(f'Assign resources to operand {op_key} timed out')
TimeoutError: Assign resources to operand e3036e9fb2b4f84911306f8fa9cf3e03 timed out
2021-01-26 15:49:43,404 mars.scheduler.assigner 473 ERROR Unexpected error occurred in s:0:AssignEvaluationActor$eb2a35c62f8cfb405cc15245e0d1cf02
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.6.2.zip/mars/scheduler/assigner.py", line 315, in allocate_top_resources
reject_workers=reject_workers)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 377, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/scheduler/assigner.py", line 414, in _allocate_resource
raise TimeoutError(f'Assign resources to operand {op_key} timed out')
TimeoutError: Assign resources to operand e3036e9fb2b4f84911306f8fa9cf3e03 timed out
2021-01-26 15:49:43,407 mars.scheduler.operands.common 474 ERROR Attempt 1: Unexpected error TimeoutError occurred in executing operand e3036e9fb2b4f84911306f8fa9cf3e03 in None
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.6.2.zip/mars/scheduler/assigner.py", line 315, in allocate_top_resources
reject_workers=reject_workers)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 377, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/scheduler/assigner.py", line 414, in _allocate_resource
raise TimeoutError(f'Assign resources to operand {op_key} timed out')
TimeoutError: Assign resources to operand e3036e9fb2b4f84911306f8fa9cf3e03 timed out
|
TimeoutError
|
def tile(cls, op):
inputs = op.inputs
check_chunks_unknown_shape(inputs, TilesError)
axis_to_nsplits = defaultdict(list)
has_dataframe = any(
output_type == OutputType.dataframe for output_type in op.output_types
)
for ax in op.axes:
if has_dataframe and ax == 1:
# if DataFrame exists, for the columns axis,
# we only allow 1 chunk to ensure the columns consistent
axis_to_nsplits[ax].append((inputs[0].shape[ax],))
continue
for inp in inputs:
if ax < inp.ndim:
axis_to_nsplits[ax].append(inp.nsplits[ax])
ax_nsplit = {ax: decide_unify_split(*ns) for ax, ns in axis_to_nsplits.items()}
inputs = [cls._safe_rechunk(inp, ax_nsplit) for inp in inputs]
mapper_seeds = [None] * len(op.axes)
reducer_seeds = [None] * len(op.axes)
for i, ax in enumerate(op.axes):
rs = np.random.RandomState(op.seeds[i])
size = len(ax_nsplit[ax])
if size > 1:
mapper_seeds[i] = gen_random_seeds(size, rs)
reducer_seeds[i] = gen_random_seeds(size, rs)
else:
mapper_seeds[i] = reducer_seeds[i] = [op.seeds[i]] * size
out_chunks = []
out_nsplits = []
for output_type, inp, oup in zip(op.output_types, inputs, op.outputs):
inp_axes = tuple(ax for ax in op.axes if ax < inp.ndim)
reduce_sizes = tuple(inp.chunk_shape[ax] for ax in inp_axes)
output_types = [output_type]
if len(inp_axes) == 0:
continue
nsplits = list(inp.nsplits)
for ax in inp_axes:
cs = len(nsplits[ax])
if cs > 1:
nsplits[ax] = (np.nan,) * cs
out_nsplits.append(tuple(nsplits))
if all(reduce_size == 1 for reduce_size in reduce_sizes):
# no need to do shuffle
chunks = []
for c in inp.chunks:
chunk_op = LearnShuffle(
axes=inp_axes,
seeds=op.seeds[: len(inp_axes)],
output_types=output_types,
)
params = cls._calc_chunk_params(
c, inp_axes, inp.chunk_shape, oup, output_type, chunk_op, True
)
out_chunk = chunk_op.new_chunk([c], kws=[params])
chunks.append(out_chunk)
out_chunks.append(chunks)
continue
if inp.ndim > 1:
left_chunk_shape = [
s for ax, s in enumerate(inp.chunk_shape) if ax not in inp_axes
]
idx_iter = itertools.product(*[range(s) for s in left_chunk_shape])
else:
idx_iter = [()]
reduce_chunks = []
out_chunks.append(reduce_chunks)
for idx in idx_iter:
map_chunks = []
for reducer_inds in itertools.product(*[range(s) for s in reduce_sizes]):
inp_index = list(idx)
for ax, reducer_ind in zip(inp_axes, reducer_inds):
inp_index.insert(ax, reducer_ind)
inp_index = tuple(inp_index)
in_chunk = inp.cix[inp_index]
params = in_chunk.params
map_chunk_op = LearnShuffle(
stage=OperandStage.map,
output_types=output_types,
axes=inp_axes,
seeds=tuple(
mapper_seeds[j][in_chunk.index[ax]]
for j, ax in enumerate(inp_axes)
),
reduce_sizes=reduce_sizes,
)
map_chunk = map_chunk_op.new_chunk([in_chunk], **params)
map_chunks.append(map_chunk)
proxy_chunk = LearnShuffleProxy(
_tensor_keys=[inp.key], output_types=[output_type]
).new_chunk(map_chunks)
reduce_axes = tuple(
ax for j, ax in enumerate(inp_axes) if reduce_sizes[j] > 1
)
reduce_sizes_ = tuple(rs for rs in reduce_sizes if rs > 1)
for c in map_chunks:
shuffle_key = ",".join(str(idx) for idx in c.index)
chunk_op = LearnShuffle(
stage=OperandStage.reduce,
output_types=output_types,
axes=reduce_axes,
seeds=tuple(
reducer_seeds[j][c.index[ax]]
for j, ax in enumerate(inp_axes)
if reduce_sizes[j] > 1
),
reduce_sizes=reduce_sizes_,
shuffle_key=shuffle_key,
)
params = cls._calc_chunk_params(
c, inp_axes, inp.chunk_shape, oup, output_type, chunk_op, False
)
reduce_chunk = chunk_op.new_chunk([proxy_chunk], kws=[params])
reduce_chunks.append(reduce_chunk)
new_op = op.copy()
params = [out.params for out in op.outputs]
if len(out_chunks) < len(op.outputs):
# axes are all higher than its ndim
for i, inp in enumerate(op.inputs):
if all(ax >= inp.ndim for ax in op.axes):
out_chunks.insert(i, inp.chunks)
out_nsplits.insert(i, inp.nsplits)
assert len(out_chunks) == len(op.outputs)
for i, param, chunks, ns in zip(itertools.count(), params, out_chunks, out_nsplits):
param["chunks"] = chunks
param["nsplits"] = ns
param["_position_"] = i
return new_op.new_tileables(op.inputs, kws=params)
|
def tile(cls, op):
inputs = op.inputs
check_chunks_unknown_shape(inputs, TilesError)
axis_to_nsplits = defaultdict(list)
has_dataframe = any(
output_type == OutputType.dataframe for output_type in op.output_types
)
for ax in op.axes:
if has_dataframe and ax == 1:
# if DataFrame exists, for the columns axis,
# we only allow 1 chunk to ensure the columns consistent
axis_to_nsplits[ax].append((inputs[0].shape[ax],))
continue
for inp in inputs:
if ax < inp.ndim:
axis_to_nsplits[ax].append(inp.nsplits[ax])
ax_nsplit = {ax: decide_unify_split(*ns) for ax, ns in axis_to_nsplits.items()}
inputs = [cls._safe_rechunk(inp, ax_nsplit) for inp in inputs]
mapper_seeds = [None] * len(op.axes)
reducer_seeds = [None] * len(op.axes)
for i, ax in enumerate(op.axes):
rs = np.random.RandomState(op.seeds[i])
size = len(ax_nsplit[ax])
if size > 1:
mapper_seeds[i] = gen_random_seeds(size, rs)
reducer_seeds[i] = gen_random_seeds(size, rs)
else:
mapper_seeds[i] = reducer_seeds[i] = [op.seeds[i]] * size
out_chunks = []
out_nsplits = []
for output_type, inp, oup in zip(op.output_types, inputs, op.outputs):
inp_axes = tuple(ax for ax in op.axes if ax < inp.ndim)
reduce_sizes = tuple(inp.chunk_shape[ax] for ax in inp_axes)
output_types = [output_type]
if len(inp_axes) == 0:
continue
nsplits = list(inp.nsplits)
for ax in inp_axes:
cs = len(nsplits[ax])
if cs > 1:
nsplits[ax] = (np.nan,) * cs
out_nsplits.append(tuple(nsplits))
if all(reduce_size == 1 for reduce_size in reduce_sizes):
# no need to do shuffle
chunks = []
for c in inp.chunks:
chunk_op = LearnShuffle(
axes=inp_axes,
seeds=op.seeds[: len(inp_axes)],
output_types=output_types,
)
params = cls._calc_chunk_params(
c, inp_axes, inp.chunk_shape, oup, output_type, chunk_op, True
)
out_chunk = chunk_op.new_chunk([c], kws=[params])
chunks.append(out_chunk)
out_chunks.append(chunks)
continue
if inp.ndim > 1:
left_chunk_shape = [
s for ax, s in enumerate(inp.chunk_shape) if ax not in inp_axes
]
idx_iter = itertools.product(*[range(s) for s in left_chunk_shape])
else:
idx_iter = [()]
reduce_chunks = []
out_chunks.append(reduce_chunks)
for idx in idx_iter:
map_chunks = []
for reducer_inds in itertools.product(*[range(s) for s in reduce_sizes]):
inp_index = list(idx)
for ax, reducer_ind in zip(inp_axes, reducer_inds):
inp_index.insert(ax, reducer_ind)
inp_index = tuple(inp_index)
in_chunk = inp.cix[inp_index]
params = in_chunk.params
map_chunk_op = LearnShuffle(
stage=OperandStage.map,
output_types=output_types,
axes=inp_axes,
seeds=tuple(
mapper_seeds[j][in_chunk.index[ax]]
for j, ax in enumerate(inp_axes)
),
reduce_sizes=reduce_sizes,
)
map_chunk = map_chunk_op.new_chunk([in_chunk], **params)
map_chunks.append(map_chunk)
proxy_chunk = LearnShuffleProxy(_tensor_keys=[inp.key]).new_chunk(
map_chunks
)
reduce_axes = tuple(
ax for j, ax in enumerate(inp_axes) if reduce_sizes[j] > 1
)
reduce_sizes_ = tuple(rs for rs in reduce_sizes if rs > 1)
for c in map_chunks:
shuffle_key = ",".join(str(idx) for idx in c.index)
chunk_op = LearnShuffle(
stage=OperandStage.reduce,
output_types=output_types,
axes=reduce_axes,
seeds=tuple(
reducer_seeds[j][c.index[ax]]
for j, ax in enumerate(inp_axes)
if reduce_sizes[j] > 1
),
reduce_sizes=reduce_sizes_,
shuffle_key=shuffle_key,
)
params = cls._calc_chunk_params(
c, inp_axes, inp.chunk_shape, oup, output_type, chunk_op, False
)
reduce_chunk = chunk_op.new_chunk([proxy_chunk], kws=[params])
reduce_chunks.append(reduce_chunk)
new_op = op.copy()
params = [out.params for out in op.outputs]
if len(out_chunks) < len(op.outputs):
# axes are all higher than its ndim
for i, inp in enumerate(op.inputs):
if all(ax >= inp.ndim for ax in op.axes):
out_chunks.insert(i, inp.chunks)
out_nsplits.insert(i, inp.nsplits)
assert len(out_chunks) == len(op.outputs)
for i, param, chunks, ns in zip(itertools.count(), params, out_chunks, out_nsplits):
param["chunks"] = chunks
param["nsplits"] = ns
param["_position_"] = i
return new_op.new_tileables(op.inputs, kws=params)
|
https://github.com/mars-project/mars/issues/1930
|
SCH 2021-01-26 14:35:13,477 Mars Scheduler started in standalone mode.
SCH 2021-01-26 14:35:13,477 Actor s:h1:SchedulerClusterInfoActor running in process 72974
SCH 2021-01-26 14:35:13,478 Actor s:h1:ChunkMetaActor running in process 72974 at 127.0.0.1:27341
SCH 2021-01-26 14:35:13,478 Actor s:h1:SessionManagerActor running in process 72974
SCH 2021-01-26 14:35:13,478 Actor s:h1:ResourceActor running in process 72974
SCH 2021-01-26 14:35:13,479 Actor s:h1:NodeInfoActor running in process 72974
WOR1 2021-01-26 14:35:13,484 Setting soft limit to 12.80G.
WOR0 2021-01-26 14:35:13,484 Setting soft limit to 12.80G.
../src/plasma/store.cc:../src/plasma/store.cc1274: Allowing the Plasma store to use up to 0.0104858GB of memory.
:../src/plasma/store.cc:1274: 1297Allowing the Plasma store to use up to : Starting object store with directory /tmp and huge page support disabled
0.0104858GB of memory.
../src/plasma/store.cc:1297: Starting object store with directory /tmp and huge page support disabled
WOR1 2021-01-26 14:35:13,650 Actor w:0:WorkerClusterInfoActor running in process 72983
WOR0 2021-01-26 14:35:13,650 Actor w:0:WorkerClusterInfoActor running in process 72984
WOR0 2021-01-26 14:35:13,654 Actor w:0:WorkerDaemonActor running in process 72984
WOR1 2021-01-26 14:35:13,654 Actor w:0:WorkerDaemonActor running in process 72983
WOR1 2021-01-26 14:35:13,669 Actor w:0:StatusActor running in process 72983
WOR0 2021-01-26 14:35:13,669 Actor w:0:StatusActor running in process 72984
WOR0 2021-01-26 14:35:13,671 Actor w:0:StatusReporterActor running in process 72984
WOR1 2021-01-26 14:35:13,672 Actor w:0:StatusReporterActor running in process 72983
WOR0 2021-01-26 14:35:13,674 Actor w:0:MemQuotaActor running in process 72984
WOR1 2021-01-26 14:35:13,674 Actor w:0:MemQuotaActor running in process 72983
WOR0 2021-01-26 14:35:13,676 Actor w:0:StorageManagerActor running in process 72984
WOR1 2021-01-26 14:35:13,676 Actor w:0:StorageManagerActor running in process 72983
WOR0 2021-01-26 14:35:13,678 Actor w:0:SharedHolderActor running in process 72984
WOR1 2021-01-26 14:35:13,678 Actor w:0:SharedHolderActor running in process 72983
WOR1 2021-01-26 14:35:13,696 Detected actual plasma store size: 10.00M
WOR1 2021-01-26 14:35:13,697 Actor w:0:DispatchActor running in process 72983
WOR0 2021-01-26 14:35:13,697 Detected actual plasma store size: 10.00M
WOR0 2021-01-26 14:35:13,699 Actor w:0:DispatchActor running in process 72984
WOR1 2021-01-26 14:35:13,699 Actor w:0:EventsActor running in process 72983
WOR1 2021-01-26 14:35:13,700 Actor w:0:ReceiverManagerActor running in process 72983
WOR0 2021-01-26 14:35:13,701 Actor w:0:EventsActor running in process 72984
WOR0 2021-01-26 14:35:13,703 Actor w:0:ReceiverManagerActor running in process 72984
WOR1 2021-01-26 14:35:13,703 Actor w:0:ExecutionActor running in process 72983
WOR0 2021-01-26 14:35:13,706 Actor w:0:ExecutionActor running in process 72984
WOR1 2021-01-26 14:35:13,711 Actor w:1:mars-cpu-calc running in process 72986
WOR0 2021-01-26 14:35:13,713 Actor w:1:mars-cpu-calc running in process 72985
WOR1 2021-01-26 14:35:13,722 Slot w:1:mars-cpu-calc registered for queue cpu on 127.0.0.1:58971
WOR0 2021-01-26 14:35:13,723 Slot w:1:mars-cpu-calc registered for queue cpu on 127.0.0.1:49412
WOR1 2021-01-26 14:35:13,726 Actor w:1:mars-inproc-holder running in process 72986
WOR0 2021-01-26 14:35:13,726 Actor w:1:mars-inproc-holder running in process 72985
WOR1 2021-01-26 14:35:13,732 Actor w:1:io_runner_inproc running in process 72986
WOR0 2021-01-26 14:35:13,732 Actor w:1:io_runner_inproc running in process 72985
WOR1 2021-01-26 14:35:13,735 Actor w:2:mars-cpu-calc running in process 72988
WOR0 2021-01-26 14:35:13,738 Actor w:2:mars-cpu-calc running in process 72987
WOR1 2021-01-26 14:35:13,752 Slot w:2:mars-cpu-calc registered for queue cpu on 127.0.0.1:58971
WOR0 2021-01-26 14:35:13,754 Slot w:2:mars-cpu-calc registered for queue cpu on 127.0.0.1:49412
WOR1 2021-01-26 14:35:13,760 Actor w:2:mars-inproc-holder running in process 72988
WOR0 2021-01-26 14:35:13,761 Actor w:2:mars-inproc-holder running in process 72987
WOR1 2021-01-26 14:35:13,767 Actor w:2:io_runner_inproc running in process 72988
WOR0 2021-01-26 14:35:13,768 Actor w:2:io_runner_inproc running in process 72987
WOR1 2021-01-26 14:35:13,773 Actor w:3:mars-sender-0 running in process 72989
WOR0 2021-01-26 14:35:13,776 Actor w:3:mars-sender-0 running in process 72990
WOR0 2021-01-26 14:35:13,795 Slot w:3:mars-sender-0 registered for queue sender on 127.0.0.1:49412
WOR1 2021-01-26 14:35:13,795 Slot w:3:mars-sender-0 registered for queue sender on 127.0.0.1:58971
WOR0 2021-01-26 14:35:13,799 Actor w:4:mars-sender-1 running in process 72991
WOR1 2021-01-26 14:35:13,800 Actor w:4:mars-sender-1 running in process 72992
WOR0 2021-01-26 14:35:13,814 Slot w:4:mars-sender-1 registered for queue sender on 127.0.0.1:49412
WOR1 2021-01-26 14:35:13,815 Slot w:4:mars-sender-1 registered for queue sender on 127.0.0.1:58971
WOR0 2021-01-26 14:35:13,818 Actor w:5:mars-sender-2 running in process 72993
WOR1 2021-01-26 14:35:13,819 Actor w:5:mars-sender-2 running in process 72994
WOR0 2021-01-26 14:35:13,834 Slot w:5:mars-sender-2 registered for queue sender on 127.0.0.1:49412
WOR1 2021-01-26 14:35:13,835 Slot w:5:mars-sender-2 registered for queue sender on 127.0.0.1:58971
WOR0 2021-01-26 14:35:13,838 Actor w:6:mars-sender-3 running in process 72996
WOR1 2021-01-26 14:35:13,841 Actor w:6:mars-sender-3 running in process 72995
WOR0 2021-01-26 14:35:13,854 Slot w:6:mars-sender-3 registered for queue sender on 127.0.0.1:49412
WOR1 2021-01-26 14:35:13,855 Slot w:6:mars-sender-3 registered for queue sender on 127.0.0.1:58971
WOR0 2021-01-26 14:35:13,857 Slot w:3:mars-custom-log-fetch registered for queue custom_log on 127.0.0.1:49412
WOR1 2021-01-26 14:35:13,858 Slot w:3:mars-custom-log-fetch registered for queue custom_log on 127.0.0.1:58971
WOR0 2021-01-26 14:35:13,861 Slot w:4:mars-custom-log-fetch registered for queue custom_log on 127.0.0.1:49412
WOR1 2021-01-26 14:35:13,863 Slot w:4:mars-custom-log-fetch registered for queue custom_log on 127.0.0.1:58971
WOR0 2021-01-26 14:35:13,864 Slot w:5:mars-custom-log-fetch registered for queue custom_log on 127.0.0.1:49412
WOR1 2021-01-26 14:35:13,865 Slot w:5:mars-custom-log-fetch registered for queue custom_log on 127.0.0.1:58971
WOR0 2021-01-26 14:35:13,867 Slot w:6:mars-custom-log-fetch registered for queue custom_log on 127.0.0.1:49412
WOR1 2021-01-26 14:35:13,868 Slot w:6:mars-custom-log-fetch registered for queue custom_log on 127.0.0.1:58971
WOR0 2021-01-26 14:35:13,869 Actor w:3:mars-receiver-0 running in process 72990
WOR1 2021-01-26 14:35:13,870 Actor w:3:mars-receiver-0 running in process 72989
WOR0 2021-01-26 14:35:13,873 Slot w:3:mars-receiver-0 registered for queue receiver on 127.0.0.1:49412
WOR1 2021-01-26 14:35:13,873 Slot w:3:mars-receiver-0 registered for queue receiver on 127.0.0.1:58971
WOR0 2021-01-26 14:35:13,875 Actor w:3:mars-receiver-1 running in process 72990
WOR1 2021-01-26 14:35:13,875 Actor w:3:mars-receiver-1 running in process 72989
WOR0 2021-01-26 14:35:13,878 Slot w:3:mars-receiver-1 registered for queue receiver on 127.0.0.1:49412
WOR1 2021-01-26 14:35:13,878 Slot w:3:mars-receiver-1 registered for queue receiver on 127.0.0.1:58971
WOR0 2021-01-26 14:35:13,879 Actor w:4:mars-receiver-2 running in process 72991
WOR1 2021-01-26 14:35:13,880 Actor w:4:mars-receiver-2 running in process 72992
WOR0 2021-01-26 14:35:13,883 Slot w:4:mars-receiver-2 registered for queue receiver on 127.0.0.1:49412
WOR1 2021-01-26 14:35:13,884 Slot w:4:mars-receiver-2 registered for queue receiver on 127.0.0.1:58971
WOR0 2021-01-26 14:35:13,885 Actor w:4:mars-receiver-3 running in process 72991
WOR1 2021-01-26 14:35:13,885 Actor w:4:mars-receiver-3 running in process 72992
WOR0 2021-01-26 14:35:13,887 Slot w:4:mars-receiver-3 registered for queue receiver on 127.0.0.1:49412
WOR1 2021-01-26 14:35:13,888 Slot w:4:mars-receiver-3 registered for queue receiver on 127.0.0.1:58971
WOR0 2021-01-26 14:35:13,889 Actor w:5:mars-receiver-4 running in process 72993
WOR1 2021-01-26 14:35:13,890 Actor w:5:mars-receiver-4 running in process 72994
WOR0 2021-01-26 14:35:13,892 Slot w:5:mars-receiver-4 registered for queue receiver on 127.0.0.1:49412
WOR1 2021-01-26 14:35:13,893 Slot w:5:mars-receiver-4 registered for queue receiver on 127.0.0.1:58971
WOR0 2021-01-26 14:35:13,894 Actor w:5:mars-receiver-5 running in process 72993
WOR1 2021-01-26 14:35:13,895 Actor w:5:mars-receiver-5 running in process 72994
WOR0 2021-01-26 14:35:13,897 Slot w:5:mars-receiver-5 registered for queue receiver on 127.0.0.1:49412
WOR1 2021-01-26 14:35:13,898 Slot w:5:mars-receiver-5 registered for queue receiver on 127.0.0.1:58971
WOR0 2021-01-26 14:35:13,899 Actor w:6:mars-receiver-6 running in process 72996
WOR1 2021-01-26 14:35:13,899 Actor w:6:mars-receiver-6 running in process 72995
WOR0 2021-01-26 14:35:13,902 Slot w:6:mars-receiver-6 registered for queue receiver on 127.0.0.1:49412
WOR1 2021-01-26 14:35:13,903 Slot w:6:mars-receiver-6 registered for queue receiver on 127.0.0.1:58971
WOR0 2021-01-26 14:35:13,904 Actor w:6:mars-receiver-7 running in process 72996
WOR1 2021-01-26 14:35:13,904 Actor w:6:mars-receiver-7 running in process 72995
WOR0 2021-01-26 14:35:13,908 Slot w:6:mars-receiver-7 registered for queue receiver on 127.0.0.1:49412
WOR1 2021-01-26 14:35:13,908 Slot w:6:mars-receiver-7 registered for queue receiver on 127.0.0.1:58971
WOR1 2021-01-26 14:35:13,910 Actor w:0:mars-process-helper running in process 72983
WOR0 2021-01-26 14:35:13,910 Actor w:0:mars-process-helper running in process 72984
WOR1 2021-01-26 14:35:13,911 Slot w:0:mars-process-helper registered for queue process_helper on 127.0.0.1:58971
WOR0 2021-01-26 14:35:13,911 Slot w:0:mars-process-helper registered for queue process_helper on 127.0.0.1:49412
WOR1 2021-01-26 14:35:13,912 Actor w:1:mars-process-helper running in process 72986
WOR0 2021-01-26 14:35:13,913 Actor w:1:mars-process-helper running in process 72985
WOR0 2021-01-26 14:35:13,915 Slot w:1:mars-process-helper registered for queue process_helper on 127.0.0.1:49412
WOR1 2021-01-26 14:35:13,915 Slot w:1:mars-process-helper registered for queue process_helper on 127.0.0.1:58971
WOR1 2021-01-26 14:35:13,918 Actor w:2:mars-process-helper running in process 72988
WOR0 2021-01-26 14:35:13,918 Actor w:2:mars-process-helper running in process 72987
WOR0 2021-01-26 14:35:13,921 Slot w:2:mars-process-helper registered for queue process_helper on 127.0.0.1:49412
WOR1 2021-01-26 14:35:13,921 Slot w:2:mars-process-helper registered for queue process_helper on 127.0.0.1:58971
WOR0 2021-01-26 14:35:13,924 Actor w:3:mars-process-helper running in process 72990
WOR1 2021-01-26 14:35:13,924 Actor w:3:mars-process-helper running in process 72989
WOR0 2021-01-26 14:35:13,926 Slot w:3:mars-process-helper registered for queue process_helper on 127.0.0.1:49412
WOR1 2021-01-26 14:35:13,926 Slot w:3:mars-process-helper registered for queue process_helper on 127.0.0.1:58971
WOR0 2021-01-26 14:35:13,930 Actor w:4:mars-process-helper running in process 72991
WOR1 2021-01-26 14:35:13,930 Actor w:4:mars-process-helper running in process 72992
WOR0 2021-01-26 14:35:13,937 Slot w:4:mars-process-helper registered for queue process_helper on 127.0.0.1:49412
WOR1 2021-01-26 14:35:13,939 Slot w:4:mars-process-helper registered for queue process_helper on 127.0.0.1:58971
WOR0 2021-01-26 14:35:13,944 Actor w:5:mars-process-helper running in process 72993
WOR1 2021-01-26 14:35:13,945 Actor w:5:mars-process-helper running in process 72994
WOR0 2021-01-26 14:35:13,947 Slot w:5:mars-process-helper registered for queue process_helper on 127.0.0.1:49412
WOR1 2021-01-26 14:35:13,949 Slot w:5:mars-process-helper registered for queue process_helper on 127.0.0.1:58971
WOR0 2021-01-26 14:35:13,954 Actor w:6:mars-process-helper running in process 72996
WOR1 2021-01-26 14:35:13,954 Actor w:6:mars-process-helper running in process 72995
WOR0 2021-01-26 14:35:13,958 Slot w:6:mars-process-helper registered for queue process_helper on 127.0.0.1:49412
WOR1 2021-01-26 14:35:13,959 Slot w:6:mars-process-helper registered for queue process_helper on 127.0.0.1:58971
WOR0 2021-01-26 14:35:13,961 Actor w:0:ResultSenderActor running in process 72984
WOR1 2021-01-26 14:35:13,962 Actor w:0:ResultSenderActor running in process 72983
WOR0 2021-01-26 14:35:13,963 Actor w:0:ResultCopyActor running in process 72984
WOR1 2021-01-26 14:35:13,964 Actor w:0:ResultCopyActor running in process 72983
WARNING:bokeh.server.util:Host wildcard '*' will allow connections originating from multiple (or possibly all) hostnames or IPs. Use non-wildcard values to restrict access explicitly
WEB 2021-01-26 14:35:20,232 Mars UI started at 127.0.0.1:21288
SCH 2021-01-26 14:35:20,874 Actor s:h1:session$b5582d85e3fdfe2b0538c3f4a6751fb5 running in process 72974
SCH 2021-01-26 14:35:20,875 Actor s:h1:assigner$b5582d85e3fdfe2b0538c3f4a6751fb5 running in process 72974
SCH 2021-01-26 14:35:20,875 Actor s:0:AssignEvaluationActor$b5582d85e3fdfe2b0538c3f4a6751fb5 running in process 72974
SCH 2021-01-26 14:35:20,929 Actor s:0:graph$b5582d85e3fdfe2b0538c3f4a6751fb5$9bb221d1340d2ccc125c01ed42330c23 running in process 72974
SCH 2021-01-26 14:35:20,930 Graph 9bb221d1340d2ccc125c01ed42330c23 state from GraphState.UNSCHEDULED to GraphState.PREPARING.
SCH 2021-01-26 14:35:20,946 Begin preparing graph 9bb221d1340d2ccc125c01ed42330c23 with 9 tileables to chunk graph.
SCH 2021-01-26 14:35:20,968 Terminal chunk keys: {'ef47ef4796e60445c544df4932449c98', 'e5d3a577f04edd5ed13e4d1bbedd6835', '5aef6cf6563ec183673159b483273a69', '965705674a7b15205fbe9f303957fe2a', '113cf9a2d1747ee1d168588b6416755f', 'e319d64c44060fb66e67d1c6770d43f9'}
SCH 2021-01-26 14:35:20,971 Placing initial chunks for graph 9bb221d1340d2ccc125c01ed42330c23
SCH 2021-01-26 14:35:20,972 Worker assign quotas: {'127.0.0.1:49412': 1, '127.0.0.1:58971': 1}
SCH 2021-01-26 14:35:20,972 Scan spread ranges: {}
SCH 2021-01-26 14:35:20,973 Creating operand actors for graph 9bb221d1340d2ccc125c01ed42330c23
SCH 2021-01-26 14:35:20,979 Operand actor creation progress: 1 / 18
SCH 2021-01-26 14:35:20,982 Operand actor creation progress: 2 / 18
SCH 2021-01-26 14:35:20,983 Unexpected exception occurred in GraphActor.get_executable_operand_dag. op_key=241a38623560db72c868622ae4cf87da
Traceback (most recent call last):
File "~/Code/mars/mars/scheduler/graph.py", line 830, in get_executable_operand_dag
inp_chunk = input_mapping[(inp.key, inp.id)]
KeyError: ('8501fa868db396f18599c9990f1f1c2a', '140271835165984')
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "~/Code/mars/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "~/Code/mars/mars/scheduler/graph.py", line 833, in get_executable_operand_dag
= build_fetch_chunk(inp, input_chunk_keys).data
File "~/Code/mars/mars/utils.py", line 581, in build_fetch_chunk
op = chunk_op.get_fetch_op_cls(chunk)(to_fetch_keys=to_fetch_keys, to_fetch_idxes=to_fetch_idxes)
File "~/Code/mars/mars/operands.py", line 551, in _inner
return cls(output_types=output_types, **kw)
TypeError: 'NoneType' object is not callable
Traceback (most recent call last):
File "~/Code/mars/mars/scheduler/graph.py", line 830, in get_executable_operand_dag
inp_chunk = input_mapping[(inp.key, inp.id)]
KeyError: ('8501fa868db396f18599c9990f1f1c2a', '140271835165984')
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "~/miniconda3/lib/python3.8/site-packages/gevent/threadpool.py", line 167, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 171, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch_sem.inner
File "~/Code/mars/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "~/Code/mars/mars/scheduler/graph.py", line 833, in get_executable_operand_dag
= build_fetch_chunk(inp, input_chunk_keys).data
File "~/Code/mars/mars/utils.py", line 581, in build_fetch_chunk
op = chunk_op.get_fetch_op_cls(chunk)(to_fetch_keys=to_fetch_keys, to_fetch_idxes=to_fetch_idxes)
File "~/Code/mars/mars/operands.py", line 551, in _inner
return cls(output_types=output_types, **kw)
TypeError: 'NoneType' object is not callable
2021-01-26T06:35:20Z (<ThreadPoolWorker at 0x7f939500b930 thread_ident=0x700007417000 threadpool-hub=<Hub at 0x7f9394dbad00 thread_ident=0x119739dc0>>, <cyfunction GeventThreadPool._wrap_watch_sem.<locals>.inner at 0x7f9394ea2790>) failed with TypeError
|
KeyError
|
def _replace_copied_tilebale(self, graph):
if len(self._optimizer_context) == 0:
return graph
new_graph = DAG()
replaced_tileables = weakref.WeakKeyDictionary()
for n in graph.topological_iter():
if graph.count_predecessors(n) == 0:
if n in self._optimizer_context and all(
suc in self._optimizer_context for suc in graph.successors(n)
):
replaced_tileables[n] = new_node = self._optimizer_context[n]
else:
new_node = n
elif any(inp in replaced_tileables for inp in n.inputs) or any(
inp not in new_graph for inp in n.inputs
):
new_inputs = []
for i in n.inputs:
if i in replaced_tileables:
new_inputs.append(replaced_tileables[i])
elif i not in graph:
new_inputs.append(self._optimizer_context[i])
else:
new_inputs.append(i)
new_tileables = copy_tileables(n.op.outputs, inputs=new_inputs)
for t, new_t in zip(n.op.outputs, new_tileables):
replaced_tileables[t] = new_t.data
if t is n:
new_node = new_t.data
else:
new_node = n
new_graph.add_node(new_node)
for inp in new_node.inputs:
new_graph.add_node(inp)
new_graph.add_edge(inp, new_node)
self._optimizer_context.update(replaced_tileables)
return new_graph
|
def _replace_copied_tilebale(self, graph):
if len(self._optimizer_context) == 0:
return graph
new_graph = DAG()
replaced_tileables = weakref.WeakKeyDictionary()
for n in graph.topological_iter():
if graph.count_predecessors(n) == 0:
if n in self._optimizer_context and all(
suc in self._optimizer_context for suc in graph.successors(n)
):
replaced_tileables[n] = new_node = self._optimizer_context[n]
else:
new_node = n
elif any(inp in replaced_tileables for inp in n.inputs):
new_inputs = [replaced_tileables.get(i, i) for i in n.inputs]
new_tileables = copy_tileables(n.op.outputs, inputs=new_inputs)
for t, new_t in zip(n.op.outputs, new_tileables):
replaced_tileables[t] = new_t.data
if t is n:
new_node = new_t.data
else:
new_node = n
new_graph.add_node(new_node)
for inp in new_node.inputs:
new_graph.add_node(inp)
new_graph.add_edge(inp, new_node)
self._optimizer_context.update(replaced_tileables)
return new_graph
|
https://github.com/mars-project/mars/issues/1923
|
In [9]: d = md.read_csv('Downloads/test.csv')
In [10]: (d.head() + 1).execute()
Unexpected exception occurred in GraphActor.prepare_graph.
Traceback (most recent call last):
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 666, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 348, in build
tileables, tileable_graph=tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 191, in _tile
inputs=[cache[inp] for inp in tileable_data.inputs],
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 191, in <listcomp>
inputs=[cache[inp] for inp in tileable_data.inputs],
File "/Users/hekaisheng/miniconda3/envs/py3.7.2/lib/python3.7/weakref.py", line 394, in __getitem__
return self.data[ref(key)]
KeyError: <weakref at 0x192f4965e8; to 'DataFrameData' at 0x192f4a2048>
Failed to start graph execution.
Traceback (most recent call last):
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 411, in _execute_graph
self.prepare_graph(compose=compose)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 666, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 348, in build
tileables, tileable_graph=tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 191, in _tile
inputs=[cache[inp] for inp in tileable_data.inputs],
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 191, in <listcomp>
inputs=[cache[inp] for inp in tileable_data.inputs],
File "/Users/hekaisheng/miniconda3/envs/py3.7.2/lib/python3.7/weakref.py", line 394, in __getitem__
return self.data[ref(key)]
KeyError: <weakref at 0x192f4965e8; to 'DataFrameData' at 0x192f4a2048>
Unexpected exception occurred in GraphActor.get_chunk_graph.
Traceback (most recent call last):
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 471, in get_chunk_graph
raise GraphNotExists from None
mars.errors.GraphNotExists
Unexpected exception occurred in GraphActor.execute_graph.
Traceback (most recent call last):
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 383, in execute_graph
self._execute_graph(compose=compose)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 411, in _execute_graph
self.prepare_graph(compose=compose)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 666, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 348, in build
tileables, tileable_graph=tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 191, in _tile
inputs=[cache[inp] for inp in tileable_data.inputs],
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 191, in <listcomp>
inputs=[cache[inp] for inp in tileable_data.inputs],
File "/Users/hekaisheng/miniconda3/envs/py3.7.2/lib/python3.7/weakref.py", line 394, in __getitem__
return self.data[ref(key)]
KeyError: <weakref at 0x192f4965e8; to 'DataFrameData' at 0x192f4a2048>
Traceback (most recent call last):
File "src/gevent/greenlet.py", line 854, in gevent._gevent_cgreenlet.Greenlet.run
File "mars/actors/pool/gevent_pool.pyx", line 90, in mars.actors.pool.gevent_pool.ActorExecutionContext.fire_run
File "mars/actors/pool/gevent_pool.pyx", line 93, in mars.actors.pool.gevent_pool.ActorExecutionContext.fire_run
File "mars/actors/pool/gevent_pool.pyx", line 104, in mars.actors.pool.gevent_pool.ActorExecutionContext.fire_run
File "mars/actors/pool/gevent_pool.pyx", line 98, in mars.actors.pool.gevent_pool.ActorExecutionContext.fire_run
File "mars/actors/core.pyx", line 112, in mars.actors.core._FunctionActor.on_receive
File "mars/actors/core.pyx", line 114, in mars.actors.core._FunctionActor.on_receive
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 383, in execute_graph
self._execute_graph(compose=compose)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 411, in _execute_graph
self.prepare_graph(compose=compose)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 666, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 348, in build
tileables, tileable_graph=tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 191, in _tile
inputs=[cache[inp] for inp in tileable_data.inputs],
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 191, in <listcomp>
inputs=[cache[inp] for inp in tileable_data.inputs],
File "/Users/hekaisheng/miniconda3/envs/py3.7.2/lib/python3.7/weakref.py", line 394, in __getitem__
return self.data[ref(key)]
KeyError: <weakref at 0x192f4965e8; to 'DataFrameData' at 0x192f4a2048>
2021-01-25T06:20:24Z <Greenlet at 0x192f402ae8: <built-in method fire_run of mars.actors.pool.gevent_pool.ActorExecutionContext object at 0x192f391598>> failed with KeyError
Traceback (most recent call last):
File "src/gevent/greenlet.py", line 854, in gevent._gevent_cgreenlet.Greenlet.run
File "mars/actors/pool/gevent_pool.pyx", line 548, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send_remote
File "mars/actors/pool/gevent_pool.pyx", line 552, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send_remote
File "mars/actors/pool/gevent_pool.pyx", line 559, in mars.actors.pool.gevent_pool.ActorRemoteHelper._send_remote
File "mars/actors/pool/gevent_pool.pyx", line 1036, in mars.actors.pool.gevent_pool.Communicator._on_receive_send
File "mars/actors/pool/messages.pyx", line 747, in mars.actors.pool.messages.pack_result_message
_pack_object(result, buf)
File "mars/actors/pool/messages.pyx", line 305, in mars.actors.pool.messages._pack_object
m = dumps(obj)
TypeError: can't pickle weakref objects
2021-01-25T06:20:24Z <Greenlet at 0x1932ad0e18: <built-in method _send_remote of mars.actors.pool.gevent_pool.ActorRemoteHelper object at 0x1932cf5ea8>('0.0.0.0:44189', [bytearray(b'\x05\x01 \x00\x00\x00\x00\x00\x00\x00)> failed with TypeError
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-10-9da2df47dde0> in <module>
----> 1 (d.head() + 1).execute()
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
644
645 if wait:
--> 646 return run()
647 else:
648 thread_executor = ThreadPoolExecutor(1)
~/Documents/mars_dev/mars/mars/core.py in run()
640
641 def run():
--> 642 self.data.execute(session, **kw)
643 return self
644
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
377
378 if wait:
--> 379 return run()
380 else:
381 # leverage ThreadPoolExecutor to submit task,
~/Documents/mars_dev/mars/mars/core.py in run()
372 def run():
373 # no more fetch, thus just fire run
--> 374 session.run(self, **kw)
375 # return Tileable or ExecutableTuple itself
376 return self
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
503 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
504 for t in tileables)
--> 505 result = self._sess.run(*tileables, **kw)
506
507 for t in tileables:
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
316 break
317 if graph_state == GraphState.FAILED:
--> 318 exc_info = self._api.get_graph_exc_info(self._session_id, graph_key)
319 if exc_info is not None:
320 exc = exc_info[1].with_traceback(exc_info[2])
~/Documents/mars_dev/mars/mars/api.py in get_graph_exc_info(self, session_id, graph_key)
193 graph_meta_ref = self.get_graph_meta_ref(session_id, graph_key)
194 try:
--> 195 return graph_meta_ref.get_exc_info()
196 except ActorNotExist:
197 raise GraphNotExists
~/Documents/mars_dev/mars/mars/actors/core.pyx in mars.actors.core.ActorRef.__getattr__._mt_call()
~/Documents/mars_dev/mars/mars/actors/core.pyx in mars.actors.core.ActorRef.send()
~/Documents/mars_dev/mars/mars/actors/pool/gevent_pool.pyx in mars.actors.pool.gevent_pool.ActorRemoteHelper.send()
~/Documents/mars_dev/mars/mars/actors/pool/gevent_pool.pyx in mars.actors.pool.gevent_pool.ActorRemoteHelper.send()
~/Documents/mars_dev/mars/mars/actors/pool/gevent_pool.pyx in mars.actors.pool.gevent_pool.ActorRemoteHelper._send()
~/miniconda3/envs/py3.7.2/lib/python3.7/site-packages/gevent/pool.py in apply(self, func, args, kwds)
159 if self._apply_immediately():
160 return func(*args, **kwds)
--> 161 return self.spawn(func, *args, **kwds).get()
162
163 def __map(self, func, iterable):
~/miniconda3/envs/py3.7.2/lib/python3.7/site-packages/gevent/_gevent_cgreenlet.cpython-37m-darwin.so in gevent._gevent_cgreenlet.Greenlet.get()
~/miniconda3/envs/py3.7.2/lib/python3.7/site-packages/gevent/_gevent_cgreenlet.cpython-37m-darwin.so in gevent._gevent_cgreenlet.Greenlet._raise_exception()
~/miniconda3/envs/py3.7.2/lib/python3.7/site-packages/gevent/_compat.py in reraise(t, value, tb)
63 def reraise(t, value, tb=None): # pylint:disable=unused-argument
64 if value.__traceback__ is not tb and tb is not None:
---> 65 raise value.with_traceback(tb)
66 raise value
67 def exc_clear():
~/miniconda3/envs/py3.7.2/lib/python3.7/site-packages/gevent/_gevent_cgreenlet.cpython-37m-darwin.so in gevent._gevent_cgreenlet.Greenlet.run()
~/Documents/mars_dev/mars/mars/actors/pool/gevent_pool.pyx in mars.actors.pool.gevent_pool.ActorRemoteHelper._send_remote()
~/Documents/mars_dev/mars/mars/actors/pool/gevent_pool.pyx in mars.actors.pool.gevent_pool.ActorRemoteHelper._send_remote()
~/Documents/mars_dev/mars/mars/actors/pool/gevent_pool.pyx in mars.actors.pool.gevent_pool.ActorRemoteHelper._send_remote()
~/Documents/mars_dev/mars/mars/actors/pool/gevent_pool.pyx in mars.actors.pool.gevent_pool.Communicator._on_receive_send()
~/Documents/mars_dev/mars/mars/actors/pool/messages.pyx in mars.actors.pool.messages.pack_result_message()
745 _pack_index(from_index, buf)
746 _pack_index(to_index, buf)
--> 747 _pack_object(result, buf)
748
749 if write is not None:
~/Documents/mars_dev/mars/mars/actors/pool/messages.pyx in mars.actors.pool.messages._pack_object()
303 else:
304 st = PICKLE
--> 305 m = dumps(obj)
306
307 _pack_byte(st, buf)
TypeError: can't pickle weakref objects
|
KeyError
|
def __init__(
self,
model_type=None,
data=None,
label=None,
sample_weight=None,
init_score=None,
eval_datas=None,
eval_labels=None,
eval_sample_weights=None,
eval_init_scores=None,
params=None,
kwds=None,
workers=None,
worker_id=None,
worker_ports=None,
tree_learner=None,
timeout=None,
**kw,
):
super().__init__(
_model_type=model_type,
_params=params,
_data=data,
_label=label,
_sample_weight=sample_weight,
_init_score=init_score,
_eval_datas=eval_datas,
_eval_labels=eval_labels,
_eval_sample_weights=eval_sample_weights,
_eval_init_scores=eval_init_scores,
_kwds=kwds,
_workers=workers,
_worker_id=worker_id,
_worker_ports=worker_ports,
_tree_learner=tree_learner,
_timeout=timeout,
**kw,
)
if self.output_types is None:
self.output_types = [OutputType.object]
|
def __init__(
self,
model_type=None,
data=None,
label=None,
sample_weight=None,
init_score=None,
eval_datas=None,
eval_labels=None,
eval_sample_weights=None,
eval_init_scores=None,
params=None,
kwds=None,
lgbm_endpoints=None,
lgbm_port=None,
tree_learner=None,
timeout=None,
**kw,
):
super().__init__(
_model_type=model_type,
_params=params,
_data=data,
_label=label,
_sample_weight=sample_weight,
_init_score=init_score,
_eval_datas=eval_datas,
_eval_labels=eval_labels,
_eval_sample_weights=eval_sample_weights,
_eval_init_scores=eval_init_scores,
_kwds=kwds,
_lgbm_endpoints=lgbm_endpoints,
_lgbm_port=lgbm_port,
_tree_learner=tree_learner,
_timeout=timeout,
**kw,
)
if self.output_types is None:
self.output_types = [OutputType.object]
|
https://github.com/mars-project/mars/issues/1917
|
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.6.2.zip/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/worker/calc.py", line 301, in <lambda>
.then(lambda context_dict: _start_calc(context_dict)) \
File "/home/admin/work/_public-mars-0.6.2.zip/mars/worker/calc.py", line 276, in _start_calc
return self._calc_results(session_id, graph_key, graph, context_dict, chunk_targets)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 377, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/worker/calc.py", line 200, in _calc_results
chunk_targets, retval=False).result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 127, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
result = fn(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/executor.py", line 649, in handle
return runner(results, op)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/learn/contrib/lightgbm/_train.py", line 304, in execute
eval_init_score=eval_init_score, **op.kwds)
File "/opt/conda/lib/python3.7/site-packages/lightgbm/sklearn.py", line 800, in fit
callbacks=callbacks)
File "/opt/conda/lib/python3.7/site-packages/lightgbm/sklearn.py", line 595, in fit
callbacks=callbacks)
File "/opt/conda/lib/python3.7/site-packages/lightgbm/engine.py", line 228, in train
booster = Booster(params=params, train_set=train_set)
File "/opt/conda/lib/python3.7/site-packages/lightgbm/basic.py", line 1659, in __init__
num_machines=params.get("num_machines", num_machines))
File "/opt/conda/lib/python3.7/site-packages/lightgbm/basic.py", line 1790, in set_network
ctypes.c_int(num_machines)))
File "/opt/conda/lib/python3.7/site-packages/lightgbm/basic.py", line 47, in _safe_call
raise LightGBMError(decode_string(_LIB.LGBM_GetLastError()))
lightgbm.basic.LightGBMError: Binding port 43458 failed
|
lightgbm.basic.LightGBMError
|
def _set_inputs(self, inputs):
super()._set_inputs(inputs)
it = iter(inputs)
for attr in ["_data", "_label", "_sample_weight", "_init_score"]:
if getattr(self, attr) is not None:
setattr(self, attr, next(it))
for attr in [
"_eval_datas",
"_eval_labels",
"_eval_sample_weights",
"_eval_init_scores",
]:
new_list = []
for c in getattr(self, attr, None) or []:
if c is not None:
new_list.append(next(it))
setattr(self, attr, new_list or None)
if self._worker_ports is not None:
self._worker_ports = next(it)
|
def _set_inputs(self, inputs):
super()._set_inputs(inputs)
it = iter(inputs)
for attr in ["_data", "_label", "_sample_weight", "_init_score"]:
if getattr(self, attr) is not None:
setattr(self, attr, next(it))
for attr in [
"_eval_datas",
"_eval_labels",
"_eval_sample_weights",
"_eval_init_scores",
]:
new_list = []
for c in getattr(self, attr, None) or []:
if c is not None:
new_list.append(next(it))
setattr(self, attr, new_list or None)
|
https://github.com/mars-project/mars/issues/1917
|
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.6.2.zip/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/worker/calc.py", line 301, in <lambda>
.then(lambda context_dict: _start_calc(context_dict)) \
File "/home/admin/work/_public-mars-0.6.2.zip/mars/worker/calc.py", line 276, in _start_calc
return self._calc_results(session_id, graph_key, graph, context_dict, chunk_targets)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 377, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/worker/calc.py", line 200, in _calc_results
chunk_targets, retval=False).result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 127, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
result = fn(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/executor.py", line 649, in handle
return runner(results, op)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/learn/contrib/lightgbm/_train.py", line 304, in execute
eval_init_score=eval_init_score, **op.kwds)
File "/opt/conda/lib/python3.7/site-packages/lightgbm/sklearn.py", line 800, in fit
callbacks=callbacks)
File "/opt/conda/lib/python3.7/site-packages/lightgbm/sklearn.py", line 595, in fit
callbacks=callbacks)
File "/opt/conda/lib/python3.7/site-packages/lightgbm/engine.py", line 228, in train
booster = Booster(params=params, train_set=train_set)
File "/opt/conda/lib/python3.7/site-packages/lightgbm/basic.py", line 1659, in __init__
num_machines=params.get("num_machines", num_machines))
File "/opt/conda/lib/python3.7/site-packages/lightgbm/basic.py", line 1790, in set_network
ctypes.c_int(num_machines)))
File "/opt/conda/lib/python3.7/site-packages/lightgbm/basic.py", line 47, in _safe_call
raise LightGBMError(decode_string(_LIB.LGBM_GetLastError()))
lightgbm.basic.LightGBMError: Binding port 43458 failed
|
lightgbm.basic.LightGBMError
|
def tile(cls, op: "LGBMTrain"):
ctx = get_context()
if ctx.running_mode != RunningMode.distributed:
assert all(len(inp.chunks) == 1 for inp in op.inputs)
chunk_op = op.copy().reset_key()
out_chunk = chunk_op.new_chunk(
[inp.chunks[0] for inp in op.inputs], shape=(1,), index=(0,)
)
new_op = op.copy()
return new_op.new_tileables(op.inputs, chunks=[out_chunk], nsplits=((1,),))
else:
data = op.data
worker_to_args = defaultdict(dict)
workers = cls._get_data_chunks_workers(ctx, data)
for arg in ["_data", "_label", "_sample_weight", "_init_score"]:
if getattr(op, arg) is not None:
for worker, chunk in cls._concat_chunks_by_worker(
getattr(op, arg).chunks, workers
).items():
worker_to_args[worker][arg] = chunk
if op.eval_datas:
eval_workers_list = [
cls._get_data_chunks_workers(ctx, d) for d in op.eval_datas
]
extra_workers = reduce(
operator.or_, (set(w) for w in eval_workers_list)
) - set(workers)
worker_remap = dict(zip(extra_workers, itertools.cycle(workers)))
if worker_remap:
eval_workers_list = [
[worker_remap.get(w, w) for w in wl] for wl in eval_workers_list
]
for arg in [
"_eval_datas",
"_eval_labels",
"_eval_sample_weights",
"_eval_init_scores",
]:
if getattr(op, arg):
for tileable, eval_workers in zip(
getattr(op, arg), eval_workers_list
):
for worker, chunk in cls._concat_chunks_by_worker(
tileable.chunks, eval_workers
).items():
if arg not in worker_to_args[worker]:
worker_to_args[worker][arg] = []
worker_to_args[worker][arg].append(chunk)
out_chunks = []
workers = list(set(workers))
for worker_id, worker in enumerate(workers):
chunk_op = op.copy().reset_key()
chunk_op._expect_worker = worker
input_chunks = []
concat_args = worker_to_args.get(worker, {})
for arg in [
"_data",
"_label",
"_sample_weight",
"_init_score",
"_eval_datas",
"_eval_labels",
"_eval_sample_weights",
"_eval_init_scores",
]:
arg_val = getattr(op, arg)
if arg_val:
arg_chunk = concat_args.get(arg)
setattr(chunk_op, arg, arg_chunk)
if isinstance(arg_chunk, list):
input_chunks.extend(arg_chunk)
else:
input_chunks.append(arg_chunk)
worker_ports_chunk = (
collect_ports(workers, op.data)._inplace_tile().chunks[0]
)
input_chunks.append(worker_ports_chunk)
chunk_op._workers = workers
chunk_op._worker_ports = worker_ports_chunk
chunk_op._worker_id = worker_id
data_chunk = concat_args["_data"]
out_chunk = chunk_op.new_chunk(
input_chunks, shape=(np.nan,), index=data_chunk.index[:1]
)
out_chunks.append(out_chunk)
new_op = op.copy()
return new_op.new_tileables(
op.inputs, chunks=out_chunks, nsplits=((np.nan for _ in out_chunks),)
)
|
def tile(cls, op: "LGBMTrain"):
ctx = get_context()
if ctx.running_mode != RunningMode.distributed:
assert all(len(inp.chunks) == 1 for inp in op.inputs)
chunk_op = op.copy().reset_key()
out_chunk = chunk_op.new_chunk(
[inp.chunks[0] for inp in op.inputs], shape=(1,), index=(0,)
)
new_op = op.copy()
return new_op.new_tileables(op.inputs, chunks=[out_chunk], nsplits=((1,),))
else:
data = op.data
worker_to_args = defaultdict(dict)
workers = cls._get_data_chunks_workers(ctx, data)
worker_to_endpoint = cls._build_lgbm_endpoints(workers, op.lgbm_port)
worker_endpoints = list(worker_to_endpoint.values())
for arg in ["_data", "_label", "_sample_weight", "_init_score"]:
if getattr(op, arg) is not None:
for worker, chunk in cls._concat_chunks_by_worker(
getattr(op, arg).chunks, workers
).items():
worker_to_args[worker][arg] = chunk
if op.eval_datas:
eval_workers_list = [
cls._get_data_chunks_workers(ctx, d) for d in op.eval_datas
]
extra_workers = reduce(
operator.or_, (set(w) for w in eval_workers_list)
) - set(workers)
worker_remap = dict(zip(extra_workers, itertools.cycle(workers)))
if worker_remap:
eval_workers_list = [
[worker_remap.get(w, w) for w in wl] for wl in eval_workers_list
]
for arg in [
"_eval_datas",
"_eval_labels",
"_eval_sample_weights",
"_eval_init_scores",
]:
if getattr(op, arg):
for tileable, eval_workers in zip(
getattr(op, arg), eval_workers_list
):
for worker, chunk in cls._concat_chunks_by_worker(
tileable.chunks, eval_workers
).items():
if arg not in worker_to_args[worker]:
worker_to_args[worker][arg] = []
worker_to_args[worker][arg].append(chunk)
out_chunks = []
for worker in workers:
chunk_op = op.copy().reset_key()
chunk_op._expect_worker = worker
chunk_op._lgbm_endpoints = worker_endpoints
chunk_op._lgbm_port = int(worker_to_endpoint[worker].rsplit(":", 1)[-1])
input_chunks = []
concat_args = worker_to_args.get(worker, {})
for arg in [
"_data",
"_label",
"_sample_weight",
"_init_score",
"_eval_datas",
"_eval_labels",
"_eval_sample_weights",
"_eval_init_scores",
]:
arg_val = getattr(op, arg)
if arg_val:
arg_chunk = concat_args.get(arg)
setattr(chunk_op, arg, arg_chunk)
if isinstance(arg_chunk, list):
input_chunks.extend(arg_chunk)
else:
input_chunks.append(arg_chunk)
data_chunk = concat_args["_data"]
out_chunk = chunk_op.new_chunk(
input_chunks, shape=(np.nan,), index=data_chunk.index[:1]
)
out_chunks.append(out_chunk)
new_op = op.copy()
return new_op.new_tileables(
op.inputs, chunks=out_chunks, nsplits=((np.nan for _ in out_chunks),)
)
|
https://github.com/mars-project/mars/issues/1917
|
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.6.2.zip/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/worker/calc.py", line 301, in <lambda>
.then(lambda context_dict: _start_calc(context_dict)) \
File "/home/admin/work/_public-mars-0.6.2.zip/mars/worker/calc.py", line 276, in _start_calc
return self._calc_results(session_id, graph_key, graph, context_dict, chunk_targets)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 377, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/worker/calc.py", line 200, in _calc_results
chunk_targets, retval=False).result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 127, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
result = fn(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/executor.py", line 649, in handle
return runner(results, op)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/learn/contrib/lightgbm/_train.py", line 304, in execute
eval_init_score=eval_init_score, **op.kwds)
File "/opt/conda/lib/python3.7/site-packages/lightgbm/sklearn.py", line 800, in fit
callbacks=callbacks)
File "/opt/conda/lib/python3.7/site-packages/lightgbm/sklearn.py", line 595, in fit
callbacks=callbacks)
File "/opt/conda/lib/python3.7/site-packages/lightgbm/engine.py", line 228, in train
booster = Booster(params=params, train_set=train_set)
File "/opt/conda/lib/python3.7/site-packages/lightgbm/basic.py", line 1659, in __init__
num_machines=params.get("num_machines", num_machines))
File "/opt/conda/lib/python3.7/site-packages/lightgbm/basic.py", line 1790, in set_network
ctypes.c_int(num_machines)))
File "/opt/conda/lib/python3.7/site-packages/lightgbm/basic.py", line 47, in _safe_call
raise LightGBMError(decode_string(_LIB.LGBM_GetLastError()))
lightgbm.basic.LightGBMError: Binding port 43458 failed
|
lightgbm.basic.LightGBMError
|
def execute(cls, ctx, op: "LGBMTrain"):
if op.merge:
return super().execute(ctx, op)
from lightgbm.basic import _safe_call, _LIB
data_val = ctx[op.data.key]
data_val = data_val.spmatrix if hasattr(data_val, "spmatrix") else data_val
label_val = ctx[op.label.key]
sample_weight_val = (
ctx[op.sample_weight.key] if op.sample_weight is not None else None
)
init_score_val = ctx[op.init_score.key] if op.init_score is not None else None
if op.eval_datas is None:
eval_set, eval_sample_weight, eval_init_score = None, None, None
else:
eval_set, eval_sample_weight, eval_init_score = [], [], []
for data, label in zip(op.eval_datas, op.eval_labels):
data_eval = ctx[data.key]
data_eval = (
data_eval.spmatrix if hasattr(data_eval, "spmatrix") else data_eval
)
eval_set.append((data_eval, ctx[label.key]))
for weight in op.eval_sample_weights:
eval_sample_weight.append(ctx[weight.key] if weight is not None else None)
for score in op.eval_init_scores:
eval_init_score.append(ctx[score.key] if score is not None else None)
eval_set = eval_set or None
eval_sample_weight = eval_sample_weight or None
eval_init_score = eval_init_score or None
params = op.params.copy()
# if model is trained, remove unsupported parameters
params.pop("out_dtype_", None)
if ctx.running_mode == RunningMode.distributed:
worker_ports = ctx[op.worker_ports.key]
worker_ips = [worker.split(":", 1)[0] for worker in op.workers]
worker_endpoints = [
f"{worker}:{port}" for worker, port in zip(worker_ips, worker_ports)
]
params["machines"] = ",".join(worker_endpoints)
params["time_out"] = op.timeout
params["num_machines"] = len(worker_endpoints)
params["local_listen_port"] = worker_ports[op.worker_id]
if (op.tree_learner or "").lower() not in {"data", "feature", "voting"}:
logger.warning(
"Parameter tree_learner not set or set to incorrect value "
f'{op.tree_learner}, using "data" as default'
)
params["tree_learner"] = "data"
else:
params["tree_learner"] = op.tree_learner
try:
model_cls = get_model_cls_from_type(op.model_type)
model = model_cls(**params)
model.fit(
data_val,
label_val,
sample_weight=sample_weight_val,
init_score=init_score_val,
eval_set=eval_set,
eval_sample_weight=eval_sample_weight,
eval_init_score=eval_init_score,
**op.kwds,
)
if (
op.model_type == LGBMModelType.RANKER
or op.model_type == LGBMModelType.REGRESSOR
):
model.set_params(out_dtype_=np.dtype("float"))
elif hasattr(label_val, "dtype"):
model.set_params(out_dtype_=label_val.dtype)
else:
model.set_params(out_dtype_=label_val.dtypes[0])
ctx[op.outputs[0].key] = pickle.dumps(model)
finally:
_safe_call(_LIB.LGBM_NetworkFree())
|
def execute(cls, ctx, op: "LGBMTrain"):
if op.merge:
return super().execute(ctx, op)
from lightgbm.basic import _safe_call, _LIB
data_val = ctx[op.data.key]
data_val = data_val.spmatrix if hasattr(data_val, "spmatrix") else data_val
label_val = ctx[op.label.key]
sample_weight_val = (
ctx[op.sample_weight.key] if op.sample_weight is not None else None
)
init_score_val = ctx[op.init_score.key] if op.init_score is not None else None
if op.eval_datas is None:
eval_set, eval_sample_weight, eval_init_score = None, None, None
else:
eval_set, eval_sample_weight, eval_init_score = [], [], []
for data, label in zip(op.eval_datas, op.eval_labels):
data_eval = ctx[data.key]
data_eval = (
data_eval.spmatrix if hasattr(data_eval, "spmatrix") else data_eval
)
eval_set.append((data_eval, ctx[label.key]))
for weight in op.eval_sample_weights:
eval_sample_weight.append(ctx[weight.key] if weight is not None else None)
for score in op.eval_init_scores:
eval_init_score.append(ctx[score.key] if score is not None else None)
eval_set = eval_set or None
eval_sample_weight = eval_sample_weight or None
eval_init_score = eval_init_score or None
params = op.params.copy()
# if model is trained, remove unsupported parameters
params.pop("out_dtype_", None)
if ctx.running_mode == RunningMode.distributed:
params["machines"] = ",".join(op.lgbm_endpoints)
params["time_out"] = op.timeout
params["num_machines"] = len(op.lgbm_endpoints)
params["local_listen_port"] = op.lgbm_port
if (op.tree_learner or "").lower() not in {"data", "feature", "voting"}:
logger.warning(
"Parameter tree_learner not set or set to incorrect value "
f'{op.tree_learner}, using "data" as default'
)
params["tree_learner"] = "data"
else:
params["tree_learner"] = op.tree_learner
try:
model_cls = get_model_cls_from_type(op.model_type)
model = model_cls(**params)
model.fit(
data_val,
label_val,
sample_weight=sample_weight_val,
init_score=init_score_val,
eval_set=eval_set,
eval_sample_weight=eval_sample_weight,
eval_init_score=eval_init_score,
**op.kwds,
)
if (
op.model_type == LGBMModelType.RANKER
or op.model_type == LGBMModelType.REGRESSOR
):
model.set_params(out_dtype_=np.dtype("float"))
elif hasattr(label_val, "dtype"):
model.set_params(out_dtype_=label_val.dtype)
else:
model.set_params(out_dtype_=label_val.dtypes[0])
ctx[op.outputs[0].key] = pickle.dumps(model)
finally:
_safe_call(_LIB.LGBM_NetworkFree())
|
https://github.com/mars-project/mars/issues/1917
|
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.6.2.zip/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/worker/calc.py", line 301, in <lambda>
.then(lambda context_dict: _start_calc(context_dict)) \
File "/home/admin/work/_public-mars-0.6.2.zip/mars/worker/calc.py", line 276, in _start_calc
return self._calc_results(session_id, graph_key, graph, context_dict, chunk_targets)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 377, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/worker/calc.py", line 200, in _calc_results
chunk_targets, retval=False).result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 127, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
result = fn(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/executor.py", line 649, in handle
return runner(results, op)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/learn/contrib/lightgbm/_train.py", line 304, in execute
eval_init_score=eval_init_score, **op.kwds)
File "/opt/conda/lib/python3.7/site-packages/lightgbm/sklearn.py", line 800, in fit
callbacks=callbacks)
File "/opt/conda/lib/python3.7/site-packages/lightgbm/sklearn.py", line 595, in fit
callbacks=callbacks)
File "/opt/conda/lib/python3.7/site-packages/lightgbm/engine.py", line 228, in train
booster = Booster(params=params, train_set=train_set)
File "/opt/conda/lib/python3.7/site-packages/lightgbm/basic.py", line 1659, in __init__
num_machines=params.get("num_machines", num_machines))
File "/opt/conda/lib/python3.7/site-packages/lightgbm/basic.py", line 1790, in set_network
ctypes.c_int(num_machines)))
File "/opt/conda/lib/python3.7/site-packages/lightgbm/basic.py", line 47, in _safe_call
raise LightGBMError(decode_string(_LIB.LGBM_GetLastError()))
lightgbm.basic.LightGBMError: Binding port 43458 failed
|
lightgbm.basic.LightGBMError
|
def get_next_port(typ=None, occupy=True):
import psutil
try:
conns = psutil.net_connections()
typ = typ or socket.SOCK_STREAM
occupied = set(
sc.laddr.port
for sc in conns
if sc.type == typ and LOW_PORT_BOUND <= sc.laddr.port <= HIGH_PORT_BOUND
)
except psutil.AccessDenied:
occupied = _get_ports_from_netstat()
occupied.update(_local_occupied_ports)
randn = struct.unpack("<Q", os.urandom(8))[0]
idx = int(randn % (1 + HIGH_PORT_BOUND - LOW_PORT_BOUND - len(occupied)))
for i in range(LOW_PORT_BOUND, HIGH_PORT_BOUND + 1):
if i in occupied:
continue
if idx == 0:
if occupy:
_local_occupied_ports.add(i)
return i
idx -= 1
raise SystemError("No ports available.")
|
def get_next_port(typ=None):
import psutil
try:
conns = psutil.net_connections()
typ = typ or socket.SOCK_STREAM
occupied = set(
sc.laddr.port
for sc in conns
if sc.type == typ and LOW_PORT_BOUND <= sc.laddr.port <= HIGH_PORT_BOUND
)
except psutil.AccessDenied:
occupied = _get_ports_from_netstat()
occupied.update(_local_occupied_ports)
randn = struct.unpack("<Q", os.urandom(8))[0]
idx = int(randn % (1 + HIGH_PORT_BOUND - LOW_PORT_BOUND - len(occupied)))
for i in range(LOW_PORT_BOUND, HIGH_PORT_BOUND + 1):
if i in occupied:
continue
if idx == 0:
_local_occupied_ports.add(i)
return i
idx -= 1
raise SystemError("No ports available.")
|
https://github.com/mars-project/mars/issues/1917
|
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.6.2.zip/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/worker/calc.py", line 301, in <lambda>
.then(lambda context_dict: _start_calc(context_dict)) \
File "/home/admin/work/_public-mars-0.6.2.zip/mars/worker/calc.py", line 276, in _start_calc
return self._calc_results(session_id, graph_key, graph, context_dict, chunk_targets)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 377, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/worker/calc.py", line 200, in _calc_results
chunk_targets, retval=False).result()
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
raise value.with_traceback(tb)
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
thread_result.set(func(*args, **kwargs))
File "mars/actors/pool/gevent_pool.pyx", line 127, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
result = fn(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/executor.py", line 579, in execute
future.result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
return self.__get_result()
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
raise self._exception
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/executor.py", line 649, in handle
return runner(results, op)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/learn/contrib/lightgbm/_train.py", line 304, in execute
eval_init_score=eval_init_score, **op.kwds)
File "/opt/conda/lib/python3.7/site-packages/lightgbm/sklearn.py", line 800, in fit
callbacks=callbacks)
File "/opt/conda/lib/python3.7/site-packages/lightgbm/sklearn.py", line 595, in fit
callbacks=callbacks)
File "/opt/conda/lib/python3.7/site-packages/lightgbm/engine.py", line 228, in train
booster = Booster(params=params, train_set=train_set)
File "/opt/conda/lib/python3.7/site-packages/lightgbm/basic.py", line 1659, in __init__
num_machines=params.get("num_machines", num_machines))
File "/opt/conda/lib/python3.7/site-packages/lightgbm/basic.py", line 1790, in set_network
ctypes.c_int(num_machines)))
File "/opt/conda/lib/python3.7/site-packages/lightgbm/basic.py", line 47, in _safe_call
raise LightGBMError(decode_string(_LIB.LGBM_GetLastError()))
lightgbm.basic.LightGBMError: Binding port 43458 failed
|
lightgbm.basic.LightGBMError
|
def rechunk(
a, chunk_size, threshold=None, chunk_size_limit=None, reassign_worker=False
):
if not any(pd.isna(s) for s in a.shape) and not a.is_coarse():
# do client check only when no unknown shape,
# real nsplits will be recalculated inside `tile`
chunk_size = _get_chunk_size(a, chunk_size)
if chunk_size == a.nsplits:
return a
op = DataFrameRechunk(
chunk_size=chunk_size,
threshold=threshold,
chunk_size_limit=chunk_size_limit,
reassign_worker=reassign_worker,
)
return op(a)
|
def rechunk(
a, chunk_size, threshold=None, chunk_size_limit=None, reassign_worker=False
):
if not any(pd.isna(s) for s in a.shape):
# do client check only when no unknown shape,
# real nsplits will be recalculated inside `tile`
chunk_size = _get_chunk_size(a, chunk_size)
if chunk_size == a.nsplits:
return a
op = DataFrameRechunk(
chunk_size=chunk_size,
threshold=threshold,
chunk_size_limit=chunk_size_limit,
reassign_worker=reassign_worker,
)
return op(a)
|
https://github.com/mars-project/mars/issues/1908
|
Traceback (most recent call last):
File "/usr/local/lib/python3.6/dist-packages/mars/scheduler/graph.py", line 411, in _execute_graph
self.prepare_graph(compose=compose)
File "/usr/local/lib/python3.6/dist-packages/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/scheduler/graph.py", line 666, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/usr/local/lib/python3.6/dist-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 348, in build
tileables, tileable_graph=tileable_graph)
File "/usr/local/lib/python3.6/dist-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/usr/local/lib/python3.6/dist-packages/mars/scheduler/graph.py", line 648, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/usr/local/lib/python3.6/dist-packages/mars/context.py", line 72, in h
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/usr/local/lib/python3.6/dist-packages/mars/dataframe/window/rolling/aggregation.py", line 308, in tile
inp = cls._check_can_be_tiled(op, is_window_int)
File "/usr/local/lib/python3.6/dist-packages/mars/dataframe/window/rolling/aggregation.py", line 167, in _check_can_be_tiled
inp = inp.rechunk({1: inp.shape[1]})._inplace_tile()
File "/usr/local/lib/python3.6/dist-packages/mars/core.py", line 168, in _inplace_tile
return handler.inplace_tile(self)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 136, in inplace_tile
dispatched = self.dispatch(to_tile.op)
File "/usr/local/lib/python3.6/dist-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/usr/local/lib/python3.6/dist-packages/mars/dataframe/base/rechunk.py", line 82, in tile
out = compute_rechunk(out.inputs[0], c)
File "/usr/local/lib/python3.6/dist-packages/mars/dataframe/base/rechunk.py", line 176, in compute_rechunk
index_value, columns_value, dtypes = _concat_dataframe_meta(to_merge)
File "/usr/local/lib/python3.6/dist-packages/mars/dataframe/base/rechunk.py", line 112, in _concat_dataframe_meta
columns_value = merge_index_value(idx_to_columns_value, store_data=True)
File "/usr/local/lib/python3.6/dist-packages/mars/dataframe/utils.py", line 730, in merge_index_value
if min_val is None or min_val > chunk_index_value.min_val:
ValueError: invalid literal for int() with base 10: 'minus'
|
ValueError
|
def merge_index_value(to_merge_index_values, store_data=False):
"""
Merge index value according to their chunk index.
:param to_merge_index_values: Dict object. {index: index_value}
:return: Merged index_value
"""
index_value = None
min_val, min_val_close, max_val, max_val_close = None, None, None, None
for _, chunk_index_value in sorted(to_merge_index_values.items()):
if index_value is None:
index_value = chunk_index_value.to_pandas()
min_val, min_val_close, max_val, max_val_close = (
chunk_index_value.min_val,
chunk_index_value.min_val_close,
chunk_index_value.max_val,
chunk_index_value.max_val_close,
)
else:
index_value = index_value.append(chunk_index_value.to_pandas())
if chunk_index_value.min_val is not None:
try:
if min_val is None or min_val > chunk_index_value.min_val:
min_val = chunk_index_value.min_val
min_val_close = chunk_index_value.min_val_close
except TypeError:
# min_value has different types that cannot compare
# just stop compare
continue
if chunk_index_value.max_val is not None:
if max_val is None or max_val < chunk_index_value.max_val:
max_val = chunk_index_value.max_val
max_val_close = chunk_index_value.max_val_close
new_index_value = parse_index(index_value, store_data=store_data)
if not new_index_value.has_value():
new_index_value._index_value._min_val = min_val
new_index_value._index_value._min_val_close = min_val_close
new_index_value._index_value._max_val = max_val
new_index_value._index_value._max_val_close = max_val_close
return new_index_value
|
def merge_index_value(to_merge_index_values, store_data=False):
"""
Merge index value according to their chunk index.
:param to_merge_index_values: Dict object. {index: index_value}
:return: Merged index_value
"""
index_value = None
min_val, min_val_close, max_val, max_val_close = None, None, None, None
for _, chunk_index_value in sorted(to_merge_index_values.items()):
if index_value is None:
index_value = chunk_index_value.to_pandas()
min_val, min_val_close, max_val, max_val_close = (
chunk_index_value.min_val,
chunk_index_value.min_val_close,
chunk_index_value.max_val,
chunk_index_value.max_val_close,
)
else:
index_value = index_value.append(chunk_index_value.to_pandas())
if chunk_index_value.min_val is not None:
if min_val is None or min_val > chunk_index_value.min_val:
min_val = chunk_index_value.min_val
min_val_close = chunk_index_value.min_val_close
if chunk_index_value.max_val is not None:
if max_val is None or max_val < chunk_index_value.max_val:
max_val = chunk_index_value.max_val
max_val_close = chunk_index_value.max_val_close
new_index_value = parse_index(index_value, store_data=store_data)
if not new_index_value.has_value():
new_index_value._index_value._min_val = min_val
new_index_value._index_value._min_val_close = min_val_close
new_index_value._index_value._max_val = max_val
new_index_value._index_value._max_val_close = max_val_close
return new_index_value
|
https://github.com/mars-project/mars/issues/1908
|
Traceback (most recent call last):
File "/usr/local/lib/python3.6/dist-packages/mars/scheduler/graph.py", line 411, in _execute_graph
self.prepare_graph(compose=compose)
File "/usr/local/lib/python3.6/dist-packages/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/scheduler/graph.py", line 666, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/usr/local/lib/python3.6/dist-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 348, in build
tileables, tileable_graph=tileable_graph)
File "/usr/local/lib/python3.6/dist-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/usr/local/lib/python3.6/dist-packages/mars/scheduler/graph.py", line 648, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/usr/local/lib/python3.6/dist-packages/mars/context.py", line 72, in h
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/usr/local/lib/python3.6/dist-packages/mars/dataframe/window/rolling/aggregation.py", line 308, in tile
inp = cls._check_can_be_tiled(op, is_window_int)
File "/usr/local/lib/python3.6/dist-packages/mars/dataframe/window/rolling/aggregation.py", line 167, in _check_can_be_tiled
inp = inp.rechunk({1: inp.shape[1]})._inplace_tile()
File "/usr/local/lib/python3.6/dist-packages/mars/core.py", line 168, in _inplace_tile
return handler.inplace_tile(self)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 136, in inplace_tile
dispatched = self.dispatch(to_tile.op)
File "/usr/local/lib/python3.6/dist-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/usr/local/lib/python3.6/dist-packages/mars/dataframe/base/rechunk.py", line 82, in tile
out = compute_rechunk(out.inputs[0], c)
File "/usr/local/lib/python3.6/dist-packages/mars/dataframe/base/rechunk.py", line 176, in compute_rechunk
index_value, columns_value, dtypes = _concat_dataframe_meta(to_merge)
File "/usr/local/lib/python3.6/dist-packages/mars/dataframe/base/rechunk.py", line 112, in _concat_dataframe_meta
columns_value = merge_index_value(idx_to_columns_value, store_data=True)
File "/usr/local/lib/python3.6/dist-packages/mars/dataframe/utils.py", line 730, in merge_index_value
if min_val is None or min_val > chunk_index_value.min_val:
ValueError: invalid literal for int() with base 10: 'minus'
|
ValueError
|
def tile(cls, op):
check_chunks_unknown_shape(op.inputs, TilesError)
tensor = astensor(op.input)
chunk_size = get_nsplits(tensor, op.chunk_size, tensor.dtype.itemsize)
if chunk_size == tensor.nsplits:
return [tensor]
new_chunk_size = chunk_size
steps = plan_rechunks(
op.inputs[0],
new_chunk_size,
op.inputs[0].dtype.itemsize,
threshold=op.threshold,
chunk_size_limit=op.chunk_size_limit,
)
tensor = op.outputs[0]
for c in steps:
tensor = compute_rechunk(tensor.inputs[0], c)
if op.reassign_worker:
for c in tensor.chunks:
c.op._reassign_worker = True
return [tensor]
|
def tile(cls, op):
check_chunks_unknown_shape(op.inputs, TilesError)
tensor = astensor(op.input)
chunk_size = get_nsplits(tensor, op.chunk_size, tensor.dtype.itemsize)
if chunk_size == tensor.nsplits:
return [tensor]
new_chunk_size = op.chunk_size
steps = plan_rechunks(
op.inputs[0],
new_chunk_size,
op.inputs[0].dtype.itemsize,
threshold=op.threshold,
chunk_size_limit=op.chunk_size_limit,
)
tensor = op.outputs[0]
for c in steps:
tensor = compute_rechunk(tensor.inputs[0], c)
if op.reassign_worker:
for c in tensor.chunks:
c.op._reassign_worker = True
return [tensor]
|
https://github.com/mars-project/mars/issues/1908
|
Traceback (most recent call last):
File "/usr/local/lib/python3.6/dist-packages/mars/scheduler/graph.py", line 411, in _execute_graph
self.prepare_graph(compose=compose)
File "/usr/local/lib/python3.6/dist-packages/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/scheduler/graph.py", line 666, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/usr/local/lib/python3.6/dist-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 348, in build
tileables, tileable_graph=tileable_graph)
File "/usr/local/lib/python3.6/dist-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/usr/local/lib/python3.6/dist-packages/mars/scheduler/graph.py", line 648, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/usr/local/lib/python3.6/dist-packages/mars/context.py", line 72, in h
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/usr/local/lib/python3.6/dist-packages/mars/dataframe/window/rolling/aggregation.py", line 308, in tile
inp = cls._check_can_be_tiled(op, is_window_int)
File "/usr/local/lib/python3.6/dist-packages/mars/dataframe/window/rolling/aggregation.py", line 167, in _check_can_be_tiled
inp = inp.rechunk({1: inp.shape[1]})._inplace_tile()
File "/usr/local/lib/python3.6/dist-packages/mars/core.py", line 168, in _inplace_tile
return handler.inplace_tile(self)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 136, in inplace_tile
dispatched = self.dispatch(to_tile.op)
File "/usr/local/lib/python3.6/dist-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/usr/local/lib/python3.6/dist-packages/mars/dataframe/base/rechunk.py", line 82, in tile
out = compute_rechunk(out.inputs[0], c)
File "/usr/local/lib/python3.6/dist-packages/mars/dataframe/base/rechunk.py", line 176, in compute_rechunk
index_value, columns_value, dtypes = _concat_dataframe_meta(to_merge)
File "/usr/local/lib/python3.6/dist-packages/mars/dataframe/base/rechunk.py", line 112, in _concat_dataframe_meta
columns_value = merge_index_value(idx_to_columns_value, store_data=True)
File "/usr/local/lib/python3.6/dist-packages/mars/dataframe/utils.py", line 730, in merge_index_value
if min_val is None or min_val > chunk_index_value.min_val:
ValueError: invalid literal for int() with base 10: 'minus'
|
ValueError
|
def rechunk(
tensor, chunk_size, threshold=None, chunk_size_limit=None, reassign_worker=False
):
if not any(np.isnan(s) for s in tensor.shape) and not tensor.is_coarse():
# do client check only when tensor has no unknown shape,
# otherwise, recalculate chunk_size in `tile`
chunk_size = get_nsplits(tensor, chunk_size, tensor.dtype.itemsize)
if chunk_size == tensor.nsplits:
return tensor
op = TensorRechunk(
chunk_size,
threshold,
chunk_size_limit,
reassign_worker=reassign_worker,
dtype=tensor.dtype,
sparse=tensor.issparse(),
)
return op(tensor)
|
def rechunk(
tensor, chunk_size, threshold=None, chunk_size_limit=None, reassign_worker=False
):
if not any(np.isnan(s) for s in tensor.shape):
# do client check only when tensor has no unknown shape,
# otherwise, recalculate chunk_size in `tile`
chunk_size = get_nsplits(tensor, chunk_size, tensor.dtype.itemsize)
if chunk_size == tensor.nsplits:
return tensor
op = TensorRechunk(
chunk_size,
threshold,
chunk_size_limit,
reassign_worker=reassign_worker,
dtype=tensor.dtype,
sparse=tensor.issparse(),
)
return op(tensor)
|
https://github.com/mars-project/mars/issues/1908
|
Traceback (most recent call last):
File "/usr/local/lib/python3.6/dist-packages/mars/scheduler/graph.py", line 411, in _execute_graph
self.prepare_graph(compose=compose)
File "/usr/local/lib/python3.6/dist-packages/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/scheduler/graph.py", line 666, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/usr/local/lib/python3.6/dist-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 348, in build
tileables, tileable_graph=tileable_graph)
File "/usr/local/lib/python3.6/dist-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/usr/local/lib/python3.6/dist-packages/mars/scheduler/graph.py", line 648, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/usr/local/lib/python3.6/dist-packages/mars/context.py", line 72, in h
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/usr/local/lib/python3.6/dist-packages/mars/dataframe/window/rolling/aggregation.py", line 308, in tile
inp = cls._check_can_be_tiled(op, is_window_int)
File "/usr/local/lib/python3.6/dist-packages/mars/dataframe/window/rolling/aggregation.py", line 167, in _check_can_be_tiled
inp = inp.rechunk({1: inp.shape[1]})._inplace_tile()
File "/usr/local/lib/python3.6/dist-packages/mars/core.py", line 168, in _inplace_tile
return handler.inplace_tile(self)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 136, in inplace_tile
dispatched = self.dispatch(to_tile.op)
File "/usr/local/lib/python3.6/dist-packages/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/usr/local/lib/python3.6/dist-packages/mars/dataframe/base/rechunk.py", line 82, in tile
out = compute_rechunk(out.inputs[0], c)
File "/usr/local/lib/python3.6/dist-packages/mars/dataframe/base/rechunk.py", line 176, in compute_rechunk
index_value, columns_value, dtypes = _concat_dataframe_meta(to_merge)
File "/usr/local/lib/python3.6/dist-packages/mars/dataframe/base/rechunk.py", line 112, in _concat_dataframe_meta
columns_value = merge_index_value(idx_to_columns_value, store_data=True)
File "/usr/local/lib/python3.6/dist-packages/mars/dataframe/utils.py", line 730, in merge_index_value
if min_val is None or min_val > chunk_index_value.min_val:
ValueError: invalid literal for int() with base 10: 'minus'
|
ValueError
|
def _set_inputs(self, inputs):
super()._set_inputs(inputs)
if len(self._inputs) == 2:
self._lhs = self._inputs[0]
self._rhs = self._inputs[1]
else:
if isinstance(self._lhs, (Base, Entity)):
self._lhs = self._inputs[0]
elif pd.api.types.is_scalar(self._lhs):
self._rhs = self._inputs[0]
|
def _set_inputs(self, inputs):
super()._set_inputs(inputs)
if len(self._inputs) == 2:
self._lhs = self._inputs[0]
self._rhs = self._inputs[1]
else:
if isinstance(self._lhs, (DATAFRAME_TYPE, SERIES_TYPE)):
self._lhs = self._inputs[0]
elif pd.api.types.is_scalar(self._lhs):
self._rhs = self._inputs[0]
|
https://github.com/mars-project/mars/issues/1918
|
Attempt 1: Unexpected error KeyError occurred in executing operand bc8dccd428eb0f6261420866f7206b73 in 0.0.0.0:23252
Traceback (most recent call last):
File "/Users/wenjun.swj/Code/mars/mars/promise.py", line 378, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/worker/execution.py", line 501, in execute_graph
graph_record = self._graph_records[(session_id, graph_key)] = GraphExecutionRecord(
File "/Users/wenjun.swj/Code/mars/mars/worker/execution.py", line 57, in __init__
graph = self.graph = deserialize_graph(graph_serialized)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 317, in deserialize_graph
return graph_cls.from_json(json_obj)
File "mars/graph.pyx", line 440, in mars.graph.DirectedGraph.from_json
return cls.deserialize(SerializableGraph.from_json(json_obj))
File "mars/serialize/core.pyx", line 718, in mars.serialize.core.Serializable.from_json
return cls.deserialize(provider, obj)
File "mars/serialize/core.pyx", line 689, in mars.serialize.core.Serializable.deserialize
[cb(key_to_instance) for cb in callbacks]
File "mars/serialize/jsonserializer.pyx", line 787, in mars.serialize.jsonserializer.JsonSerializeProvider.deserialize_field.cb
o = subs[val.key, val.id]
KeyError: ('b6888c78a929d77f42d7f3953fc813d9', '140581826655232')
|
KeyError
|
def _get_grouped(cls, op: "DataFrameGroupByAgg", df, ctx, copy=False, grouper=None):
if copy:
df = df.copy()
params = op.groupby_params.copy()
params.pop("as_index", None)
selection = params.pop("selection", None)
if grouper is not None:
params["by"] = grouper
params.pop("level", None)
elif isinstance(params.get("by"), list):
new_by = []
for v in params["by"]:
if isinstance(v, Base):
new_by.append(ctx[v.key])
else:
new_by.append(v)
params["by"] = new_by
if op.stage == OperandStage.agg:
grouped = df.groupby(**params)
else:
# for the intermediate phases, do not sort
params["sort"] = False
grouped = df.groupby(**params)
if selection is not None:
grouped = grouped[selection]
return grouped
|
def _get_grouped(cls, op: "DataFrameGroupByAgg", df, ctx, copy=False, grouper=None):
if copy:
df = df.copy()
params = op.groupby_params.copy()
params.pop("as_index", None)
selection = params.pop("selection", None)
if grouper is not None:
params["by"] = grouper
params.pop("level", None)
elif isinstance(params.get("by"), list):
new_by = []
for v in params["by"]:
if isinstance(v, Base):
new_by.append(ctx[v.key])
else:
new_by.append(v)
params["by"] = new_by
if op.stage == OperandStage.agg:
grouped = df.groupby(**params)
else:
# for the intermediate phases, do not sort
params["sort"] = False
grouped = df.groupby(**params)
if selection:
grouped = grouped[selection]
return grouped
|
https://github.com/mars-project/mars/issues/1918
|
Attempt 1: Unexpected error KeyError occurred in executing operand bc8dccd428eb0f6261420866f7206b73 in 0.0.0.0:23252
Traceback (most recent call last):
File "/Users/wenjun.swj/Code/mars/mars/promise.py", line 378, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/worker/execution.py", line 501, in execute_graph
graph_record = self._graph_records[(session_id, graph_key)] = GraphExecutionRecord(
File "/Users/wenjun.swj/Code/mars/mars/worker/execution.py", line 57, in __init__
graph = self.graph = deserialize_graph(graph_serialized)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 317, in deserialize_graph
return graph_cls.from_json(json_obj)
File "mars/graph.pyx", line 440, in mars.graph.DirectedGraph.from_json
return cls.deserialize(SerializableGraph.from_json(json_obj))
File "mars/serialize/core.pyx", line 718, in mars.serialize.core.Serializable.from_json
return cls.deserialize(provider, obj)
File "mars/serialize/core.pyx", line 689, in mars.serialize.core.Serializable.deserialize
[cb(key_to_instance) for cb in callbacks]
File "mars/serialize/jsonserializer.pyx", line 787, in mars.serialize.jsonserializer.JsonSerializeProvider.deserialize_field.cb
o = subs[val.key, val.id]
KeyError: ('b6888c78a929d77f42d7f3953fc813d9', '140581826655232')
|
KeyError
|
def _execute_agg(cls, ctx, op: "DataFrameGroupByAgg"):
xdf = cudf if op.gpu else pd
out_chunk = op.outputs[0]
col_value = (
out_chunk.columns_value.to_pandas()
if hasattr(out_chunk, "columns_value")
else None
)
in_data_tuple = ctx[op.inputs[0].key]
in_data_list = []
for in_data in in_data_tuple:
if (
isinstance(in_data, xdf.Series)
and op.output_types[0] == OutputType.dataframe
):
in_data = cls._series_to_df(in_data, op.gpu)
in_data_list.append(in_data)
in_data_tuple = tuple(in_data_list)
in_data_dict = cls._pack_inputs(op.agg_funcs, in_data_tuple)
for (
_input_key,
_map_func_name,
agg_func_name,
custom_reduction,
output_key,
_output_limit,
kwds,
) in op.agg_funcs:
if agg_func_name == "custom_reduction":
input_obj = tuple(
cls._get_grouped(op, o, ctx) for o in in_data_dict[output_key]
)
in_data_dict[output_key] = cls._do_custom_agg(
op, custom_reduction, *input_obj
)[0]
else:
input_obj = cls._get_grouped(op, in_data_dict[output_key], ctx)
in_data_dict[output_key] = cls._do_predefined_agg(
input_obj, agg_func_name, **kwds
)
aggs = []
for input_keys, _output_key, func_name, cols, func in op.post_funcs:
if cols is None:
func_inputs = [in_data_dict[k] for k in input_keys]
else:
func_inputs = [in_data_dict[k][cols] for k in input_keys]
if (
func_inputs[0].ndim == 2
and len(set(inp.shape[1] for inp in func_inputs)) > 1
):
common_cols = func_inputs[0].columns
for inp in func_inputs[1:]:
common_cols = common_cols.join(inp.columns, how="inner")
func_inputs = [inp[common_cols] for inp in func_inputs]
agg_df = func(*func_inputs, gpu=op.is_gpu())
if isinstance(agg_df, np.ndarray):
agg_df = xdf.DataFrame(agg_df, index=func_inputs[0].index)
new_cols = None
if out_chunk.ndim == 2 and col_value is not None:
if col_value.nlevels > agg_df.columns.nlevels:
new_cols = xdf.MultiIndex.from_product([agg_df.columns, [func_name]])
elif agg_df.shape[-1] == 1 and func_name in col_value:
new_cols = xdf.Index([func_name])
aggs.append((agg_df, new_cols))
for agg_df, new_cols in aggs:
if new_cols is not None:
agg_df.columns = new_cols
aggs = [item[0] for item in aggs]
if out_chunk.ndim == 2:
result = xdf.concat(aggs, axis=1)
if (
not op.groupby_params.get("as_index", True)
and col_value.nlevels == result.columns.nlevels
):
result.reset_index(inplace=True, drop=result.index.name in result.columns)
result = result.reindex(col_value, axis=1)
if result.ndim == 2 and len(result) == 0:
result = result.astype(out_chunk.dtypes)
else:
result = xdf.concat(aggs)
if result.ndim == 2:
result = result.iloc[:, 0]
result.name = out_chunk.name
ctx[out_chunk.key] = result
|
def _execute_agg(cls, ctx, op: "DataFrameGroupByAgg"):
xdf = cudf if op.gpu else pd
out = op.outputs[0]
col_value = out.columns_value.to_pandas() if hasattr(out, "columns_value") else None
in_data_tuple = ctx[op.inputs[0].key]
in_data_list = []
for in_data in in_data_tuple:
if (
isinstance(in_data, xdf.Series)
and op.output_types[0] == OutputType.dataframe
):
in_data = cls._series_to_df(in_data, op.gpu)
in_data_list.append(in_data)
in_data_tuple = tuple(in_data_list)
in_data_dict = cls._pack_inputs(op.agg_funcs, in_data_tuple)
for (
_input_key,
_map_func_name,
agg_func_name,
custom_reduction,
output_key,
_output_limit,
kwds,
) in op.agg_funcs:
if agg_func_name == "custom_reduction":
input_obj = tuple(
cls._get_grouped(op, o, ctx) for o in in_data_dict[output_key]
)
in_data_dict[output_key] = cls._do_custom_agg(
op, custom_reduction, *input_obj
)[0]
else:
input_obj = cls._get_grouped(op, in_data_dict[output_key], ctx)
in_data_dict[output_key] = cls._do_predefined_agg(
input_obj, agg_func_name, **kwds
)
aggs = []
for input_keys, _output_key, func_name, cols, func in op.post_funcs:
if cols is None:
func_inputs = [in_data_dict[k] for k in input_keys]
else:
func_inputs = [in_data_dict[k][cols] for k in input_keys]
if (
func_inputs[0].ndim == 2
and len(set(inp.shape[1] for inp in func_inputs)) > 1
):
common_cols = func_inputs[0].columns
for inp in func_inputs[1:]:
common_cols = common_cols.join(inp.columns, how="inner")
func_inputs = [inp[common_cols] for inp in func_inputs]
agg_df = func(*func_inputs, gpu=op.is_gpu())
if isinstance(agg_df, np.ndarray):
agg_df = xdf.DataFrame(agg_df, index=func_inputs[0].index)
new_cols = None
if out.ndim == 2 and col_value is not None:
if col_value.nlevels > agg_df.columns.nlevels:
new_cols = xdf.MultiIndex.from_product([agg_df.columns, [func_name]])
elif agg_df.shape[-1] == 1 and func_name in col_value:
new_cols = xdf.Index([func_name])
aggs.append((agg_df, new_cols))
for agg_df, new_cols in aggs:
if new_cols is not None:
agg_df.columns = new_cols
aggs = [item[0] for item in aggs]
if out.ndim == 2:
result = xdf.concat(aggs, axis=1)
if (
not op.groupby_params.get("as_index", True)
and col_value.nlevels == result.columns.nlevels
):
result.reset_index(inplace=True, drop=result.index.name in result.columns)
result = result.reindex(col_value, axis=1)
else:
result = xdf.concat(aggs)
if result.ndim == 2:
result = result.iloc[:, 0]
result.name = out.name
ctx[op.outputs[0].key] = result
|
https://github.com/mars-project/mars/issues/1918
|
Attempt 1: Unexpected error KeyError occurred in executing operand bc8dccd428eb0f6261420866f7206b73 in 0.0.0.0:23252
Traceback (most recent call last):
File "/Users/wenjun.swj/Code/mars/mars/promise.py", line 378, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/worker/execution.py", line 501, in execute_graph
graph_record = self._graph_records[(session_id, graph_key)] = GraphExecutionRecord(
File "/Users/wenjun.swj/Code/mars/mars/worker/execution.py", line 57, in __init__
graph = self.graph = deserialize_graph(graph_serialized)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 317, in deserialize_graph
return graph_cls.from_json(json_obj)
File "mars/graph.pyx", line 440, in mars.graph.DirectedGraph.from_json
return cls.deserialize(SerializableGraph.from_json(json_obj))
File "mars/serialize/core.pyx", line 718, in mars.serialize.core.Serializable.from_json
return cls.deserialize(provider, obj)
File "mars/serialize/core.pyx", line 689, in mars.serialize.core.Serializable.deserialize
[cb(key_to_instance) for cb in callbacks]
File "mars/serialize/jsonserializer.pyx", line 787, in mars.serialize.jsonserializer.JsonSerializeProvider.deserialize_field.cb
o = subs[val.key, val.id]
KeyError: ('b6888c78a929d77f42d7f3953fc813d9', '140581826655232')
|
KeyError
|
def tile(cls, op):
in_groupby = op.inputs[0]
out_df = op.outputs[0]
chunks = []
for c in in_groupby.chunks:
new_op = op.copy().reset_key()
new_index = parse_index(pd.RangeIndex(-1), c.key)
if op.output_types[0] == OutputType.dataframe:
chunks.append(
new_op.new_chunk(
[c],
index=c.index,
shape=(np.nan, len(out_df.dtypes)),
dtypes=out_df.dtypes,
columns_value=out_df.columns_value,
index_value=new_index,
)
)
else:
chunks.append(
new_op.new_chunk(
[c],
index=(c.index[0],),
shape=(np.nan,),
dtype=out_df.dtype,
index_value=new_index,
name=out_df.name,
)
)
new_op = op.copy().reset_key()
kw = out_df.params.copy()
kw["chunks"] = chunks
if op.output_types[0] == OutputType.dataframe:
kw["nsplits"] = ((np.nan,) * len(chunks), (len(out_df.dtypes),))
else:
kw["nsplits"] = ((np.nan,) * len(chunks),)
return new_op.new_tileables([in_groupby], **kw)
|
def tile(cls, op):
in_groupby = op.inputs[0]
out_df = op.outputs[0]
chunks = []
for c in in_groupby.chunks:
new_op = op.copy().reset_key()
new_index = parse_index(pd.RangeIndex(-1), c.key)
if op.output_types[0] == OutputType.dataframe:
chunks.append(
new_op.new_chunk(
[c],
index=c.index,
shape=(np.nan, len(out_df.dtypes)),
dtypes=out_df.dtypes,
columns_value=out_df.columns_value,
index_value=new_index,
)
)
else:
chunks.append(
new_op.new_chunk(
[c],
index=(c.index[0],),
shape=(np.nan,),
dtype=out_df.dtype,
index_value=new_index,
)
)
new_op = op.copy().reset_key()
kw = out_df.params.copy()
kw["chunks"] = chunks
if op.output_types[0] == OutputType.dataframe:
kw["nsplits"] = ((np.nan,) * len(chunks), (len(out_df.dtypes),))
else:
kw["nsplits"] = ((np.nan,) * len(chunks),)
return new_op.new_tileables([in_groupby], **kw)
|
https://github.com/mars-project/mars/issues/1918
|
Attempt 1: Unexpected error KeyError occurred in executing operand bc8dccd428eb0f6261420866f7206b73 in 0.0.0.0:23252
Traceback (most recent call last):
File "/Users/wenjun.swj/Code/mars/mars/promise.py", line 378, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/worker/execution.py", line 501, in execute_graph
graph_record = self._graph_records[(session_id, graph_key)] = GraphExecutionRecord(
File "/Users/wenjun.swj/Code/mars/mars/worker/execution.py", line 57, in __init__
graph = self.graph = deserialize_graph(graph_serialized)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 317, in deserialize_graph
return graph_cls.from_json(json_obj)
File "mars/graph.pyx", line 440, in mars.graph.DirectedGraph.from_json
return cls.deserialize(SerializableGraph.from_json(json_obj))
File "mars/serialize/core.pyx", line 718, in mars.serialize.core.Serializable.from_json
return cls.deserialize(provider, obj)
File "mars/serialize/core.pyx", line 689, in mars.serialize.core.Serializable.deserialize
[cb(key_to_instance) for cb in callbacks]
File "mars/serialize/jsonserializer.pyx", line 787, in mars.serialize.jsonserializer.JsonSerializeProvider.deserialize_field.cb
o = subs[val.key, val.id]
KeyError: ('b6888c78a929d77f42d7f3953fc813d9', '140581826655232')
|
KeyError
|
def execute(cls, ctx, op: "GroupByCumReductionOperand"):
in_data = ctx[op.inputs[0].key]
out_chunk = op.outputs[0]
if not in_data or in_data.empty:
ctx[out_chunk.key] = (
build_empty_df(out_chunk.dtypes)
if op.output_types[0] == OutputType.dataframe
else build_empty_series(out_chunk.dtype, name=out_chunk.name)
)
return
func_name = getattr(op, "_func_name")
if func_name == "cumcount":
ctx[out_chunk.key] = in_data.cumcount(ascending=op.ascending)
else:
result = getattr(in_data, func_name)(axis=op.axis)
if result.ndim == 2:
ctx[out_chunk.key] = result.astype(out_chunk.dtypes, copy=False)
else:
ctx[out_chunk.key] = result.astype(out_chunk.dtype, copy=False)
|
def execute(cls, ctx, op: "GroupByCumReductionOperand"):
in_data = ctx[op.inputs[0].key]
out_df = op.outputs[0]
if not in_data or in_data.empty:
ctx[out_df.key] = (
build_empty_df(out_df.dtypes)
if op.output_types[0] == OutputType.dataframe
else build_empty_series(out_df.dtype)
)
return
func_name = getattr(op, "_func_name")
if func_name == "cumcount":
ctx[out_df.key] = in_data.cumcount(ascending=op.ascending)
else:
result = getattr(in_data, func_name)(axis=op.axis)
if result.ndim == 2:
ctx[out_df.key] = result.astype(out_df.dtypes, copy=False)
else:
ctx[out_df.key] = result.astype(out_df.dtype, copy=False)
|
https://github.com/mars-project/mars/issues/1918
|
Attempt 1: Unexpected error KeyError occurred in executing operand bc8dccd428eb0f6261420866f7206b73 in 0.0.0.0:23252
Traceback (most recent call last):
File "/Users/wenjun.swj/Code/mars/mars/promise.py", line 378, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/worker/execution.py", line 501, in execute_graph
graph_record = self._graph_records[(session_id, graph_key)] = GraphExecutionRecord(
File "/Users/wenjun.swj/Code/mars/mars/worker/execution.py", line 57, in __init__
graph = self.graph = deserialize_graph(graph_serialized)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 317, in deserialize_graph
return graph_cls.from_json(json_obj)
File "mars/graph.pyx", line 440, in mars.graph.DirectedGraph.from_json
return cls.deserialize(SerializableGraph.from_json(json_obj))
File "mars/serialize/core.pyx", line 718, in mars.serialize.core.Serializable.from_json
return cls.deserialize(provider, obj)
File "mars/serialize/core.pyx", line 689, in mars.serialize.core.Serializable.deserialize
[cb(key_to_instance) for cb in callbacks]
File "mars/serialize/jsonserializer.pyx", line 787, in mars.serialize.jsonserializer.JsonSerializeProvider.deserialize_field.cb
o = subs[val.key, val.id]
KeyError: ('b6888c78a929d77f42d7f3953fc813d9', '140581826655232')
|
KeyError
|
def execute(cls, ctx, op):
in_data = ctx[op.inputs[0].key]
out_chunk = op.outputs[0]
if not in_data:
if op.output_types[0] == OutputType.dataframe:
ctx[op.outputs[0].key] = build_empty_df(out_chunk.dtypes)
else:
ctx[op.outputs[0].key] = build_empty_series(out_chunk.dtype)
return
if op.call_agg:
result = in_data.agg(op.func, *op.args, **op.kwds)
elif in_data.shape[0] > 0:
# cannot perform groupby-transform over empty dataframe
result = in_data.transform(op.func, *op.args, **op.kwds)
else:
if out_chunk.ndim == 2:
result = pd.DataFrame(columns=out_chunk.dtypes.index)
else:
result = pd.Series([], name=out_chunk.name, dtype=out_chunk.dtype)
if result.ndim == 2:
result = result.astype(out_chunk.dtypes, copy=False)
else:
result = result.astype(out_chunk.dtype, copy=False)
ctx[op.outputs[0].key] = result
|
def execute(cls, ctx, op):
in_data = ctx[op.inputs[0].key]
out_chunk = op.outputs[0]
if not in_data:
if op.output_types[0] == OutputType.dataframe:
ctx[op.outputs[0].key] = build_empty_df(out_chunk.dtypes)
else:
ctx[op.outputs[0].key] = build_empty_series(out_chunk.dtype)
return
if op.call_agg:
result = in_data.agg(op.func, *op.args, **op.kwds)
else:
result = in_data.transform(op.func, *op.args, **op.kwds)
if result.ndim == 2:
result = result.astype(op.outputs[0].dtypes, copy=False)
else:
result = result.astype(op.outputs[0].dtype, copy=False)
ctx[op.outputs[0].key] = result
|
https://github.com/mars-project/mars/issues/1918
|
Attempt 1: Unexpected error KeyError occurred in executing operand bc8dccd428eb0f6261420866f7206b73 in 0.0.0.0:23252
Traceback (most recent call last):
File "/Users/wenjun.swj/Code/mars/mars/promise.py", line 378, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/worker/execution.py", line 501, in execute_graph
graph_record = self._graph_records[(session_id, graph_key)] = GraphExecutionRecord(
File "/Users/wenjun.swj/Code/mars/mars/worker/execution.py", line 57, in __init__
graph = self.graph = deserialize_graph(graph_serialized)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 317, in deserialize_graph
return graph_cls.from_json(json_obj)
File "mars/graph.pyx", line 440, in mars.graph.DirectedGraph.from_json
return cls.deserialize(SerializableGraph.from_json(json_obj))
File "mars/serialize/core.pyx", line 718, in mars.serialize.core.Serializable.from_json
return cls.deserialize(provider, obj)
File "mars/serialize/core.pyx", line 689, in mars.serialize.core.Serializable.deserialize
[cb(key_to_instance) for cb in callbacks]
File "mars/serialize/jsonserializer.pyx", line 787, in mars.serialize.jsonserializer.JsonSerializeProvider.deserialize_field.cb
o = subs[val.key, val.id]
KeyError: ('b6888c78a929d77f42d7f3953fc813d9', '140581826655232')
|
KeyError
|
def _set_inputs(self, inputs):
super()._set_inputs(inputs)
input_iter = iter(inputs)
next(input_iter)
if isinstance(self.to_replace, (SERIES_TYPE, SERIES_CHUNK_TYPE)):
self._to_replace = next(input_iter)
if isinstance(self.value, (SERIES_TYPE, SERIES_CHUNK_TYPE)):
self._value = next(input_iter)
self._fill_chunks = list(input_iter)
|
def _set_inputs(self, inputs):
super()._set_inputs(inputs)
input_iter = iter(inputs)
next(input_iter)
if isinstance(self.to_replace, SERIES_TYPE):
self._to_replace = next(input_iter)
if isinstance(self.value, SERIES_TYPE):
self._value = next(input_iter)
self._fill_chunks = list(input_iter)
|
https://github.com/mars-project/mars/issues/1918
|
Attempt 1: Unexpected error KeyError occurred in executing operand bc8dccd428eb0f6261420866f7206b73 in 0.0.0.0:23252
Traceback (most recent call last):
File "/Users/wenjun.swj/Code/mars/mars/promise.py", line 378, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/worker/execution.py", line 501, in execute_graph
graph_record = self._graph_records[(session_id, graph_key)] = GraphExecutionRecord(
File "/Users/wenjun.swj/Code/mars/mars/worker/execution.py", line 57, in __init__
graph = self.graph = deserialize_graph(graph_serialized)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 317, in deserialize_graph
return graph_cls.from_json(json_obj)
File "mars/graph.pyx", line 440, in mars.graph.DirectedGraph.from_json
return cls.deserialize(SerializableGraph.from_json(json_obj))
File "mars/serialize/core.pyx", line 718, in mars.serialize.core.Serializable.from_json
return cls.deserialize(provider, obj)
File "mars/serialize/core.pyx", line 689, in mars.serialize.core.Serializable.deserialize
[cb(key_to_instance) for cb in callbacks]
File "mars/serialize/jsonserializer.pyx", line 787, in mars.serialize.jsonserializer.JsonSerializeProvider.deserialize_field.cb
o = subs[val.key, val.id]
KeyError: ('b6888c78a929d77f42d7f3953fc813d9', '140581826655232')
|
KeyError
|
def _set_inputs(self, inputs):
super()._set_inputs(inputs)
self._input = self._inputs[0]
if isinstance(self._q, (TENSOR_TYPE, TENSOR_CHUNK_TYPE)):
self._q = self._inputs[-1]
|
def _set_inputs(self, inputs):
super()._set_inputs(inputs)
self._input = self._inputs[0]
if isinstance(self._q, TENSOR_TYPE):
self._q = self._inputs[-1]
|
https://github.com/mars-project/mars/issues/1918
|
Attempt 1: Unexpected error KeyError occurred in executing operand bc8dccd428eb0f6261420866f7206b73 in 0.0.0.0:23252
Traceback (most recent call last):
File "/Users/wenjun.swj/Code/mars/mars/promise.py", line 378, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/worker/execution.py", line 501, in execute_graph
graph_record = self._graph_records[(session_id, graph_key)] = GraphExecutionRecord(
File "/Users/wenjun.swj/Code/mars/mars/worker/execution.py", line 57, in __init__
graph = self.graph = deserialize_graph(graph_serialized)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 317, in deserialize_graph
return graph_cls.from_json(json_obj)
File "mars/graph.pyx", line 440, in mars.graph.DirectedGraph.from_json
return cls.deserialize(SerializableGraph.from_json(json_obj))
File "mars/serialize/core.pyx", line 718, in mars.serialize.core.Serializable.from_json
return cls.deserialize(provider, obj)
File "mars/serialize/core.pyx", line 689, in mars.serialize.core.Serializable.deserialize
[cb(key_to_instance) for cb in callbacks]
File "mars/serialize/jsonserializer.pyx", line 787, in mars.serialize.jsonserializer.JsonSerializeProvider.deserialize_field.cb
o = subs[val.key, val.id]
KeyError: ('b6888c78a929d77f42d7f3953fc813d9', '140581826655232')
|
KeyError
|
def _set_inputs(self, inputs):
super()._set_inputs(inputs)
self._input = self._inputs[0]
if isinstance(self._tree, (OBJECT_TYPE, OBJECT_CHUNK_TYPE)):
self._tree = self._inputs[1]
|
def _set_inputs(self, inputs):
super()._set_inputs(inputs)
self._input = self._inputs[0]
if isinstance(self._tree, OBJECT_TYPE):
self._tree = self._inputs[1]
|
https://github.com/mars-project/mars/issues/1918
|
Attempt 1: Unexpected error KeyError occurred in executing operand bc8dccd428eb0f6261420866f7206b73 in 0.0.0.0:23252
Traceback (most recent call last):
File "/Users/wenjun.swj/Code/mars/mars/promise.py", line 378, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/worker/execution.py", line 501, in execute_graph
graph_record = self._graph_records[(session_id, graph_key)] = GraphExecutionRecord(
File "/Users/wenjun.swj/Code/mars/mars/worker/execution.py", line 57, in __init__
graph = self.graph = deserialize_graph(graph_serialized)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 317, in deserialize_graph
return graph_cls.from_json(json_obj)
File "mars/graph.pyx", line 440, in mars.graph.DirectedGraph.from_json
return cls.deserialize(SerializableGraph.from_json(json_obj))
File "mars/serialize/core.pyx", line 718, in mars.serialize.core.Serializable.from_json
return cls.deserialize(provider, obj)
File "mars/serialize/core.pyx", line 689, in mars.serialize.core.Serializable.deserialize
[cb(key_to_instance) for cb in callbacks]
File "mars/serialize/jsonserializer.pyx", line 787, in mars.serialize.jsonserializer.JsonSerializeProvider.deserialize_field.cb
o = subs[val.key, val.id]
KeyError: ('b6888c78a929d77f42d7f3953fc813d9', '140581826655232')
|
KeyError
|
def _set_inputs(self, inputs):
super()._set_inputs(inputs)
inputs_iter = iter(self._inputs)
self._input = next(inputs_iter)
if isinstance(self._bins, (TENSOR_TYPE, TENSOR_CHUNK_TYPE)):
self._bins = next(inputs_iter)
if self._weights is not None:
self._weights = next(inputs_iter)
if self._input_min is not None:
self._input_min = next(inputs_iter)
if self._input_max is not None:
self._input_max = next(inputs_iter)
|
def _set_inputs(self, inputs):
super()._set_inputs(inputs)
inputs_iter = iter(self._inputs)
self._input = next(inputs_iter)
if isinstance(self._bins, TENSOR_TYPE):
self._bins = next(inputs_iter)
if self._weights is not None:
self._weights = next(inputs_iter)
if self._input_min is not None:
self._input_min = next(inputs_iter)
if self._input_max is not None:
self._input_max = next(inputs_iter)
|
https://github.com/mars-project/mars/issues/1918
|
Attempt 1: Unexpected error KeyError occurred in executing operand bc8dccd428eb0f6261420866f7206b73 in 0.0.0.0:23252
Traceback (most recent call last):
File "/Users/wenjun.swj/Code/mars/mars/promise.py", line 378, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/worker/execution.py", line 501, in execute_graph
graph_record = self._graph_records[(session_id, graph_key)] = GraphExecutionRecord(
File "/Users/wenjun.swj/Code/mars/mars/worker/execution.py", line 57, in __init__
graph = self.graph = deserialize_graph(graph_serialized)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 317, in deserialize_graph
return graph_cls.from_json(json_obj)
File "mars/graph.pyx", line 440, in mars.graph.DirectedGraph.from_json
return cls.deserialize(SerializableGraph.from_json(json_obj))
File "mars/serialize/core.pyx", line 718, in mars.serialize.core.Serializable.from_json
return cls.deserialize(provider, obj)
File "mars/serialize/core.pyx", line 689, in mars.serialize.core.Serializable.deserialize
[cb(key_to_instance) for cb in callbacks]
File "mars/serialize/jsonserializer.pyx", line 787, in mars.serialize.jsonserializer.JsonSerializeProvider.deserialize_field.cb
o = subs[val.key, val.id]
KeyError: ('b6888c78a929d77f42d7f3953fc813d9', '140581826655232')
|
KeyError
|
def _set_inputs(self, inputs):
super()._set_inputs(inputs)
inputs_iter = iter(self._inputs)
self._input = next(inputs_iter)
if isinstance(self._bins, (TENSOR_TYPE, TENSOR_CHUNK_TYPE)):
self._bins = next(inputs_iter)
if self._weights is not None:
self._weights = next(inputs_iter)
|
def _set_inputs(self, inputs):
super()._set_inputs(inputs)
inputs_iter = iter(self._inputs)
self._input = next(inputs_iter)
if isinstance(self._bins, TENSOR_TYPE):
self._bins = next(inputs_iter)
if self._weights is not None:
self._weights = next(inputs_iter)
|
https://github.com/mars-project/mars/issues/1918
|
Attempt 1: Unexpected error KeyError occurred in executing operand bc8dccd428eb0f6261420866f7206b73 in 0.0.0.0:23252
Traceback (most recent call last):
File "/Users/wenjun.swj/Code/mars/mars/promise.py", line 378, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/worker/execution.py", line 501, in execute_graph
graph_record = self._graph_records[(session_id, graph_key)] = GraphExecutionRecord(
File "/Users/wenjun.swj/Code/mars/mars/worker/execution.py", line 57, in __init__
graph = self.graph = deserialize_graph(graph_serialized)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 317, in deserialize_graph
return graph_cls.from_json(json_obj)
File "mars/graph.pyx", line 440, in mars.graph.DirectedGraph.from_json
return cls.deserialize(SerializableGraph.from_json(json_obj))
File "mars/serialize/core.pyx", line 718, in mars.serialize.core.Serializable.from_json
return cls.deserialize(provider, obj)
File "mars/serialize/core.pyx", line 689, in mars.serialize.core.Serializable.deserialize
[cb(key_to_instance) for cb in callbacks]
File "mars/serialize/jsonserializer.pyx", line 787, in mars.serialize.jsonserializer.JsonSerializeProvider.deserialize_field.cb
o = subs[val.key, val.id]
KeyError: ('b6888c78a929d77f42d7f3953fc813d9', '140581826655232')
|
KeyError
|
def deserialize_graph(ser_graph, graph_cls=None):
from google.protobuf.message import DecodeError
from .serialize.protos.graph_pb2 import GraphDef
from .graph import DirectedGraph
graph_cls = graph_cls or DirectedGraph
ser_graph_bin = to_binary(ser_graph)
g = GraphDef()
try:
g.ParseFromString(ser_graph_bin)
return graph_cls.from_pb(g)
except DecodeError:
pass
try:
ser_graph_bin = zlib.decompress(ser_graph_bin)
g.ParseFromString(ser_graph_bin)
return graph_cls.from_pb(g)
except (zlib.error, DecodeError):
pass
json_obj = json.loads(to_str(ser_graph))
return graph_cls.from_json(json_obj)
|
def deserialize_graph(ser_graph, graph_cls=None):
from google.protobuf.message import DecodeError
from .serialize.protos.graph_pb2 import GraphDef
from .graph import DirectedGraph
graph_cls = graph_cls or DirectedGraph
ser_graph_bin = to_binary(ser_graph)
g = GraphDef()
try:
ser_graph = ser_graph
g.ParseFromString(ser_graph_bin)
return graph_cls.from_pb(g)
except DecodeError:
pass
try:
ser_graph_bin = zlib.decompress(ser_graph_bin)
g.ParseFromString(ser_graph_bin)
return graph_cls.from_pb(g)
except (zlib.error, DecodeError):
pass
json_obj = json.loads(to_str(ser_graph))
return graph_cls.from_json(json_obj)
|
https://github.com/mars-project/mars/issues/1918
|
Attempt 1: Unexpected error KeyError occurred in executing operand bc8dccd428eb0f6261420866f7206b73 in 0.0.0.0:23252
Traceback (most recent call last):
File "/Users/wenjun.swj/Code/mars/mars/promise.py", line 378, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 383, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/worker/execution.py", line 501, in execute_graph
graph_record = self._graph_records[(session_id, graph_key)] = GraphExecutionRecord(
File "/Users/wenjun.swj/Code/mars/mars/worker/execution.py", line 57, in __init__
graph = self.graph = deserialize_graph(graph_serialized)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 317, in deserialize_graph
return graph_cls.from_json(json_obj)
File "mars/graph.pyx", line 440, in mars.graph.DirectedGraph.from_json
return cls.deserialize(SerializableGraph.from_json(json_obj))
File "mars/serialize/core.pyx", line 718, in mars.serialize.core.Serializable.from_json
return cls.deserialize(provider, obj)
File "mars/serialize/core.pyx", line 689, in mars.serialize.core.Serializable.deserialize
[cb(key_to_instance) for cb in callbacks]
File "mars/serialize/jsonserializer.pyx", line 787, in mars.serialize.jsonserializer.JsonSerializeProvider.deserialize_field.cb
o = subs[val.key, val.id]
KeyError: ('b6888c78a929d77f42d7f3953fc813d9', '140581826655232')
|
KeyError
|
def tile(cls, op):
check_chunks_unknown_shape(op.inputs, TilesError)
a = op.input
a = asdataframe(a) if a.ndim == 2 else asseries(a)
chunk_size = _get_chunk_size(a, op.chunk_size)
if chunk_size == a.nsplits:
return [a]
out = op.outputs[0]
new_chunk_size = chunk_size
if isinstance(out, DATAFRAME_TYPE):
itemsize = max(getattr(dt, "itemsize", 8) for dt in out.dtypes)
else:
itemsize = out.dtype.itemsize
steps = plan_rechunks(
op.inputs[0],
new_chunk_size,
itemsize,
threshold=op.threshold,
chunk_size_limit=op.chunk_size_limit,
)
for c in steps:
out = compute_rechunk(out.inputs[0], c)
if op.reassign_worker:
for c in out.chunks:
c.op._reassign_worker = True
return [out]
|
def tile(cls, op):
check_chunks_unknown_shape(op.inputs, TilesError)
out = op.outputs[0]
new_chunk_size = op.chunk_size
if isinstance(out, DATAFRAME_TYPE):
itemsize = max(getattr(dt, "itemsize", 8) for dt in out.dtypes)
else:
itemsize = out.dtype.itemsize
steps = plan_rechunks(
op.inputs[0],
new_chunk_size,
itemsize,
threshold=op.threshold,
chunk_size_limit=op.chunk_size_limit,
)
for c in steps:
out = compute_rechunk(out.inputs[0], c)
if op.reassign_worker:
for c in out.chunks:
c.op._reassign_worker = True
return [out]
|
https://github.com/mars-project/mars/issues/1910
|
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.6.2.zip/mars/scheduler/graph.py", line 410, in _execute_graph
self.prepare_graph(compose=compose)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 377, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/scheduler/graph.py", line 648, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 348, in build
tileables, tileable_graph=tileable_graph)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/scheduler/graph.py", line 630, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/context.py", line 72, in h
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/learn/contrib/xgboost/dmatrix.py", line 255, in tile
return cls._tile_multi_output(op)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/learn/contrib/xgboost/dmatrix.py", line 139, in _tile_multi_output
label = label.rechunk({0: nsplit})._inplace_tile()
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tensor/rechunk/rechunk.py", line 81, in rechunk
chunk_size = get_nsplits(tensor, chunk_size, tensor.dtype.itemsize)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tensor/rechunk/core.py", line 38, in get_nsplits
return decide_chunk_sizes(tileable.shape, chunk_size, itemsize)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tensor/utils.py", line 551, in decide_chunk_sizes
return normalize_chunk_sizes(shape, tuple(chunk_size[j] for j in range(len(shape))))
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tensor/utils.py", line 66, in normalize_chunk_sizes
raise ValueError('chunks shape should be of the same length, '
ValueError: chunks shape should be of the same length, got shape: 1000000, chunks: (nan,)
|
ValueError
|
def rechunk(
a, chunk_size, threshold=None, chunk_size_limit=None, reassign_worker=False
):
if not any(pd.isna(s) for s in a.shape):
# do client check only when no unknown shape,
# real nsplits will be recalculated inside `tile`
chunk_size = _get_chunk_size(a, chunk_size)
if chunk_size == a.nsplits:
return a
op = DataFrameRechunk(
chunk_size=chunk_size,
threshold=threshold,
chunk_size_limit=chunk_size_limit,
reassign_worker=reassign_worker,
)
return op(a)
|
def rechunk(
a, chunk_size, threshold=None, chunk_size_limit=None, reassign_worker=False
):
if isinstance(a, DATAFRAME_TYPE):
itemsize = max(getattr(dt, "itemsize", 8) for dt in a.dtypes)
else:
itemsize = a.dtype.itemsize
chunk_size = get_nsplits(a, chunk_size, itemsize)
if chunk_size == a.nsplits:
return a
op = DataFrameRechunk(
chunk_size, threshold, chunk_size_limit, reassign_worker=reassign_worker
)
return op(a)
|
https://github.com/mars-project/mars/issues/1910
|
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.6.2.zip/mars/scheduler/graph.py", line 410, in _execute_graph
self.prepare_graph(compose=compose)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 377, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/scheduler/graph.py", line 648, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 348, in build
tileables, tileable_graph=tileable_graph)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/scheduler/graph.py", line 630, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/context.py", line 72, in h
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/learn/contrib/xgboost/dmatrix.py", line 255, in tile
return cls._tile_multi_output(op)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/learn/contrib/xgboost/dmatrix.py", line 139, in _tile_multi_output
label = label.rechunk({0: nsplit})._inplace_tile()
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tensor/rechunk/rechunk.py", line 81, in rechunk
chunk_size = get_nsplits(tensor, chunk_size, tensor.dtype.itemsize)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tensor/rechunk/core.py", line 38, in get_nsplits
return decide_chunk_sizes(tileable.shape, chunk_size, itemsize)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tensor/utils.py", line 551, in decide_chunk_sizes
return normalize_chunk_sizes(shape, tuple(chunk_size[j] for j in range(len(shape))))
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tensor/utils.py", line 66, in normalize_chunk_sizes
raise ValueError('chunks shape should be of the same length, '
ValueError: chunks shape should be of the same length, got shape: 1000000, chunks: (nan,)
|
ValueError
|
def tile(cls, op: "DataFrameToCSV"):
in_df = op.input
out_df = op.outputs[0]
if in_df.ndim == 2 and in_df.chunk_shape[1] > 1:
# make sure only 1 chunk on the column axis
in_df = in_df.rechunk({1: in_df.shape[1]})._inplace_tile()
one_file = op.one_file
out_chunks = [], []
for chunk in in_df.chunks:
chunk_op = op.copy().reset_key()
if not one_file:
index_value = parse_index(chunk.index_value.to_pandas()[:0], chunk)
if chunk.ndim == 2:
out_chunk = chunk_op.new_chunk(
[chunk],
shape=(0, 0),
index_value=index_value,
columns_value=out_df.columns_value,
dtypes=out_df.dtypes,
index=chunk.index,
)
else:
out_chunk = chunk_op.new_chunk(
[chunk],
shape=(0,),
index_value=index_value,
dtype=out_df.dtype,
index=chunk.index,
)
out_chunks[0].append(out_chunk)
else:
chunk_op._output_stat = True
chunk_op._stage = OperandStage.map
# bytes of csv
kws = [
{
"shape": (),
"dtype": np.dtype(np.str_),
"index": chunk.index,
"order": TensorOrder.C_ORDER,
"output_type": OutputType.scalar,
"type": "csv",
},
{
"shape": (),
"dtype": np.dtype(np.intp),
"index": chunk.index,
"order": TensorOrder.C_ORDER,
"output_type": OutputType.scalar,
"type": "stat",
},
]
chunks = chunk_op.new_chunks([chunk], kws=kws, output_limit=len(kws))
out_chunks[0].append(chunks[0])
out_chunks[1].append(chunks[1])
if not one_file:
out_chunks = out_chunks[0]
else:
stat_chunk = DataFrameToCSVStat(
path=op.path, dtype=np.dtype(np.int64), storage_options=op.storage_options
).new_chunk(
out_chunks[1], shape=(len(out_chunks[0]),), order=TensorOrder.C_ORDER
)
new_out_chunks = []
for c in out_chunks[0]:
op = DataFrameToCSV(
stage=OperandStage.agg,
path=op.path,
storage_options=op.storage_options,
output_types=op.output_types,
)
if out_df.ndim == 2:
out_chunk = op.new_chunk(
[c, stat_chunk],
shape=(0, 0),
dtypes=out_df.dtypes,
index_value=out_df.index_value,
columns_value=out_df.columns_value,
index=c.index,
)
else:
out_chunk = op.new_chunk(
[c, stat_chunk],
shape=(0,),
dtype=out_df.dtype,
index_value=out_df.index_value,
index=c.index,
)
new_out_chunks.append(out_chunk)
out_chunks = new_out_chunks
new_op = op.copy()
params = out_df.params.copy()
if out_df.ndim == 2:
params.update(
dict(chunks=out_chunks, nsplits=((0,) * in_df.chunk_shape[0], (0,)))
)
else:
params.update(dict(chunks=out_chunks, nsplits=((0,) * in_df.chunk_shape[0],)))
return new_op.new_tileables([in_df], **params)
|
def tile(cls, op: "DataFrameToCSV"):
in_df = op.input
out_df = op.outputs[0]
if in_df.ndim == 2:
# make sure only 1 chunk on the column axis
in_df = in_df.rechunk({1: in_df.shape[1]})._inplace_tile()
one_file = op.one_file
out_chunks = [], []
for chunk in in_df.chunks:
chunk_op = op.copy().reset_key()
if not one_file:
index_value = parse_index(chunk.index_value.to_pandas()[:0], chunk)
if chunk.ndim == 2:
out_chunk = chunk_op.new_chunk(
[chunk],
shape=(0, 0),
index_value=index_value,
columns_value=out_df.columns_value,
dtypes=out_df.dtypes,
index=chunk.index,
)
else:
out_chunk = chunk_op.new_chunk(
[chunk],
shape=(0,),
index_value=index_value,
dtype=out_df.dtype,
index=chunk.index,
)
out_chunks[0].append(out_chunk)
else:
chunk_op._output_stat = True
chunk_op._stage = OperandStage.map
# bytes of csv
kws = [
{
"shape": (),
"dtype": np.dtype(np.str_),
"index": chunk.index,
"order": TensorOrder.C_ORDER,
"output_type": OutputType.scalar,
"type": "csv",
},
{
"shape": (),
"dtype": np.dtype(np.intp),
"index": chunk.index,
"order": TensorOrder.C_ORDER,
"output_type": OutputType.scalar,
"type": "stat",
},
]
chunks = chunk_op.new_chunks([chunk], kws=kws, output_limit=len(kws))
out_chunks[0].append(chunks[0])
out_chunks[1].append(chunks[1])
if not one_file:
out_chunks = out_chunks[0]
else:
stat_chunk = DataFrameToCSVStat(
path=op.path, dtype=np.dtype(np.int64), storage_options=op.storage_options
).new_chunk(
out_chunks[1], shape=(len(out_chunks[0]),), order=TensorOrder.C_ORDER
)
new_out_chunks = []
for c in out_chunks[0]:
op = DataFrameToCSV(
stage=OperandStage.agg,
path=op.path,
storage_options=op.storage_options,
output_types=op.output_types,
)
if out_df.ndim == 2:
out_chunk = op.new_chunk(
[c, stat_chunk],
shape=(0, 0),
dtypes=out_df.dtypes,
index_value=out_df.index_value,
columns_value=out_df.columns_value,
index=c.index,
)
else:
out_chunk = op.new_chunk(
[c, stat_chunk],
shape=(0,),
dtype=out_df.dtype,
index_value=out_df.index_value,
index=c.index,
)
new_out_chunks.append(out_chunk)
out_chunks = new_out_chunks
new_op = op.copy()
params = out_df.params.copy()
if out_df.ndim == 2:
params.update(
dict(chunks=out_chunks, nsplits=((0,) * in_df.chunk_shape[0], (0,)))
)
else:
params.update(dict(chunks=out_chunks, nsplits=((0,) * in_df.chunk_shape[0],)))
return new_op.new_tileables([in_df], **params)
|
https://github.com/mars-project/mars/issues/1910
|
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.6.2.zip/mars/scheduler/graph.py", line 410, in _execute_graph
self.prepare_graph(compose=compose)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 377, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/scheduler/graph.py", line 648, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 348, in build
tileables, tileable_graph=tileable_graph)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/scheduler/graph.py", line 630, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/context.py", line 72, in h
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/learn/contrib/xgboost/dmatrix.py", line 255, in tile
return cls._tile_multi_output(op)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/learn/contrib/xgboost/dmatrix.py", line 139, in _tile_multi_output
label = label.rechunk({0: nsplit})._inplace_tile()
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tensor/rechunk/rechunk.py", line 81, in rechunk
chunk_size = get_nsplits(tensor, chunk_size, tensor.dtype.itemsize)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tensor/rechunk/core.py", line 38, in get_nsplits
return decide_chunk_sizes(tileable.shape, chunk_size, itemsize)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tensor/utils.py", line 551, in decide_chunk_sizes
return normalize_chunk_sizes(shape, tuple(chunk_size[j] for j in range(len(shape))))
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tensor/utils.py", line 66, in normalize_chunk_sizes
raise ValueError('chunks shape should be of the same length, '
ValueError: chunks shape should be of the same length, got shape: 1000000, chunks: (nan,)
|
ValueError
|
def tile(cls, op):
check_chunks_unknown_shape(op.inputs, TilesError)
tensor = astensor(op.input)
chunk_size = get_nsplits(tensor, op.chunk_size, tensor.dtype.itemsize)
if chunk_size == tensor.nsplits:
return [tensor]
new_chunk_size = op.chunk_size
steps = plan_rechunks(
op.inputs[0],
new_chunk_size,
op.inputs[0].dtype.itemsize,
threshold=op.threshold,
chunk_size_limit=op.chunk_size_limit,
)
tensor = op.outputs[0]
for c in steps:
tensor = compute_rechunk(tensor.inputs[0], c)
if op.reassign_worker:
for c in tensor.chunks:
c.op._reassign_worker = True
return [tensor]
|
def tile(cls, op):
check_chunks_unknown_shape(op.inputs, TilesError)
new_chunk_size = op.chunk_size
steps = plan_rechunks(
op.inputs[0],
new_chunk_size,
op.inputs[0].dtype.itemsize,
threshold=op.threshold,
chunk_size_limit=op.chunk_size_limit,
)
tensor = op.outputs[0]
for c in steps:
tensor = compute_rechunk(tensor.inputs[0], c)
if op.reassign_worker:
for c in tensor.chunks:
c.op._reassign_worker = True
return [tensor]
|
https://github.com/mars-project/mars/issues/1910
|
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.6.2.zip/mars/scheduler/graph.py", line 410, in _execute_graph
self.prepare_graph(compose=compose)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 377, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/scheduler/graph.py", line 648, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 348, in build
tileables, tileable_graph=tileable_graph)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/scheduler/graph.py", line 630, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/context.py", line 72, in h
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/learn/contrib/xgboost/dmatrix.py", line 255, in tile
return cls._tile_multi_output(op)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/learn/contrib/xgboost/dmatrix.py", line 139, in _tile_multi_output
label = label.rechunk({0: nsplit})._inplace_tile()
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tensor/rechunk/rechunk.py", line 81, in rechunk
chunk_size = get_nsplits(tensor, chunk_size, tensor.dtype.itemsize)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tensor/rechunk/core.py", line 38, in get_nsplits
return decide_chunk_sizes(tileable.shape, chunk_size, itemsize)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tensor/utils.py", line 551, in decide_chunk_sizes
return normalize_chunk_sizes(shape, tuple(chunk_size[j] for j in range(len(shape))))
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tensor/utils.py", line 66, in normalize_chunk_sizes
raise ValueError('chunks shape should be of the same length, '
ValueError: chunks shape should be of the same length, got shape: 1000000, chunks: (nan,)
|
ValueError
|
def rechunk(
tensor, chunk_size, threshold=None, chunk_size_limit=None, reassign_worker=False
):
if not any(np.isnan(s) for s in tensor.shape):
# do client check only when tensor has no unknown shape,
# otherwise, recalculate chunk_size in `tile`
chunk_size = get_nsplits(tensor, chunk_size, tensor.dtype.itemsize)
if chunk_size == tensor.nsplits:
return tensor
op = TensorRechunk(
chunk_size,
threshold,
chunk_size_limit,
reassign_worker=reassign_worker,
dtype=tensor.dtype,
sparse=tensor.issparse(),
)
return op(tensor)
|
def rechunk(
tensor, chunk_size, threshold=None, chunk_size_limit=None, reassign_worker=False
):
chunk_size = get_nsplits(tensor, chunk_size, tensor.dtype.itemsize)
if chunk_size == tensor.nsplits:
return tensor
op = TensorRechunk(
chunk_size,
threshold,
chunk_size_limit,
reassign_worker=reassign_worker,
dtype=tensor.dtype,
sparse=tensor.issparse(),
)
return op(tensor)
|
https://github.com/mars-project/mars/issues/1910
|
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.6.2.zip/mars/scheduler/graph.py", line 410, in _execute_graph
self.prepare_graph(compose=compose)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 377, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/scheduler/graph.py", line 648, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 348, in build
tileables, tileable_graph=tileable_graph)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/scheduler/graph.py", line 630, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/context.py", line 72, in h
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/learn/contrib/xgboost/dmatrix.py", line 255, in tile
return cls._tile_multi_output(op)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/learn/contrib/xgboost/dmatrix.py", line 139, in _tile_multi_output
label = label.rechunk({0: nsplit})._inplace_tile()
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tensor/rechunk/rechunk.py", line 81, in rechunk
chunk_size = get_nsplits(tensor, chunk_size, tensor.dtype.itemsize)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tensor/rechunk/core.py", line 38, in get_nsplits
return decide_chunk_sizes(tileable.shape, chunk_size, itemsize)
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tensor/utils.py", line 551, in decide_chunk_sizes
return normalize_chunk_sizes(shape, tuple(chunk_size[j] for j in range(len(shape))))
File "/home/admin/work/_public-mars-0.6.2.zip/mars/tensor/utils.py", line 66, in normalize_chunk_sizes
raise ValueError('chunks shape should be of the same length, '
ValueError: chunks shape should be of the same length, got shape: 1000000, chunks: (nan,)
|
ValueError
|
def __init__(
self,
op=None,
shape=None,
nsplits=None,
dtype=None,
name=None,
names=None,
index_value=None,
chunks=None,
**kw,
):
super().__init__(
_op=op,
_shape=shape,
_nsplits=nsplits,
_dtype=dtype,
_name=name,
_names=names,
_index_value=index_value,
_chunks=chunks,
**kw,
)
|
def __init__(
self,
op=None,
shape=None,
nsplits=None,
dtype=None,
name=None,
index_value=None,
chunks=None,
**kw,
):
super().__init__(
_op=op,
_shape=shape,
_nsplits=nsplits,
_dtype=dtype,
_name=name,
_index_value=index_value,
_chunks=chunks,
**kw,
)
|
https://github.com/mars-project/mars/issues/1890
|
Traceback (most recent call last):
File "/Users/wenjun.swj/miniconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 3417, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-6-f86c22f8381d>", line 1, in <module>
mdf[mdf[0] != 0].sort_values(0).execute()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 646, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 642, in run
self.data.execute(session, **kw)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 379, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 883, in execute_tileables
self.execute_graph(chunk_graph, list(temp_result_keys),
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 579, in execute
future.result()
File "/Users/wenjun.swj/miniconda3/lib/python3.8/concurrent/futures/_base.py", line 432, in result
return self.__get_result()
File "/Users/wenjun.swj/miniconda3/lib/python3.8/concurrent/futures/_base.py", line 388, in __get_result
raise self._exception
File "/Users/wenjun.swj/miniconda3/lib/python3.8/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 649, in handle
return runner(results, op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/sort/psrs.py", line 355, in execute
num = n // a.shape[op.axis] + 1
ZeroDivisionError: integer division or modulo by zero
|
ZeroDivisionError
|
def __call__(self, shape=None, chunk_size=None, inp=None, name=None, names=None):
if inp is None:
# create from pandas Index
name = name if name is not None else self._data.name
names = names if names is not None else self._data.names
return self.new_index(
None,
shape=shape,
dtype=self._dtype,
index_value=parse_index(self._data),
name=name,
names=names,
raw_chunk_size=chunk_size,
)
elif hasattr(inp, "index_value"):
# get index from Mars DataFrame, Series or Index
name = name if name is not None else inp.index_value.name
names = names if names is not None else [name]
if inp.index_value.has_value():
self._data = data = inp.index_value.to_pandas()
return self.new_index(
None,
shape=(inp.shape[0],),
dtype=data.dtype,
index_value=parse_index(data),
name=name,
names=names,
raw_chunk_size=chunk_size,
)
else:
if self._dtype is None:
self._dtype = inp.index_value.to_pandas().dtype
return self.new_index(
[inp],
shape=(inp.shape[0],),
dtype=self._dtype,
index_value=inp.index_value,
name=name,
names=names,
)
else:
if inp.ndim != 1:
raise ValueError("Index data must be 1-dimensional")
# get index from tensor
dtype = inp.dtype if self._dtype is None else self._dtype
pd_index = pd.Index([], dtype=dtype)
if self._dtype is None:
self._dtype = pd_index.dtype
return self.new_index(
[inp],
shape=inp.shape,
dtype=self._dtype,
index_value=parse_index(pd_index, inp),
name=name,
names=names,
)
|
def __call__(self, shape=None, chunk_size=None, inp=None, name=None, names=None):
if inp is None:
# create from pandas Index
name = name if name is not None else self._data.name
return self.new_index(
None,
shape=shape,
dtype=self._dtype,
index_value=parse_index(self._data),
name=name,
raw_chunk_size=chunk_size,
)
elif hasattr(inp, "index_value"):
# get index from Mars DataFrame, Series or Index
name = name if name is not None else inp.index_value.name
names = names if names is not None else [name]
if inp.index_value.has_value():
self._data = data = inp.index_value.to_pandas()
return self.new_index(
None,
shape=(inp.shape[0],),
dtype=data.dtype,
index_value=parse_index(data),
name=name,
names=names,
raw_chunk_size=chunk_size,
)
else:
if self._dtype is None:
self._dtype = inp.index_value.to_pandas().dtype
return self.new_index(
[inp],
shape=(inp.shape[0],),
dtype=self._dtype,
index_value=inp.index_value,
name=name,
names=names,
)
else:
if inp.ndim != 1:
raise ValueError("Index data must be 1-dimensional")
# get index from tensor
dtype = inp.dtype if self._dtype is None else self._dtype
pd_index = pd.Index([], dtype=dtype)
if self._dtype is None:
self._dtype = pd_index.dtype
return self.new_index(
[inp],
shape=inp.shape,
dtype=self._dtype,
index_value=parse_index(pd_index, inp),
name=name,
)
|
https://github.com/mars-project/mars/issues/1890
|
Traceback (most recent call last):
File "/Users/wenjun.swj/miniconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 3417, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-6-f86c22f8381d>", line 1, in <module>
mdf[mdf[0] != 0].sort_values(0).execute()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 646, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 642, in run
self.data.execute(session, **kw)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 379, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 883, in execute_tileables
self.execute_graph(chunk_graph, list(temp_result_keys),
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 579, in execute
future.result()
File "/Users/wenjun.swj/miniconda3/lib/python3.8/concurrent/futures/_base.py", line 432, in result
return self.__get_result()
File "/Users/wenjun.swj/miniconda3/lib/python3.8/concurrent/futures/_base.py", line 388, in __get_result
raise self._exception
File "/Users/wenjun.swj/miniconda3/lib/python3.8/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 649, in handle
return runner(results, op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/sort/psrs.py", line 355, in execute
num = n // a.shape[op.axis] + 1
ZeroDivisionError: integer division or modulo by zero
|
ZeroDivisionError
|
def from_tileable(tileable, dtype=None, name=None, names=None):
op = IndexDataSource(gpu=tileable.op.gpu, sparse=tileable.issparse(), dtype=dtype)
return op(inp=tileable, name=name, names=names)
|
def from_tileable(tileable, dtype=None, name=None):
op = IndexDataSource(gpu=tileable.op.gpu, sparse=tileable.issparse(), dtype=dtype)
return op(inp=tileable, name=name)
|
https://github.com/mars-project/mars/issues/1890
|
Traceback (most recent call last):
File "/Users/wenjun.swj/miniconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 3417, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-6-f86c22f8381d>", line 1, in <module>
mdf[mdf[0] != 0].sort_values(0).execute()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 646, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 642, in run
self.data.execute(session, **kw)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 379, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 883, in execute_tileables
self.execute_graph(chunk_graph, list(temp_result_keys),
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 579, in execute
future.result()
File "/Users/wenjun.swj/miniconda3/lib/python3.8/concurrent/futures/_base.py", line 432, in result
return self.__get_result()
File "/Users/wenjun.swj/miniconda3/lib/python3.8/concurrent/futures/_base.py", line 388, in __get_result
raise self._exception
File "/Users/wenjun.swj/miniconda3/lib/python3.8/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 649, in handle
return runner(results, op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/sort/psrs.py", line 355, in execute
num = n // a.shape[op.axis] + 1
ZeroDivisionError: integer division or modulo by zero
|
ZeroDivisionError
|
def __init__(
self,
data=None,
dtype=None,
copy=False,
name=None,
tupleize_cols=True,
chunk_size=None,
gpu=None,
sparse=None,
names=None,
num_partitions=None,
):
# make sure __getattr__ does not result in stack overflow
self._data = None
need_repart = False
if isinstance(data, INDEX_TYPE):
if not hasattr(data, "data"):
# IndexData
index = _Index(data)
else:
index = data
need_repart = num_partitions is not None
else:
if isinstance(data, (Base, Entity)):
name = name if name is not None else getattr(data, "name", None)
index = from_tileable_index(data, dtype=dtype, name=name, names=names)
need_repart = num_partitions is not None
else:
if not isinstance(data, pd.Index):
name = name if name is not None else getattr(data, "name", None)
pd_index = pd.Index(
data=data,
dtype=dtype,
copy=copy,
name=name,
tupleize_cols=tupleize_cols,
)
else:
pd_index = data
if num_partitions is not None:
chunk_size = ceildiv(len(pd_index), num_partitions)
index = from_pandas_index(
pd_index, chunk_size=chunk_size, gpu=gpu, sparse=sparse
)
if need_repart:
index = index.rebalance(num_partitions=num_partitions)
super().__init__(index.data)
|
def __init__(
self,
data=None,
dtype=None,
copy=False,
name=None,
tupleize_cols=True,
chunk_size=None,
gpu=None,
sparse=None,
names=None,
num_partitions=None,
):
# make sure __getattr__ does not result in stack overflow
self._data = None
need_repart = False
if isinstance(data, INDEX_TYPE):
if not hasattr(data, "data"):
# IndexData
index = _Index(data)
else:
index = data
need_repart = num_partitions is not None
else:
if isinstance(data, (Base, Entity)):
name = name if name is not None else getattr(data, "name", None)
index = from_tileable_index(data, dtype=dtype, name=name)
need_repart = num_partitions is not None
else:
if not isinstance(data, pd.Index):
name = name if name is not None else getattr(data, "name", None)
pd_index = pd.Index(
data=data,
dtype=dtype,
copy=copy,
name=name,
tupleize_cols=tupleize_cols,
)
else:
pd_index = data
if num_partitions is not None:
chunk_size = ceildiv(len(pd_index), num_partitions)
index = from_pandas_index(
pd_index, chunk_size=chunk_size, gpu=gpu, sparse=sparse
)
if need_repart:
index = index.rebalance(num_partitions=num_partitions)
super().__init__(index.data)
|
https://github.com/mars-project/mars/issues/1890
|
Traceback (most recent call last):
File "/Users/wenjun.swj/miniconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 3417, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-6-f86c22f8381d>", line 1, in <module>
mdf[mdf[0] != 0].sort_values(0).execute()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 646, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 642, in run
self.data.execute(session, **kw)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 379, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 883, in execute_tileables
self.execute_graph(chunk_graph, list(temp_result_keys),
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 579, in execute
future.result()
File "/Users/wenjun.swj/miniconda3/lib/python3.8/concurrent/futures/_base.py", line 432, in result
return self.__get_result()
File "/Users/wenjun.swj/miniconda3/lib/python3.8/concurrent/futures/_base.py", line 388, in __get_result
raise self._exception
File "/Users/wenjun.swj/miniconda3/lib/python3.8/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 649, in handle
return runner(results, op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/sort/psrs.py", line 355, in execute
num = n // a.shape[op.axis] + 1
ZeroDivisionError: integer division or modulo by zero
|
ZeroDivisionError
|
def execute(cls, ctx, op):
a = ctx[op.inputs[0].key]
xdf = pd if isinstance(a, (pd.DataFrame, pd.Series)) else cudf
if len(a) == 0:
# when chunk is empty, return the empty chunk itself
ctx[op.outputs[0].key] = ctx[op.outputs[-1].key] = a
return
if op.sort_type == "sort_values":
ctx[op.outputs[0].key] = res = execute_sort_values(a, op)
else:
ctx[op.outputs[0].key] = res = execute_sort_index(a, op)
by = op.by
add_distinct_col = (
bool(int(os.environ.get("PSRS_DISTINCT_COL", "0")))
or getattr(ctx, "running_mode", None) == RunningMode.distributed
)
if (
add_distinct_col
and isinstance(a, xdf.DataFrame)
and op.sort_type == "sort_values"
):
# when running under distributed mode, we introduce an extra column
# to make sure pivots are distinct
chunk_idx = op.inputs[0].index[0]
distinct_col = (
_PSRS_DISTINCT_COL
if a.columns.nlevels == 1
else (_PSRS_DISTINCT_COL,) + ("",) * (a.columns.nlevels - 1)
)
res[distinct_col] = np.arange(
chunk_idx << 32, (chunk_idx << 32) + len(a), dtype=np.int64
)
by = list(by) + [distinct_col]
n = op.n_partition
if op.sort_type == "sort_values" and a.shape[op.axis] < n:
num = n // a.shape[op.axis] + 1
res = execute_sort_values(xdf.concat([res] * num), op, by=by)
w = res.shape[op.axis] * 1.0 / (n + 1)
slc = np.linspace(
max(w - 1, 0), res.shape[op.axis] - 1, num=n, endpoint=False
).astype(int)
if op.axis == 1:
slc = (slice(None), slc)
if op.sort_type == "sort_values":
# do regular sample
if op.by is not None:
ctx[op.outputs[-1].key] = res[by].iloc[slc]
else:
ctx[op.outputs[-1].key] = res.iloc[slc]
else:
# do regular sample
ctx[op.outputs[-1].key] = res.iloc[slc]
|
def execute(cls, ctx, op):
a = ctx[op.inputs[0].key]
xdf = pd if isinstance(a, (pd.DataFrame, pd.Series)) else cudf
if op.sort_type == "sort_values":
ctx[op.outputs[0].key] = res = execute_sort_values(a, op)
else:
ctx[op.outputs[0].key] = res = execute_sort_index(a, op)
by = op.by
add_distinct_col = (
bool(int(os.environ.get("PSRS_DISTINCT_COL", "0")))
or getattr(ctx, "running_mode", None) == RunningMode.distributed
)
if (
add_distinct_col
and isinstance(a, xdf.DataFrame)
and op.sort_type == "sort_values"
):
# when running under distributed mode, we introduce an extra column
# to make sure pivots are distinct
chunk_idx = op.inputs[0].index[0]
distinct_col = (
_PSRS_DISTINCT_COL
if a.columns.nlevels == 1
else (_PSRS_DISTINCT_COL,) + ("",) * (a.columns.nlevels - 1)
)
res[distinct_col] = np.arange(
chunk_idx << 32, (chunk_idx << 32) + len(a), dtype=np.int64
)
by = list(by) + [distinct_col]
n = op.n_partition
if op.sort_type == "sort_values" and a.shape[op.axis] < n:
num = n // a.shape[op.axis] + 1
res = execute_sort_values(xdf.concat([res] * num), op, by=by)
w = res.shape[op.axis] * 1.0 / (n + 1)
slc = np.linspace(
max(w - 1, 0), res.shape[op.axis] - 1, num=n, endpoint=False
).astype(int)
if op.axis == 1:
slc = (slice(None), slc)
if op.sort_type == "sort_values":
# do regular sample
if op.by is not None:
ctx[op.outputs[-1].key] = res[by].iloc[slc]
else:
ctx[op.outputs[-1].key] = res.iloc[slc]
else:
# do regular sample
ctx[op.outputs[-1].key] = res.iloc[slc]
|
https://github.com/mars-project/mars/issues/1890
|
Traceback (most recent call last):
File "/Users/wenjun.swj/miniconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 3417, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-6-f86c22f8381d>", line 1, in <module>
mdf[mdf[0] != 0].sort_values(0).execute()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 646, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 642, in run
self.data.execute(session, **kw)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 379, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 883, in execute_tileables
self.execute_graph(chunk_graph, list(temp_result_keys),
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 579, in execute
future.result()
File "/Users/wenjun.swj/miniconda3/lib/python3.8/concurrent/futures/_base.py", line 432, in result
return self.__get_result()
File "/Users/wenjun.swj/miniconda3/lib/python3.8/concurrent/futures/_base.py", line 388, in __get_result
raise self._exception
File "/Users/wenjun.swj/miniconda3/lib/python3.8/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 649, in handle
return runner(results, op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/sort/psrs.py", line 355, in execute
num = n // a.shape[op.axis] + 1
ZeroDivisionError: integer division or modulo by zero
|
ZeroDivisionError
|
def execute(cls, ctx, op):
inputs = [ctx[c.key] for c in op.inputs if len(ctx[c.key]) > 0]
if len(inputs) == 0:
# corner case: nothing sampled, we need to do nothing
ctx[op.outputs[-1].key] = ctx[op.inputs[0].key]
return
xdf = pd if isinstance(inputs[0], (pd.DataFrame, pd.Series)) else cudf
a = xdf.concat(inputs, axis=op.axis)
p = len(inputs)
assert a.shape[op.axis] == p * len(op.inputs)
slc = np.linspace(
p - 1, a.shape[op.axis] - 1, num=len(op.inputs) - 1, endpoint=False
).astype(int)
if op.axis == 1:
slc = (slice(None), slc)
if op.sort_type == "sort_values":
a = execute_sort_values(a, op, inplace=False)
ctx[op.outputs[-1].key] = a.iloc[slc]
else:
a = execute_sort_index(a, op, inplace=False)
ctx[op.outputs[-1].key] = a.index[slc]
|
def execute(cls, ctx, op):
inputs = [ctx[c.key] for c in op.inputs]
xdf = pd if isinstance(inputs[0], (pd.DataFrame, pd.Series)) else cudf
a = xdf.concat(inputs, axis=op.axis)
p = len(inputs)
assert a.shape[op.axis] == p**2
slc = np.linspace(p - 1, a.shape[op.axis] - 1, num=p - 1, endpoint=False).astype(
int
)
if op.axis == 1:
slc = (slice(None), slc)
if op.sort_type == "sort_values":
a = execute_sort_values(a, op, inplace=False)
ctx[op.outputs[-1].key] = a.iloc[slc]
else:
a = execute_sort_index(a, op, inplace=False)
ctx[op.outputs[-1].key] = a.index[slc]
|
https://github.com/mars-project/mars/issues/1890
|
Traceback (most recent call last):
File "/Users/wenjun.swj/miniconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 3417, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-6-f86c22f8381d>", line 1, in <module>
mdf[mdf[0] != 0].sort_values(0).execute()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 646, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 642, in run
self.data.execute(session, **kw)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 379, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 883, in execute_tileables
self.execute_graph(chunk_graph, list(temp_result_keys),
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 579, in execute
future.result()
File "/Users/wenjun.swj/miniconda3/lib/python3.8/concurrent/futures/_base.py", line 432, in result
return self.__get_result()
File "/Users/wenjun.swj/miniconda3/lib/python3.8/concurrent/futures/_base.py", line 388, in __get_result
raise self._exception
File "/Users/wenjun.swj/miniconda3/lib/python3.8/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 649, in handle
return runner(results, op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/sort/psrs.py", line 355, in execute
num = n // a.shape[op.axis] + 1
ZeroDivisionError: integer division or modulo by zero
|
ZeroDivisionError
|
def _execute_dataframe_map(cls, ctx, op):
a, pivots = [ctx[c.key] for c in op.inputs]
out = op.outputs[0]
if len(a) == 0:
# when the chunk is empty, no slices can be produced
for i in range(op.n_partition):
ctx[(out.key, str(i))] = a
return
# use numpy.searchsorted to find split positions.
by = op.by
distinct_col = (
_PSRS_DISTINCT_COL
if a.columns.nlevels == 1
else (_PSRS_DISTINCT_COL,) + ("",) * (a.columns.nlevels - 1)
)
if distinct_col in a.columns:
by = list(by) + [distinct_col]
try:
poses = cls._calc_poses(a[by], pivots, op.ascending)
except TypeError:
poses = cls._calc_poses(
a[by].fillna(_largest), pivots.fillna(_largest), op.ascending
)
poses = (None,) + tuple(poses) + (None,)
for i in range(op.n_partition):
values = a.iloc[poses[i] : poses[i + 1]]
ctx[(out.key, str(i))] = values
|
def _execute_dataframe_map(cls, ctx, op):
a, pivots = [ctx[c.key] for c in op.inputs]
out = op.outputs[0]
# use numpy.searchsorted to find split positions.
by = op.by
distinct_col = (
_PSRS_DISTINCT_COL
if a.columns.nlevels == 1
else (_PSRS_DISTINCT_COL,) + ("",) * (a.columns.nlevels - 1)
)
if distinct_col in a.columns:
by = list(by) + [distinct_col]
records = a[by].to_records(index=False)
p_records = pivots.to_records(index=False)
if op.ascending:
poses = records.searchsorted(p_records, side="right")
else:
poses = len(records) - records[::-1].searchsorted(p_records, side="right")
del records, p_records
poses = (None,) + tuple(poses) + (None,)
for i in range(op.n_partition):
values = a.iloc[poses[i] : poses[i + 1]]
ctx[(out.key, str(i))] = values
|
https://github.com/mars-project/mars/issues/1890
|
Traceback (most recent call last):
File "/Users/wenjun.swj/miniconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 3417, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-6-f86c22f8381d>", line 1, in <module>
mdf[mdf[0] != 0].sort_values(0).execute()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 646, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 642, in run
self.data.execute(session, **kw)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 379, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 883, in execute_tileables
self.execute_graph(chunk_graph, list(temp_result_keys),
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 579, in execute
future.result()
File "/Users/wenjun.swj/miniconda3/lib/python3.8/concurrent/futures/_base.py", line 432, in result
return self.__get_result()
File "/Users/wenjun.swj/miniconda3/lib/python3.8/concurrent/futures/_base.py", line 388, in __get_result
raise self._exception
File "/Users/wenjun.swj/miniconda3/lib/python3.8/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 649, in handle
return runner(results, op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/sort/psrs.py", line 355, in execute
num = n // a.shape[op.axis] + 1
ZeroDivisionError: integer division or modulo by zero
|
ZeroDivisionError
|
def _execute_reduce(cls, ctx, op):
out_chunk = op.outputs[0]
input_keys, _ = get_shuffle_input_keys_idxes(op.inputs[0])
if getattr(ctx, "running_mode", None) == RunningMode.distributed:
raw_inputs = [ctx.pop((input_key, op.shuffle_key)) for input_key in input_keys]
else:
raw_inputs = [ctx[(input_key, op.shuffle_key)] for input_key in input_keys]
xdf = pd if isinstance(raw_inputs[0], (pd.DataFrame, pd.Series)) else cudf
if xdf is pd:
concat_values = xdf.concat(raw_inputs, axis=op.axis, copy=False)
else:
concat_values = xdf.concat(raw_inputs, axis=op.axis)
del raw_inputs[:]
if isinstance(concat_values, xdf.DataFrame):
concat_values.drop(_PSRS_DISTINCT_COL, axis=1, inplace=True, errors="ignore")
col_index_dtype = out_chunk.columns_value.to_pandas().dtype
if concat_values.columns.dtype != col_index_dtype:
concat_values.columns = concat_values.columns.astype(col_index_dtype)
if op.sort_type == "sort_values":
ctx[op.outputs[0].key] = execute_sort_values(concat_values, op)
else:
ctx[op.outputs[0].key] = execute_sort_index(concat_values, op)
|
def _execute_reduce(cls, ctx, op):
input_keys, _ = get_shuffle_input_keys_idxes(op.inputs[0])
if getattr(ctx, "running_mode", None) == RunningMode.distributed:
raw_inputs = [ctx.pop((input_key, op.shuffle_key)) for input_key in input_keys]
else:
raw_inputs = [ctx[(input_key, op.shuffle_key)] for input_key in input_keys]
xdf = pd if isinstance(raw_inputs[0], (pd.DataFrame, pd.Series)) else cudf
if xdf is pd:
concat_values = xdf.concat(raw_inputs, axis=op.axis, copy=False)
else:
concat_values = xdf.concat(raw_inputs, axis=op.axis)
del raw_inputs[:]
if isinstance(concat_values, xdf.DataFrame):
concat_values.drop(_PSRS_DISTINCT_COL, axis=1, inplace=True, errors="ignore")
if op.sort_type == "sort_values":
ctx[op.outputs[0].key] = execute_sort_values(concat_values, op)
else:
ctx[op.outputs[0].key] = execute_sort_index(concat_values, op)
|
https://github.com/mars-project/mars/issues/1890
|
Traceback (most recent call last):
File "/Users/wenjun.swj/miniconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 3417, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-6-f86c22f8381d>", line 1, in <module>
mdf[mdf[0] != 0].sort_values(0).execute()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 646, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 642, in run
self.data.execute(session, **kw)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 379, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 883, in execute_tileables
self.execute_graph(chunk_graph, list(temp_result_keys),
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 698, in execute_graph
res = graph_execution.execute(retval)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 579, in execute
future.result()
File "/Users/wenjun.swj/miniconda3/lib/python3.8/concurrent/futures/_base.py", line 432, in result
return self.__get_result()
File "/Users/wenjun.swj/miniconda3/lib/python3.8/concurrent/futures/_base.py", line 388, in __get_result
raise self._exception
File "/Users/wenjun.swj/miniconda3/lib/python3.8/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 457, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 446, in _execute_operand
self.handle_op(first_op, results, self._mock)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 378, in handle_op
return Executor.handle(*args, **kw)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 649, in handle
return runner(results, op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/sort/psrs.py", line 355, in execute
num = n // a.shape[op.axis] + 1
ZeroDivisionError: integer division or modulo by zero
|
ZeroDivisionError
|
def fetch(self, session=None, **kw):
from .indexing.iloc import DataFrameIlocGetItem, SeriesIlocGetItem
batch_size = kw.pop("batch_size", 1000)
if len(kw) > 0: # pragma: no cover
raise TypeError(
f"'{next(iter(kw))}' is an invalid keyword argument for this function"
)
if isinstance(self.op, (DataFrameIlocGetItem, SeriesIlocGetItem)):
# see GH#1871
# already iloc, do not trigger batch fetch
return self._fetch(session=session, **kw)
else:
batches = list(self._iter(batch_size=batch_size, session=session))
return pd.concat(batches) if len(batches) > 1 else batches[0]
|
def fetch(self, session=None, **kw):
batch_size = kw.pop("batch_size", 1000)
if len(kw) > 0: # pragma: no cover
raise TypeError(
f"'{next(iter(kw))}' is an invalid keyword argument for this function"
)
batches = list(self._iter(batch_size=batch_size, session=session))
return pd.concat(batches) if len(batches) > 1 else batches[0]
|
https://github.com/mars-project/mars/issues/1871
|
In [20]: df = md.DataFrame(mt.random.rand(10, 3, chunk_size=3))
In [21]: df.execute()
Out[21]:
0 1 2
0 0.996454 0.199555 0.058095
1 0.856396 0.477889 0.869464
2 0.578357 0.449761 0.313753
3 0.907721 0.887646 0.171193
4 0.727089 0.617502 0.210623
5 0.209806 0.070762 0.183754
6 0.389748 0.779089 0.468244
7 0.506215 0.026623 0.473943
8 0.120368 0.201167 0.367040
9 0.717196 0.199664 0.741672
In [22]: df.iloc[:4].fetch(batch_size=3)
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-22-3025f16f5892> in <module>
----> 1 df.iloc[:4].fetch(batch_size=3)
~/Workspace/mars/mars/dataframe/core.py in fetch(self, session, **kw)
449 raise TypeError(
450 f"'{next(iter(kw))}' is an invalid keyword argument for this function")
--> 451 batches = list(self._iter(batch_size=batch_size, session=session))
452 return pd.concat(batches) if len(batches) > 1 else batches[0]
453
~/Workspace/mars/mars/dataframe/core.py in _iter(self, batch_size, session)
435 for i in range(n_batch):
436 batch_data = iloc(self)[batch_size * i: batch_size * (i + 1)]
--> 437 yield batch_data._fetch(session=session)
438 else:
439 yield self._fetch(session=session)
~/Workspace/mars/mars/core.py in _fetch(self, session, **kw)
406 session = self._get_session(session)
407 self._check_session(session, 'fetch')
--> 408 return session.fetch(self, **kw)
409
410 def fetch(self, session=None, **kw):
~/Workspace/mars/mars/session.py in fetch(self, *tileables, **kw)
532 ret_list = True
533
--> 534 result = self._sess.fetch(*tileables, **kw)
535
536 ret = []
~/Workspace/mars/mars/session.py in fetch(self, n_parallel, *tileables, **kw)
126 if n_parallel is None:
127 kw['n_parallel'] = cpu_count()
--> 128 return self._executor.fetch_tileables(tileables, **kw)
129
130 def fetch_log(self, tileables, offsets=None, sizes=None): # pragma: no cover
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
449 def _inner(*args, **kwargs):
450 with self:
--> 451 return func(*args, **kwargs)
452
453 return _inner
~/Workspace/mars/mars/executor.py in fetch_tileables(self, tileables, **kw)
973 # check if the tileable is executed before
974 raise ValueError(
--> 975 f'Tileable object {tileable.key} must be executed first before being fetched')
976
977 # if chunk executed, fetch chunk mechanism will be triggered in execute_tileables
ValueError: Tileable object 781bdacb33bac80f7e5b8f92ae3923ba must be executed first before being fetched
|
ValueError
|
def tile(cls, op: "DataFrameCartesianChunk"):
left = op.left
right = op.right
out = op.outputs[0]
if left.ndim == 2 and left.chunk_shape[1] > 1:
check_chunks_unknown_shape([left], TilesError)
# if left is a DataFrame, make sure 1 chunk on axis columns
left = left.rechunk({1: left.shape[1]})._inplace_tile()
if right.ndim == 2 and right.chunk_shape[1] > 1:
check_chunks_unknown_shape([right], TilesError)
# if right is a DataFrame, make sure 1 chunk on axis columns
right = right.rechunk({1: right.shape[1]})._inplace_tile()
out_chunks = []
nsplits = [[]] if out.ndim == 1 else [[], [out.shape[1]]]
i = 0
for left_chunk in left.chunks:
for right_chunk in right.chunks:
chunk_op = op.copy().reset_key()
chunk_op._tileable_op_key = op.key
if op.output_types[0] == OutputType.dataframe:
shape = (np.nan, out.shape[1])
index_value = parse_index(
out.index_value.to_pandas(),
left_chunk,
right_chunk,
op.func,
op.args,
op.kwargs,
)
out_chunk = chunk_op.new_chunk(
[left_chunk, right_chunk],
shape=shape,
index_value=index_value,
columns_value=out.columns_value,
dtypes=out.dtypes,
index=(i, 0),
)
out_chunks.append(out_chunk)
nsplits[0].append(out_chunk.shape[0])
else:
shape = (np.nan,)
index_value = parse_index(
out.index_value.to_pandas(),
left_chunk,
right_chunk,
op.func,
op.args,
op.kwargs,
)
out_chunk = chunk_op.new_chunk(
[left_chunk, right_chunk],
shape=shape,
index_value=index_value,
dtype=out.dtype,
name=out.name,
index=(i,),
)
out_chunks.append(out_chunk)
nsplits[0].append(out_chunk.shape[0])
i += 1
params = out.params
params["nsplits"] = tuple(tuple(ns) for ns in nsplits)
params["chunks"] = out_chunks
new_op = op.copy()
return new_op.new_tileables(op.inputs, kws=[params])
|
def tile(cls, op: "DataFrameCartesianChunk"):
left = op.left
right = op.right
out = op.outputs[0]
if left.ndim == 2 and left.chunk_shape[1] > 1:
check_chunks_unknown_shape([left], TilesError)
# if left is a DataFrame, make sure 1 chunk on axis columns
left = left.rechunk({1: left.shape[1]})._inplace_tile()
if right.ndim == 2 and right.chunk_shape[1] > 1:
check_chunks_unknown_shape([right], TilesError)
# if right is a DataFrame, make sure 1 chunk on axis columns
right = right.rechunk({1: right.shape[1]})._inplace_tile()
out_chunks = []
nsplits = [[]] if out.ndim == 1 else [[], [out.shape[1]]]
i = 0
for left_chunk in left.chunks:
for right_chunk in right.chunks:
chunk_op = op.copy().reset_key()
chunk_op._tileable_op_key = op.key
if op.output_types[0] == OutputType.dataframe:
shape = (np.nan, out.shape[1])
index_value = parse_index(
out.index_value.to_pandas(),
left_chunk,
right_chunk,
op.func,
op.args,
op.kwargs,
)
out_chunk = chunk_op.new_chunk(
[left_chunk, right_chunk],
shape=shape,
index_value=index_value,
columns_value=out.columns_value,
index=(i, 0),
)
out_chunks.append(out_chunk)
nsplits[0].append(out_chunk.shape[0])
else:
shape = (np.nan,)
index_value = parse_index(
out.index_value.to_pandas(),
left_chunk,
right_chunk,
op.func,
op.args,
op.kwargs,
)
out_chunk = chunk_op.new_chunk(
[left_chunk, right_chunk],
shape=shape,
index_value=index_value,
name=out.name,
index=(i,),
)
out_chunks.append(out_chunk)
nsplits[0].append(out_chunk.shape[0])
i += 1
params = out.params
params["nsplits"] = tuple(tuple(ns) for ns in nsplits)
params["chunks"] = out_chunks
new_op = op.copy()
return new_op.new_tileables(op.inputs, kws=[params])
|
https://github.com/mars-project/mars/issues/1843
|
Traceback (most recent call last):
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/pandas/core/dtypes/common.py", line 171, in ensure_python_int
new_value = int(value)
ValueError: cannot convert float NaN to integer
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "tensor_loop.py", line 14, in <module>
df_res = md.DataFrame(score, columns=["probability"])
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/initializer.py", line 47, in __init__
df = dataframe_from_tensor(data, index=index, columns=columns, gpu=gpu, sparse=sparse)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/datasource/from_tensor.py", line 357, in dataframe_from_tensor
return op(tensor, index, columns)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/datasource/from_tensor.py", line 83, in __call__
return self._call_input_tensor(input_tensor, index, columns)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/datasource/from_tensor.py", line 156, in _call_input_tensor
index_value = parse_index(pd.RangeIndex(start=0, stop=input_tensor.shape[0]))
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/pandas/core/indexes/range.py", line 107, in __new__
stop = ensure_python_int(stop)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/pandas/core/dtypes/common.py", line 174, in ensure_python_int
raise TypeError(f"Wrong type {type(value)} for value {value}") from err
TypeError: Wrong type <class 'float'> for value nan
|
ValueError
|
def _call_input_tensor(self, input_tensor, index, columns):
if input_tensor.ndim not in {1, 2}:
raise ValueError("Must pass 1-d or 2-d input")
inputs = [input_tensor]
if index is not None:
if input_tensor.shape[0] != len(index):
raise ValueError(
f"index {index} should have the same shape with tensor: {input_tensor.shape[0]}"
)
index_value = self._process_index(index, inputs)
elif isinstance(input_tensor, SERIES_TYPE):
index_value = input_tensor.index_value
else:
stop = input_tensor.shape[0]
stop = -1 if np.isnan(stop) else stop
index_value = parse_index(pd.RangeIndex(start=0, stop=stop))
if columns is not None:
if not (
input_tensor.ndim == 1
and len(columns) == 1
or input_tensor.shape[1] == len(columns)
):
raise ValueError(
f"columns {columns} should have the same shape with tensor: {input_tensor.shape[1]}"
)
if not isinstance(columns, pd.Index):
if isinstance(columns, Base):
raise NotImplementedError("The columns value cannot be a tileable")
columns = pd.Index(columns)
columns_value = parse_index(columns, store_data=True)
else:
if input_tensor.ndim == 1:
# convert to 1-d DataFrame
columns_value = parse_index(pd.RangeIndex(start=0, stop=1), store_data=True)
else:
columns_value = parse_index(
pd.RangeIndex(start=0, stop=input_tensor.shape[1]), store_data=True
)
if input_tensor.ndim == 1:
shape = (input_tensor.shape[0], 1)
else:
shape = input_tensor.shape
return self.new_dataframe(
inputs,
shape,
dtypes=self.dtypes,
index_value=index_value,
columns_value=columns_value,
)
|
def _call_input_tensor(self, input_tensor, index, columns):
if input_tensor.ndim not in {1, 2}:
raise ValueError("Must pass 1-d or 2-d input")
inputs = [input_tensor]
if index is not None:
if input_tensor.shape[0] != len(index):
raise ValueError(
f"index {index} should have the same shape with tensor: {input_tensor.shape[0]}"
)
index_value = self._process_index(index, inputs)
elif isinstance(input_tensor, SERIES_TYPE):
index_value = input_tensor.index_value
else:
index_value = parse_index(pd.RangeIndex(start=0, stop=input_tensor.shape[0]))
if columns is not None:
if not (
input_tensor.ndim == 1
and len(columns) == 1
or input_tensor.shape[1] == len(columns)
):
raise ValueError(
f"columns {columns} should have the same shape with tensor: {input_tensor.shape[1]}"
)
if not isinstance(columns, pd.Index):
if isinstance(columns, Base):
raise NotImplementedError("The columns value cannot be a tileable")
columns = pd.Index(columns)
columns_value = parse_index(columns, store_data=True)
else:
if input_tensor.ndim == 1:
# convert to 1-d DataFrame
columns_value = parse_index(pd.RangeIndex(start=0, stop=1), store_data=True)
else:
columns_value = parse_index(
pd.RangeIndex(start=0, stop=input_tensor.shape[1]), store_data=True
)
if input_tensor.ndim == 1:
shape = (input_tensor.shape[0], 1)
else:
shape = input_tensor.shape
return self.new_dataframe(
inputs,
shape,
dtypes=self.dtypes,
index_value=index_value,
columns_value=columns_value,
)
|
https://github.com/mars-project/mars/issues/1843
|
Traceback (most recent call last):
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/pandas/core/dtypes/common.py", line 171, in ensure_python_int
new_value = int(value)
ValueError: cannot convert float NaN to integer
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "tensor_loop.py", line 14, in <module>
df_res = md.DataFrame(score, columns=["probability"])
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/initializer.py", line 47, in __init__
df = dataframe_from_tensor(data, index=index, columns=columns, gpu=gpu, sparse=sparse)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/datasource/from_tensor.py", line 357, in dataframe_from_tensor
return op(tensor, index, columns)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/datasource/from_tensor.py", line 83, in __call__
return self._call_input_tensor(input_tensor, index, columns)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/datasource/from_tensor.py", line 156, in _call_input_tensor
index_value = parse_index(pd.RangeIndex(start=0, stop=input_tensor.shape[0]))
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/pandas/core/indexes/range.py", line 107, in __new__
stop = ensure_python_int(stop)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/pandas/core/dtypes/common.py", line 174, in ensure_python_int
raise TypeError(f"Wrong type {type(value)} for value {value}") from err
TypeError: Wrong type <class 'float'> for value nan
|
ValueError
|
def _tile_input_tensor(cls, op):
out_df = op.outputs[0]
in_tensor = op.input
out_chunks = []
nsplits = in_tensor.nsplits
if op.index is not None:
# rechunk index if it's a tensor
check_chunks_unknown_shape(op.inputs, TilesError)
index_tensor = op.index.rechunk([nsplits[0]])._inplace_tile()
else:
index_tensor = None
cum_size = [np.cumsum(s) for s in nsplits]
for in_chunk in in_tensor.chunks:
out_op = op.copy().reset_key()
chunk_inputs = [in_chunk]
if in_chunk.ndim == 1:
(i,) = in_chunk.index
column_stop = 1
chunk_index = (in_chunk.index[0], 0)
dtypes = out_df.dtypes
columns_value = parse_index(
out_df.columns_value.to_pandas()[0:1], store_data=True
)
chunk_shape = (in_chunk.shape[0], 1)
else:
i, j = in_chunk.index
column_stop = cum_size[1][j]
chunk_index = in_chunk.index
dtypes = out_df.dtypes[column_stop - in_chunk.shape[1] : column_stop]
pd_columns = out_df.columns_value.to_pandas()
chunk_pd_columns = pd_columns[column_stop - in_chunk.shape[1] : column_stop]
columns_value = parse_index(chunk_pd_columns, store_data=True)
chunk_shape = in_chunk.shape
index_stop = cum_size[0][i]
if isinstance(op.index, INDEX_TYPE):
index_chunk = index_tensor.chunks[i]
index_value = index_chunk.index_value
chunk_inputs.append(index_chunk)
elif isinstance(in_chunk, SERIES_CHUNK_TYPE):
index_value = in_chunk.index_value
elif out_df.index_value.has_value():
pd_index = out_df.index_value.to_pandas()
chunk_pd_index = pd_index[index_stop - in_chunk.shape[0] : index_stop]
index_value = parse_index(chunk_pd_index, store_data=True)
elif op.index is None:
# input tensor has unknown shape
index_value = parse_index(pd.RangeIndex(-1), in_chunk)
else:
index_chunk = index_tensor.cix[in_chunk.index[0],]
chunk_inputs.append(index_chunk)
index_value = parse_index(
pd.Index([], dtype=index_tensor.dtype),
index_chunk,
type(out_op).__name__,
)
out_op.extra_params["index_stop"] = index_stop
out_op.extra_params["column_stop"] = column_stop
out_chunk = out_op.new_chunk(
chunk_inputs,
shape=chunk_shape,
index=chunk_index,
dtypes=dtypes,
index_value=index_value,
columns_value=columns_value,
)
out_chunks.append(out_chunk)
if in_tensor.ndim == 1:
nsplits = in_tensor.nsplits + ((1,),)
else:
nsplits = in_tensor.nsplits
new_op = op.copy()
return new_op.new_dataframes(
out_df.inputs,
out_df.shape,
dtypes=out_df.dtypes,
index_value=out_df.index_value,
columns_value=out_df.columns_value,
chunks=out_chunks,
nsplits=nsplits,
)
|
def _tile_input_tensor(cls, op):
out_df = op.outputs[0]
in_tensor = op.input
out_chunks = []
nsplits = in_tensor.nsplits
if op.index is not None:
# rechunk index if it's a tensor
check_chunks_unknown_shape(op.inputs, TilesError)
index_tensor = op.index.rechunk([nsplits[0]])._inplace_tile()
else:
index_tensor = None
cum_size = [np.cumsum(s) for s in nsplits]
for in_chunk in in_tensor.chunks:
out_op = op.copy().reset_key()
chunk_inputs = [in_chunk]
if in_chunk.ndim == 1:
(i,) = in_chunk.index
column_stop = 1
chunk_index = (in_chunk.index[0], 0)
dtypes = out_df.dtypes
columns_value = parse_index(
out_df.columns_value.to_pandas()[0:1], store_data=True
)
chunk_shape = (in_chunk.shape[0], 1)
else:
i, j = in_chunk.index
column_stop = cum_size[1][j]
chunk_index = in_chunk.index
dtypes = out_df.dtypes[column_stop - in_chunk.shape[1] : column_stop]
pd_columns = out_df.columns_value.to_pandas()
chunk_pd_columns = pd_columns[column_stop - in_chunk.shape[1] : column_stop]
columns_value = parse_index(chunk_pd_columns, store_data=True)
chunk_shape = in_chunk.shape
index_stop = cum_size[0][i]
if isinstance(op.index, INDEX_TYPE):
index_chunk = index_tensor.chunks[i]
index_value = index_chunk.index_value
chunk_inputs.append(index_chunk)
elif isinstance(in_chunk, SERIES_CHUNK_TYPE):
index_value = in_chunk.index_value
elif out_df.index_value.has_value():
pd_index = out_df.index_value.to_pandas()
chunk_pd_index = pd_index[index_stop - in_chunk.shape[0] : index_stop]
index_value = parse_index(chunk_pd_index, store_data=True)
else:
assert op.index is not None
index_chunk = index_tensor.cix[in_chunk.index[0],]
chunk_inputs.append(index_chunk)
index_value = parse_index(
pd.Index([], dtype=index_tensor.dtype),
index_chunk,
type(out_op).__name__,
)
out_op.extra_params["index_stop"] = index_stop
out_op.extra_params["column_stop"] = column_stop
out_chunk = out_op.new_chunk(
chunk_inputs,
shape=chunk_shape,
index=chunk_index,
dtypes=dtypes,
index_value=index_value,
columns_value=columns_value,
)
out_chunks.append(out_chunk)
if in_tensor.ndim == 1:
nsplits = in_tensor.nsplits + ((1,),)
else:
nsplits = in_tensor.nsplits
new_op = op.copy()
return new_op.new_dataframes(
out_df.inputs,
out_df.shape,
dtypes=out_df.dtypes,
index_value=out_df.index_value,
columns_value=out_df.columns_value,
chunks=out_chunks,
nsplits=nsplits,
)
|
https://github.com/mars-project/mars/issues/1843
|
Traceback (most recent call last):
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/pandas/core/dtypes/common.py", line 171, in ensure_python_int
new_value = int(value)
ValueError: cannot convert float NaN to integer
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "tensor_loop.py", line 14, in <module>
df_res = md.DataFrame(score, columns=["probability"])
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/initializer.py", line 47, in __init__
df = dataframe_from_tensor(data, index=index, columns=columns, gpu=gpu, sparse=sparse)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/datasource/from_tensor.py", line 357, in dataframe_from_tensor
return op(tensor, index, columns)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/datasource/from_tensor.py", line 83, in __call__
return self._call_input_tensor(input_tensor, index, columns)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/datasource/from_tensor.py", line 156, in _call_input_tensor
index_value = parse_index(pd.RangeIndex(start=0, stop=input_tensor.shape[0]))
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/pandas/core/indexes/range.py", line 107, in __new__
stop = ensure_python_int(stop)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/pandas/core/dtypes/common.py", line 174, in ensure_python_int
raise TypeError(f"Wrong type {type(value)} for value {value}") from err
TypeError: Wrong type <class 'float'> for value nan
|
ValueError
|
def execute(cls, ctx, op):
chunk = op.outputs[0]
if isinstance(op.input, dict):
d = OrderedDict()
for k, v in op.input.items():
if hasattr(v, "key"):
d[k] = ctx[v.key]
else:
d[k] = v
if op.index is not None:
index_data = ctx[op.index.key]
else:
index_data = chunk.index_value.to_pandas()
ctx[chunk.key] = pd.DataFrame(
d, index=index_data, columns=chunk.columns_value.to_pandas()
)
else:
tensor_data = ctx[op.inputs[0].key]
if isinstance(tensor_data, pd.Series):
ctx[chunk.key] = tensor_data.to_frame(name=chunk.dtypes.index[0])
else:
if op.index is not None:
# index is a tensor
index_data = ctx[op.inputs[1].key]
else:
index_data = chunk.index_value.to_pandas()
if isinstance(index_data, pd.RangeIndex) and len(index_data) == 0:
index_data = None
ctx[chunk.key] = pd.DataFrame(
tensor_data, index=index_data, columns=chunk.columns_value.to_pandas()
)
|
def execute(cls, ctx, op):
chunk = op.outputs[0]
if isinstance(op.input, dict):
d = OrderedDict()
for k, v in op.input.items():
if hasattr(v, "key"):
d[k] = ctx[v.key]
else:
d[k] = v
if op.index is not None:
index_data = ctx[op.index.key]
else:
index_data = chunk.index_value.to_pandas()
ctx[chunk.key] = pd.DataFrame(
d, index=index_data, columns=chunk.columns_value.to_pandas()
)
else:
tensor_data = ctx[op.inputs[0].key]
if isinstance(tensor_data, pd.Series):
ctx[chunk.key] = tensor_data.to_frame(name=chunk.dtypes.index[0])
else:
if op.index is not None:
# index is a tensor
index_data = ctx[op.inputs[1].key]
else:
index_data = chunk.index_value.to_pandas()
ctx[chunk.key] = pd.DataFrame(
tensor_data, index=index_data, columns=chunk.columns_value.to_pandas()
)
|
https://github.com/mars-project/mars/issues/1843
|
Traceback (most recent call last):
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/pandas/core/dtypes/common.py", line 171, in ensure_python_int
new_value = int(value)
ValueError: cannot convert float NaN to integer
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "tensor_loop.py", line 14, in <module>
df_res = md.DataFrame(score, columns=["probability"])
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/initializer.py", line 47, in __init__
df = dataframe_from_tensor(data, index=index, columns=columns, gpu=gpu, sparse=sparse)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/datasource/from_tensor.py", line 357, in dataframe_from_tensor
return op(tensor, index, columns)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/datasource/from_tensor.py", line 83, in __call__
return self._call_input_tensor(input_tensor, index, columns)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/datasource/from_tensor.py", line 156, in _call_input_tensor
index_value = parse_index(pd.RangeIndex(start=0, stop=input_tensor.shape[0]))
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/pandas/core/indexes/range.py", line 107, in __new__
stop = ensure_python_int(stop)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/pandas/core/dtypes/common.py", line 174, in ensure_python_int
raise TypeError(f"Wrong type {type(value)} for value {value}") from err
TypeError: Wrong type <class 'float'> for value nan
|
ValueError
|
def tile(cls, op):
in_tensor = op.input
tensor = op.outputs[0]
# check unknown shape
check_chunks_unknown_shape(op.inputs, TilesError)
if any(np.isnan(s) for s in tensor.shape):
# -1 exists in newshape and input tensor has unknown shape
# recalculate new shape
shape = tuple(-1 if np.isnan(s) else s for s in tensor.shape)
newshape = calc_shape(in_tensor.size, shape)
tensor._shape = newshape
if op.order == "F":
# do transpose first, then do regular reshape, then transpose back
result = in_tensor.transpose().reshape(op.newshape[::-1])
if getattr(op, "_reshape_with_shuffle", True):
result.op.extra_params["_reshape_with_shuffle"] = True
result = result.transpose()
return [recursive_tile(result)]
if len(in_tensor.chunks) == 1:
# 1 chunk
chunk_op = op.copy().reset_key()
chunk = chunk_op.new_chunk(
in_tensor.chunks,
shape=tensor.shape,
order=tensor.order,
index=(0,) * tensor.ndim,
)
new_op = op.copy()
return new_op.new_tensors(
op.inputs,
shape=tensor.shape,
order=tensor.order,
chunks=[chunk],
nsplits=tuple((s,) for s in tensor.shape),
)
try:
rechunk_nsplits, reshape_nsplits = cls._gen_reshape_rechunk_nsplits(
in_tensor.shape, tensor.shape, in_tensor.nsplits
)
rechunked_tensor = in_tensor.rechunk(rechunk_nsplits)._inplace_tile()
in_idxes = itertools.product(*[range(len(s)) for s in rechunk_nsplits])
out_idxes = itertools.product(*[range(len(s)) for s in reshape_nsplits])
out_shape = itertools.product(*[s for s in reshape_nsplits])
out_chunks = []
for input_idx, out_idx, out_shape in zip(in_idxes, out_idxes, out_shape):
in_chunk = rechunked_tensor.cix[input_idx]
chunk_op = op.copy().reset_key()
chunk_op._newshape = out_shape
out_chunk = chunk_op.new_chunk(
[in_chunk], shape=out_shape, order=tensor.order, index=out_idx
)
out_chunks.append(out_chunk)
new_op = op.copy()
return new_op.new_tensors(
op.inputs,
tensor.shape,
order=tensor.order,
chunks=out_chunks,
nsplits=reshape_nsplits,
)
except ValueError:
# TODO: make this as default when shuffle is mature
if getattr(op.extra_params, "_reshape_with_shuffle", False):
return cls._tile_as_shuffle(op)
# shape incompatible, we will first do flatten, then reshape to the new shape
return [
in_tensor.reshape(-1, order=tensor.op.order)
._inplace_tile()
.reshape(tensor.shape, order=tensor.op.order)
._inplace_tile()
]
|
def tile(cls, op):
in_tensor = op.input
tensor = op.outputs[0]
if op.order == "F":
# do transpose first, then do regular reshape, then transpose back
result = in_tensor.transpose().reshape(op.newshape[::-1])
if getattr(op, "_reshape_with_shuffle", True):
result.op.extra_params["_reshape_with_shuffle"] = True
result = result.transpose()
return [recursive_tile(result)]
check_chunks_unknown_shape(op.inputs, TilesError)
if len(in_tensor.chunks) == 1:
# 1 chunk
chunk_op = op.copy().reset_key()
chunk = chunk_op.new_chunk(
in_tensor.chunks,
shape=tensor.shape,
order=tensor.order,
index=(0,) * tensor.ndim,
)
new_op = op.copy()
return new_op.new_tensors(
op.inputs,
shape=tensor.shape,
order=tensor.order,
chunks=[chunk],
nsplits=tuple((s,) for s in tensor.shape),
)
try:
rechunk_nsplits, reshape_nsplits = cls._gen_reshape_rechunk_nsplits(
in_tensor.shape, tensor.shape, in_tensor.nsplits
)
rechunked_tensor = in_tensor.rechunk(rechunk_nsplits)._inplace_tile()
in_idxes = itertools.product(*[range(len(s)) for s in rechunk_nsplits])
out_idxes = itertools.product(*[range(len(s)) for s in reshape_nsplits])
out_shape = itertools.product(*[s for s in reshape_nsplits])
out_chunks = []
for input_idx, out_idx, out_shape in zip(in_idxes, out_idxes, out_shape):
in_chunk = rechunked_tensor.cix[input_idx]
chunk_op = op.copy().reset_key()
chunk_op._newshape = out_shape
out_chunk = chunk_op.new_chunk(
[in_chunk], shape=out_shape, order=tensor.order, index=out_idx
)
out_chunks.append(out_chunk)
new_op = op.copy()
return new_op.new_tensors(
op.inputs,
tensor.shape,
order=tensor.order,
chunks=out_chunks,
nsplits=reshape_nsplits,
)
except ValueError:
# TODO: make this as default when shuffle is mature
if getattr(op.extra_params, "_reshape_with_shuffle", False):
return cls._tile_as_shuffle(op)
# shape incompatible, we will first do flatten, then reshape to the new shape
return [
in_tensor.reshape(-1, order=tensor.op.order)
._inplace_tile()
.reshape(tensor.shape, order=tensor.op.order)
._inplace_tile()
]
|
https://github.com/mars-project/mars/issues/1843
|
Traceback (most recent call last):
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/pandas/core/dtypes/common.py", line 171, in ensure_python_int
new_value = int(value)
ValueError: cannot convert float NaN to integer
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "tensor_loop.py", line 14, in <module>
df_res = md.DataFrame(score, columns=["probability"])
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/initializer.py", line 47, in __init__
df = dataframe_from_tensor(data, index=index, columns=columns, gpu=gpu, sparse=sparse)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/datasource/from_tensor.py", line 357, in dataframe_from_tensor
return op(tensor, index, columns)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/datasource/from_tensor.py", line 83, in __call__
return self._call_input_tensor(input_tensor, index, columns)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/datasource/from_tensor.py", line 156, in _call_input_tensor
index_value = parse_index(pd.RangeIndex(start=0, stop=input_tensor.shape[0]))
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/pandas/core/indexes/range.py", line 107, in __new__
stop = ensure_python_int(stop)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/pandas/core/dtypes/common.py", line 174, in ensure_python_int
raise TypeError(f"Wrong type {type(value)} for value {value}") from err
TypeError: Wrong type <class 'float'> for value nan
|
ValueError
|
def reshape(a, newshape, order="C"):
"""
Gives a new shape to a tensor without changing its data.
Parameters
----------
a : array_like
Tensor to be reshaped.
newshape : int or tuple of ints
The new shape should be compatible with the original shape. If
an integer, then the result will be a 1-D tensor of that length.
One shape dimension can be -1. In this case, the value is
inferred from the length of the tensor and remaining dimensions.
order : {'C', 'F', 'A'}, optional
Read the elements of `a` using this index order, and place the
elements into the reshaped array using this index order. 'C'
means to read / write the elements using C-like index order,
with the last axis index changing fastest, back to the first
axis index changing slowest. 'F' means to read / write the
elements using Fortran-like index order, with the first index
changing fastest, and the last index changing slowest. Note that
the 'C' and 'F' options take no account of the memory layout of
the underlying array, and only refer to the order of indexing.
'A' means to read / write the elements in Fortran-like index
order if `a` is Fortran *contiguous* in memory, C-like order
otherwise.
Returns
-------
reshaped_array : Tensor
This will be a new view object if possible; otherwise, it will
be a copy.
See Also
--------
Tensor.reshape : Equivalent method.
Notes
-----
It is not always possible to change the shape of a tensor without
copying the data. If you want an error to be raised when the data is copied,
you should assign the new shape to the shape attribute of the array::
>>> import mars.tensor as mt
>>> a = mt.arange(6).reshape((3, 2))
>>> a.execute()
array([[0, 1],
[2, 3],
[4, 5]])
You can think of reshaping as first raveling the tensor (using the given
index order), then inserting the elements from the raveled tensor into the
new tensor using the same kind of index ordering as was used for the
raveling.
>>> mt.reshape(a, (2, 3)).execute()
array([[0, 1, 2],
[3, 4, 5]])
>>> mt.reshape(mt.ravel(a), (2, 3)).execute()
array([[0, 1, 2],
[3, 4, 5]])
Examples
--------
>>> a = mt.array([[1,2,3], [4,5,6]])
>>> mt.reshape(a, 6).execute()
array([1, 2, 3, 4, 5, 6])
>>> mt.reshape(a, (3,-1)).execute() # the unspecified value is inferred to be 2
array([[1, 2],
[3, 4],
[5, 6]])
"""
a = astensor(a)
if np.isnan(sum(a.shape)):
# some shape is nan
new_shape = [newshape] if isinstance(newshape, int) else list(newshape)
# if -1 exists in newshape, just treat it as unknown shape
new_shape = [s if s != -1 else np.nan for s in new_shape]
newshape = tuple(new_shape)
else:
newshape = calc_shape(a.size, newshape)
if a.size != np.prod(newshape):
raise ValueError(
f"cannot reshape array of size {a.size} into shape {newshape}"
)
tensor_order = get_order(order, a.order, available_options="CFA")
if a.shape == newshape and tensor_order == a.order:
# does not need to reshape
return a
return _reshape(a, newshape, order=order, tensor_order=tensor_order)
|
def reshape(a, newshape, order="C"):
"""
Gives a new shape to a tensor without changing its data.
Parameters
----------
a : array_like
Tensor to be reshaped.
newshape : int or tuple of ints
The new shape should be compatible with the original shape. If
an integer, then the result will be a 1-D tensor of that length.
One shape dimension can be -1. In this case, the value is
inferred from the length of the tensor and remaining dimensions.
order : {'C', 'F', 'A'}, optional
Read the elements of `a` using this index order, and place the
elements into the reshaped array using this index order. 'C'
means to read / write the elements using C-like index order,
with the last axis index changing fastest, back to the first
axis index changing slowest. 'F' means to read / write the
elements using Fortran-like index order, with the first index
changing fastest, and the last index changing slowest. Note that
the 'C' and 'F' options take no account of the memory layout of
the underlying array, and only refer to the order of indexing.
'A' means to read / write the elements in Fortran-like index
order if `a` is Fortran *contiguous* in memory, C-like order
otherwise.
Returns
-------
reshaped_array : Tensor
This will be a new view object if possible; otherwise, it will
be a copy.
See Also
--------
Tensor.reshape : Equivalent method.
Notes
-----
It is not always possible to change the shape of a tensor without
copying the data. If you want an error to be raised when the data is copied,
you should assign the new shape to the shape attribute of the array::
>>> import mars.tensor as mt
>>> a = mt.arange(6).reshape((3, 2))
>>> a.execute()
array([[0, 1],
[2, 3],
[4, 5]])
You can think of reshaping as first raveling the tensor (using the given
index order), then inserting the elements from the raveled tensor into the
new tensor using the same kind of index ordering as was used for the
raveling.
>>> mt.reshape(a, (2, 3)).execute()
array([[0, 1, 2],
[3, 4, 5]])
>>> mt.reshape(mt.ravel(a), (2, 3)).execute()
array([[0, 1, 2],
[3, 4, 5]])
Examples
--------
>>> a = mt.array([[1,2,3], [4,5,6]])
>>> mt.reshape(a, 6).execute()
array([1, 2, 3, 4, 5, 6])
>>> mt.reshape(a, (3,-1)).execute() # the unspecified value is inferred to be 2
array([[1, 2],
[3, 4],
[5, 6]])
"""
a = astensor(a)
if np.isnan(sum(a.shape)):
raise ValueError(f"tensor shape is unknown, {a.shape}")
newshape = calc_shape(a.size, newshape)
if a.size != np.prod(newshape):
raise ValueError(f"cannot reshape array of size {a.size} into shape {newshape}")
tensor_order = get_order(order, a.order, available_options="CFA")
if a.shape == newshape and tensor_order == a.order:
# does not need to reshape
return a
return _reshape(a, newshape, order=order, tensor_order=tensor_order)
|
https://github.com/mars-project/mars/issues/1843
|
Traceback (most recent call last):
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/pandas/core/dtypes/common.py", line 171, in ensure_python_int
new_value = int(value)
ValueError: cannot convert float NaN to integer
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "tensor_loop.py", line 14, in <module>
df_res = md.DataFrame(score, columns=["probability"])
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/initializer.py", line 47, in __init__
df = dataframe_from_tensor(data, index=index, columns=columns, gpu=gpu, sparse=sparse)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/datasource/from_tensor.py", line 357, in dataframe_from_tensor
return op(tensor, index, columns)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/datasource/from_tensor.py", line 83, in __call__
return self._call_input_tensor(input_tensor, index, columns)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/datasource/from_tensor.py", line 156, in _call_input_tensor
index_value = parse_index(pd.RangeIndex(start=0, stop=input_tensor.shape[0]))
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/pandas/core/indexes/range.py", line 107, in __new__
stop = ensure_python_int(stop)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/pandas/core/dtypes/common.py", line 174, in ensure_python_int
raise TypeError(f"Wrong type {type(value)} for value {value}") from err
TypeError: Wrong type <class 'float'> for value nan
|
ValueError
|
def __call__(self, a, bins, range, weights):
if range is not None:
_check_range(range)
if isinstance(bins, str):
# string, 'auto', 'stone', ...
# shape is unknown
bin_name = bins
# if `bins` is a string for an automatic method,
# this will replace it with the number of bins calculated
if bin_name not in _hist_bin_selectors:
raise ValueError(f"{bin_name!r} is not a valid estimator for `bins`")
if weights is not None:
raise TypeError(
"Automated estimation of the number of "
"bins is not supported for weighted data"
)
if isinstance(range, tuple) and len(range) == 2:
# if `bins` is a string, e.g. 'auto', 'stone'...,
# and `range` provided as well,
# `a` should be trimmed first
first_edge, last_edge = _get_outer_edges(a, range)
a = a[(a >= first_edge) & (a <= last_edge)]
shape = (np.nan,)
elif mt.ndim(bins) == 0:
try:
n_equal_bins = operator.index(bins)
except TypeError: # pragma: no cover
raise TypeError("`bins` must be an integer, a string, or an array")
if n_equal_bins < 1:
raise ValueError("`bins` must be positive, when an integer")
shape = (bins + 1,)
elif mt.ndim(bins) == 1:
if not isinstance(bins, TENSOR_TYPE):
bins = np.asarray(bins)
if not is_asc_sorted(bins):
raise ValueError("`bins` must increase monotonically, when an array")
shape = astensor(bins).shape
else:
raise ValueError("`bins` must be 1d, when an array")
inputs = [a]
if isinstance(bins, TENSOR_TYPE):
inputs.append(bins)
if weights is not None:
inputs.append(weights)
if (
(a.size > 0 or np.isnan(a.size))
and (isinstance(bins, str) or mt.ndim(bins) == 0)
and not range
):
# for bins that is str or integer,
# requires min max calculated first
input_min = self._input_min = a.min()
inputs.append(input_min)
input_max = self._input_max = a.max()
inputs.append(input_max)
return self.new_tensor(inputs, shape=shape, order=TensorOrder.C_ORDER)
|
def __call__(self, a, bins, range, weights):
if range is not None:
_check_range(range)
if isinstance(bins, str):
# string, 'auto', 'stone', ...
# shape is unknown
bin_name = bins
# if `bins` is a string for an automatic method,
# this will replace it with the number of bins calculated
if bin_name not in _hist_bin_selectors:
raise ValueError(f"{bin_name!r} is not a valid estimator for `bins`")
if weights is not None:
raise TypeError(
"Automated estimation of the number of "
"bins is not supported for weighted data"
)
if isinstance(range, tuple) and len(range) == 2:
# if `bins` is a string, e.g. 'auto', 'stone'...,
# and `range` provided as well,
# `a` should be trimmed first
first_edge, last_edge = _get_outer_edges(a, range)
a = a[(a >= first_edge) & (a <= last_edge)]
shape = (np.nan,)
elif mt.ndim(bins) == 0:
try:
n_equal_bins = operator.index(bins)
except TypeError: # pragma: no cover
raise TypeError("`bins` must be an integer, a string, or an array")
if n_equal_bins < 1:
raise ValueError("`bins` must be positive, when an integer")
shape = (bins + 1,)
elif mt.ndim(bins) == 1:
if not isinstance(bins, TENSOR_TYPE):
bins = np.asarray(bins)
if not is_asc_sorted(bins):
raise ValueError("`bins` must increase monotonically, when an array")
shape = astensor(bins).shape
else:
raise ValueError("`bins` must be 1d, when an array")
inputs = [a]
if isinstance(bins, TENSOR_TYPE):
inputs.append(bins)
if weights is not None:
inputs.append(weights)
if a.size > 0 and (isinstance(bins, str) or mt.ndim(bins) == 0) and not range:
# for bins that is str or integer,
# requires min max calculated first
input_min = self._input_min = a.min()
inputs.append(input_min)
input_max = self._input_max = a.max()
inputs.append(input_max)
return self.new_tensor(inputs, shape=shape, order=TensorOrder.C_ORDER)
|
https://github.com/mars-project/mars/issues/1843
|
Traceback (most recent call last):
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/pandas/core/dtypes/common.py", line 171, in ensure_python_int
new_value = int(value)
ValueError: cannot convert float NaN to integer
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "tensor_loop.py", line 14, in <module>
df_res = md.DataFrame(score, columns=["probability"])
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/initializer.py", line 47, in __init__
df = dataframe_from_tensor(data, index=index, columns=columns, gpu=gpu, sparse=sparse)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/datasource/from_tensor.py", line 357, in dataframe_from_tensor
return op(tensor, index, columns)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/datasource/from_tensor.py", line 83, in __call__
return self._call_input_tensor(input_tensor, index, columns)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/mars/dataframe/datasource/from_tensor.py", line 156, in _call_input_tensor
index_value = parse_index(pd.RangeIndex(start=0, stop=input_tensor.shape[0]))
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/pandas/core/indexes/range.py", line 107, in __new__
stop = ensure_python_int(stop)
File "/home/smartguo/lib/anaconda3/envs/mars/lib/python3.7/site-packages/pandas/core/dtypes/common.py", line 174, in ensure_python_int
raise TypeError(f"Wrong type {type(value)} for value {value}") from err
TypeError: Wrong type <class 'float'> for value nan
|
ValueError
|
def tile_with_columns(cls, op):
in_df = op.inputs[0]
out_df = op.outputs[0]
col_names = op.col_names
if not isinstance(col_names, list):
column_index = calc_columns_index(col_names, in_df)
out_chunks = []
dtype = in_df.dtypes[col_names]
for i in range(in_df.chunk_shape[0]):
c = in_df.cix[(i, column_index)]
chunk_op = DataFrameIndex(col_names=col_names)
out_chunks.append(
chunk_op.new_chunk(
[c],
shape=(c.shape[0],),
index=(i,),
dtype=dtype,
index_value=c.index_value,
name=col_names,
)
)
new_op = op.copy()
return new_op.new_seriess(
op.inputs,
shape=out_df.shape,
dtype=out_df.dtype,
index_value=out_df.index_value,
name=out_df.name,
nsplits=(in_df.nsplits[0],),
chunks=out_chunks,
)
else:
# combine columns into one chunk and keep the columns order at the same time.
# When chunk columns are ['c1', 'c2', 'c3'], ['c4', 'c5'],
# selected columns are ['c2', 'c3', 'c4', 'c2'], `column_splits` will be
# [(['c2', 'c3'], 0), ('c4', 1), ('c2', 0)].
selected_index = [calc_columns_index(col, in_df) for col in col_names]
condition = np.where(np.diff(selected_index))[0] + 1
column_splits = np.split(col_names, condition)
column_indexes = np.split(selected_index, condition)
out_chunks = [[] for _ in range(in_df.chunk_shape[0])]
column_nsplits = []
for i, (columns, column_idx) in enumerate(zip(column_splits, column_indexes)):
dtypes = in_df.dtypes[columns]
column_nsplits.append(len(columns))
for j in range(in_df.chunk_shape[0]):
c = in_df.cix[(j, column_idx[0])]
index_op = DataFrameIndex(
col_names=list(columns), output_types=[OutputType.dataframe]
)
out_chunk = index_op.new_chunk(
[c],
shape=(c.shape[0], len(columns)),
index=(j, i),
dtypes=dtypes,
index_value=c.index_value,
columns_value=parse_index(pd.Index(columns), store_data=True),
)
out_chunks[j].append(out_chunk)
out_chunks = [item for cl in out_chunks for item in cl]
new_op = op.copy()
nsplits = (in_df.nsplits[0], tuple(column_nsplits))
return new_op.new_dataframes(
op.inputs,
shape=out_df.shape,
dtypes=out_df.dtypes,
index_value=out_df.index_value,
columns_value=out_df.columns_value,
chunks=out_chunks,
nsplits=nsplits,
)
|
def tile_with_columns(cls, op):
in_df = op.inputs[0]
out_df = op.outputs[0]
col_names = op.col_names
if not isinstance(col_names, list):
column_index = calc_columns_index(col_names, in_df)
out_chunks = []
dtype = in_df.dtypes[col_names]
for i in range(in_df.chunk_shape[0]):
c = in_df.cix[(i, column_index)]
op = DataFrameIndex(col_names=col_names)
out_chunks.append(
op.new_chunk(
[c],
shape=(c.shape[0],),
index=(i,),
dtype=dtype,
index_value=c.index_value,
name=col_names,
)
)
new_op = op.copy()
return new_op.new_seriess(
op.inputs,
shape=out_df.shape,
dtype=out_df.dtype,
index_value=out_df.index_value,
name=out_df.name,
nsplits=(in_df.nsplits[0],),
chunks=out_chunks,
)
else:
# combine columns into one chunk and keep the columns order at the same time.
# When chunk columns are ['c1', 'c2', 'c3'], ['c4', 'c5'],
# selected columns are ['c2', 'c3', 'c4', 'c2'], `column_splits` will be
# [(['c2', 'c3'], 0), ('c4', 1), ('c2', 0)].
selected_index = [calc_columns_index(col, in_df) for col in col_names]
condition = np.where(np.diff(selected_index))[0] + 1
column_splits = np.split(col_names, condition)
column_indexes = np.split(selected_index, condition)
out_chunks = [[] for _ in range(in_df.chunk_shape[0])]
column_nsplits = []
for i, (columns, column_idx) in enumerate(zip(column_splits, column_indexes)):
dtypes = in_df.dtypes[columns]
column_nsplits.append(len(columns))
for j in range(in_df.chunk_shape[0]):
c = in_df.cix[(j, column_idx[0])]
index_op = DataFrameIndex(
col_names=list(columns), output_types=[OutputType.dataframe]
)
out_chunk = index_op.new_chunk(
[c],
shape=(c.shape[0], len(columns)),
index=(j, i),
dtypes=dtypes,
index_value=c.index_value,
columns_value=parse_index(pd.Index(columns), store_data=True),
)
out_chunks[j].append(out_chunk)
out_chunks = [item for cl in out_chunks for item in cl]
new_op = op.copy()
nsplits = (in_df.nsplits[0], tuple(column_nsplits))
return new_op.new_dataframes(
op.inputs,
shape=out_df.shape,
dtypes=out_df.dtypes,
index_value=out_df.index_value,
columns_value=out_df.columns_value,
chunks=out_chunks,
nsplits=nsplits,
)
|
https://github.com/mars-project/mars/issues/1786
|
In [1]: import mars.dataframe as md
In [2]: import pandas as pd
In [4]: md.Series(pd.Series([], dtype=object)).execute()
---------------------------------------------------------------------------
ZeroDivisionError Traceback (most recent call last)
<ipython-input-4-665db81dbad7> in <module>
----> 1 md.Series(pd.Series([], dtype=object)).execute()
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
641
642 if wait:
--> 643 return run()
644 else:
645 thread_executor = ThreadPoolExecutor(1)
~/Workspace/mars/mars/core.py in run()
637
638 def run():
--> 639 self.data.execute(session, **kw)
640 return self
641
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
377
378 if wait:
--> 379 return run()
380 else:
381 # leverage ThreadPoolExecutor to submit task,
~/Workspace/mars/mars/core.py in run()
372 def run():
373 # no more fetch, thus just fire run
--> 374 session.run(self, **kw)
375 # return Tileable or ExecutableTuple itself
376 return self
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
503 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
504 for t in tileables)
--> 505 result = self._sess.run(*tileables, **kw)
506
507 for t in tileables:
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
109 # set number of running cores
110 self.context.set_ncores(kw['n_parallel'])
--> 111 res = self._executor.execute_tileables(tileables, **kw)
112 return res
113
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
449 def _inner(*args, **kwargs):
450 with self:
--> 451 return func(*args, **kwargs)
452
453 return _inner
~/Workspace/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
859 # build chunk graph, tile will be done during building
860 chunk_graph = chunk_graph_builder.build(
--> 861 tileables, tileable_graph=tileable_graph)
862 tileable_graph = chunk_graph_builder.prev_tileable_graph
863 temp_result_keys = set(result_keys)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
449 def _inner(*args, **kwargs):
450 with self:
--> 451 return func(*args, **kwargs)
452
453 return _inner
~/Workspace/mars/mars/tiles.py in build(self, tileables, tileable_graph)
346
347 chunk_graph = super().build(
--> 348 tileables, tileable_graph=tileable_graph)
349 self._iterative_chunk_graphs.append(chunk_graph)
350 if len(self._interrupted_ops) == 0:
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
449 def _inner(*args, **kwargs):
450 with self:
--> 451 return func(*args, **kwargs)
452
453 return _inner
~/Workspace/mars/mars/tiles.py in build(self, tileables, tileable_graph)
260 # for further execution
261 partial_tiled_chunks = \
--> 262 self._on_tile_failure(tileable_data.op, exc_info)
263 if partial_tiled_chunks is not None and \
264 len(partial_tiled_chunks) > 0:
~/Workspace/mars/mars/tiles.py in inner(op, exc_info)
299 on_tile_failure(op, exc_info)
300 else:
--> 301 raise exc_info[1].with_traceback(exc_info[2]) from None
302 return inner
303
~/Workspace/mars/mars/tiles.py in build(self, tileables, tileable_graph)
240 continue
241 try:
--> 242 tiled = self._tile(tileable_data, tileable_graph)
243 tiled_op.add(tileable_data.op)
244 for t, td in zip(tileable_data.op.outputs, tiled):
~/Workspace/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
335 if any(inp.op in self._interrupted_ops for inp in tileable_data.inputs):
336 raise TilesError('Tile fail due to failure of inputs')
--> 337 return super()._tile(tileable_data, tileable_graph)
338
339 @enter_mode(build=True, kernel=True)
~/Workspace/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
199 t._nsplits = o.nsplits
200 elif on_tile is None:
--> 201 tds[0]._inplace_tile()
202 else:
203 tds = on_tile(tileable_data.op.outputs, tds)
~/Workspace/mars/mars/core.py in _inplace_tile(self)
166
167 def _inplace_tile(self):
--> 168 return handler.inplace_tile(self)
169
170 def __getattr__(self, attr):
~/Workspace/mars/mars/tiles.py in inplace_tile(self, to_tile)
134 if not to_tile.is_coarse():
135 return to_tile
--> 136 dispatched = self.dispatch(to_tile.op)
137 self._assign_to([d.data for d in dispatched], to_tile.op.outputs)
138 return to_tile
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
449 def _inner(*args, **kwargs):
450 with self:
--> 451 return func(*args, **kwargs)
452
453 return _inner
~/Workspace/mars/mars/tiles.py in dispatch(self, op)
117 else:
118 try:
--> 119 tiled = op_cls.tile(op)
120 except NotImplementedError as ex:
121 cause = ex
~/Workspace/mars/mars/dataframe/datasource/series.py in tile(cls, op)
60 memory_usage = raw_series.memory_usage(index=False, deep=True)
61 chunk_size = series.extra_params.raw_chunk_size or options.chunk_size
---> 62 chunk_size = decide_series_chunk_size(series.shape, chunk_size, memory_usage)
63 chunk_size_idxes = (range(len(size)) for size in chunk_size)
64
~/Workspace/mars/mars/dataframe/utils.py in decide_series_chunk_size(shape, chunk_size, memory_usage)
193
194 max_chunk_size = options.chunk_store_limit
--> 195 series_chunk_size = max_chunk_size / average_memory_usage
196 return normalize_chunk_sizes(shape, int(series_chunk_size))
197
ZeroDivisionError: division by zero
|
ZeroDivisionError
|
def decide_series_chunk_size(shape, chunk_size, memory_usage):
from ..config import options
chunk_size = dictify_chunk_size(shape, chunk_size)
average_memory_usage = memory_usage / shape[0] if shape[0] != 0 else memory_usage
if len(chunk_size) == len(shape):
return normalize_chunk_sizes(shape, chunk_size[0])
if all(s == 0 for s in shape):
# skip when shape is 0
return tuple((s,) for s in shape)
max_chunk_size = options.chunk_store_limit
series_chunk_size = max_chunk_size / average_memory_usage
return normalize_chunk_sizes(shape, int(series_chunk_size))
|
def decide_series_chunk_size(shape, chunk_size, memory_usage):
from ..config import options
chunk_size = dictify_chunk_size(shape, chunk_size)
average_memory_usage = memory_usage / shape[0] if shape[0] != 0 else memory_usage
if len(chunk_size) == len(shape):
return normalize_chunk_sizes(shape, chunk_size[0])
max_chunk_size = options.chunk_store_limit
series_chunk_size = max_chunk_size / average_memory_usage
return normalize_chunk_sizes(shape, int(series_chunk_size))
|
https://github.com/mars-project/mars/issues/1786
|
In [1]: import mars.dataframe as md
In [2]: import pandas as pd
In [4]: md.Series(pd.Series([], dtype=object)).execute()
---------------------------------------------------------------------------
ZeroDivisionError Traceback (most recent call last)
<ipython-input-4-665db81dbad7> in <module>
----> 1 md.Series(pd.Series([], dtype=object)).execute()
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
641
642 if wait:
--> 643 return run()
644 else:
645 thread_executor = ThreadPoolExecutor(1)
~/Workspace/mars/mars/core.py in run()
637
638 def run():
--> 639 self.data.execute(session, **kw)
640 return self
641
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
377
378 if wait:
--> 379 return run()
380 else:
381 # leverage ThreadPoolExecutor to submit task,
~/Workspace/mars/mars/core.py in run()
372 def run():
373 # no more fetch, thus just fire run
--> 374 session.run(self, **kw)
375 # return Tileable or ExecutableTuple itself
376 return self
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
503 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
504 for t in tileables)
--> 505 result = self._sess.run(*tileables, **kw)
506
507 for t in tileables:
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
109 # set number of running cores
110 self.context.set_ncores(kw['n_parallel'])
--> 111 res = self._executor.execute_tileables(tileables, **kw)
112 return res
113
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
449 def _inner(*args, **kwargs):
450 with self:
--> 451 return func(*args, **kwargs)
452
453 return _inner
~/Workspace/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
859 # build chunk graph, tile will be done during building
860 chunk_graph = chunk_graph_builder.build(
--> 861 tileables, tileable_graph=tileable_graph)
862 tileable_graph = chunk_graph_builder.prev_tileable_graph
863 temp_result_keys = set(result_keys)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
449 def _inner(*args, **kwargs):
450 with self:
--> 451 return func(*args, **kwargs)
452
453 return _inner
~/Workspace/mars/mars/tiles.py in build(self, tileables, tileable_graph)
346
347 chunk_graph = super().build(
--> 348 tileables, tileable_graph=tileable_graph)
349 self._iterative_chunk_graphs.append(chunk_graph)
350 if len(self._interrupted_ops) == 0:
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
449 def _inner(*args, **kwargs):
450 with self:
--> 451 return func(*args, **kwargs)
452
453 return _inner
~/Workspace/mars/mars/tiles.py in build(self, tileables, tileable_graph)
260 # for further execution
261 partial_tiled_chunks = \
--> 262 self._on_tile_failure(tileable_data.op, exc_info)
263 if partial_tiled_chunks is not None and \
264 len(partial_tiled_chunks) > 0:
~/Workspace/mars/mars/tiles.py in inner(op, exc_info)
299 on_tile_failure(op, exc_info)
300 else:
--> 301 raise exc_info[1].with_traceback(exc_info[2]) from None
302 return inner
303
~/Workspace/mars/mars/tiles.py in build(self, tileables, tileable_graph)
240 continue
241 try:
--> 242 tiled = self._tile(tileable_data, tileable_graph)
243 tiled_op.add(tileable_data.op)
244 for t, td in zip(tileable_data.op.outputs, tiled):
~/Workspace/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
335 if any(inp.op in self._interrupted_ops for inp in tileable_data.inputs):
336 raise TilesError('Tile fail due to failure of inputs')
--> 337 return super()._tile(tileable_data, tileable_graph)
338
339 @enter_mode(build=True, kernel=True)
~/Workspace/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
199 t._nsplits = o.nsplits
200 elif on_tile is None:
--> 201 tds[0]._inplace_tile()
202 else:
203 tds = on_tile(tileable_data.op.outputs, tds)
~/Workspace/mars/mars/core.py in _inplace_tile(self)
166
167 def _inplace_tile(self):
--> 168 return handler.inplace_tile(self)
169
170 def __getattr__(self, attr):
~/Workspace/mars/mars/tiles.py in inplace_tile(self, to_tile)
134 if not to_tile.is_coarse():
135 return to_tile
--> 136 dispatched = self.dispatch(to_tile.op)
137 self._assign_to([d.data for d in dispatched], to_tile.op.outputs)
138 return to_tile
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
449 def _inner(*args, **kwargs):
450 with self:
--> 451 return func(*args, **kwargs)
452
453 return _inner
~/Workspace/mars/mars/tiles.py in dispatch(self, op)
117 else:
118 try:
--> 119 tiled = op_cls.tile(op)
120 except NotImplementedError as ex:
121 cause = ex
~/Workspace/mars/mars/dataframe/datasource/series.py in tile(cls, op)
60 memory_usage = raw_series.memory_usage(index=False, deep=True)
61 chunk_size = series.extra_params.raw_chunk_size or options.chunk_size
---> 62 chunk_size = decide_series_chunk_size(series.shape, chunk_size, memory_usage)
63 chunk_size_idxes = (range(len(size)) for size in chunk_size)
64
~/Workspace/mars/mars/dataframe/utils.py in decide_series_chunk_size(shape, chunk_size, memory_usage)
193
194 max_chunk_size = options.chunk_store_limit
--> 195 series_chunk_size = max_chunk_size / average_memory_usage
196 return normalize_chunk_sizes(shape, int(series_chunk_size))
197
ZeroDivisionError: division by zero
|
ZeroDivisionError
|
def tile(cls, op):
if op.compression:
return cls._tile_compressed(op)
df = op.outputs[0]
chunk_bytes = df.extra_params.chunk_bytes
chunk_bytes = int(parse_readable_size(chunk_bytes)[0])
dtypes = df.dtypes
if (
op.use_arrow_dtype is None and not op.gpu and options.dataframe.use_arrow_dtype
): # pragma: no cover
# check if use_arrow_dtype set on the server side
dtypes = to_arrow_dtypes(df.dtypes)
path_prefix = ""
if isinstance(op.path, (tuple, list)):
paths = op.path
elif get_fs(op.path, op.storage_options).isdir(op.path):
parsed_path = urlparse(op.path)
if parsed_path.scheme.lower() == "hdfs":
path_prefix = f"{parsed_path.scheme}://{parsed_path.netloc}"
paths = get_fs(op.path, op.storage_options).ls(op.path)
else:
paths = glob(op.path.rstrip("/") + "/*", storage_options=op.storage_options)
else:
paths = glob(op.path, storage_options=op.storage_options)
out_chunks = []
index_num = 0
for path in paths:
path = path_prefix + path
total_bytes = file_size(path)
offset = 0
for _ in range(int(np.ceil(total_bytes * 1.0 / chunk_bytes))):
chunk_op = op.copy().reset_key()
chunk_op._path = path
chunk_op._offset = offset
chunk_op._size = min(chunk_bytes, total_bytes - offset)
shape = (np.nan, len(dtypes))
index_value = parse_index(df.index_value.to_pandas(), path, index_num)
new_chunk = chunk_op.new_chunk(
None,
shape=shape,
index=(index_num, 0),
index_value=index_value,
columns_value=df.columns_value,
dtypes=dtypes,
)
out_chunks.append(new_chunk)
index_num += 1
offset += chunk_bytes
if (
op.incremental_index
and len(out_chunks) > 1
and isinstance(df.index_value._index_value, IndexValue.RangeIndex)
):
out_chunks = standardize_range_index(out_chunks)
new_op = op.copy()
nsplits = ((np.nan,) * len(out_chunks), (df.shape[1],))
return new_op.new_dataframes(
None,
df.shape,
dtypes=dtypes,
index_value=df.index_value,
columns_value=df.columns_value,
chunks=out_chunks,
nsplits=nsplits,
)
|
def tile(cls, op):
if op.compression:
return cls._tile_compressed(op)
df = op.outputs[0]
chunk_bytes = df.extra_params.chunk_bytes
chunk_bytes = int(parse_readable_size(chunk_bytes)[0])
dtypes = df.dtypes
if (
op.use_arrow_dtype is None and not op.gpu and options.dataframe.use_arrow_dtype
): # pragma: no cover
# check if use_arrow_dtype set on the server side
dtypes = to_arrow_dtypes(df.dtypes)
paths = (
op.path
if isinstance(op.path, (tuple, list))
else glob(op.path, storage_options=op.storage_options)
)
out_chunks = []
index_num = 0
for path in paths:
total_bytes = file_size(path)
offset = 0
for _ in range(int(np.ceil(total_bytes * 1.0 / chunk_bytes))):
chunk_op = op.copy().reset_key()
chunk_op._path = path
chunk_op._offset = offset
chunk_op._size = min(chunk_bytes, total_bytes - offset)
shape = (np.nan, len(dtypes))
index_value = parse_index(df.index_value.to_pandas(), path, index_num)
new_chunk = chunk_op.new_chunk(
None,
shape=shape,
index=(index_num, 0),
index_value=index_value,
columns_value=df.columns_value,
dtypes=dtypes,
)
out_chunks.append(new_chunk)
index_num += 1
offset += chunk_bytes
if (
op.incremental_index
and len(out_chunks) > 1
and isinstance(df.index_value._index_value, IndexValue.RangeIndex)
):
out_chunks = standardize_range_index(out_chunks)
new_op = op.copy()
nsplits = ((np.nan,) * len(out_chunks), (df.shape[1],))
return new_op.new_dataframes(
None,
df.shape,
dtypes=dtypes,
index_value=df.index_value,
columns_value=df.columns_value,
chunks=out_chunks,
nsplits=nsplits,
)
|
https://github.com/mars-project/mars/issues/1780
|
20/12/13 13:13:40 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
hdfsOpenFile(hdfs://<hdfs_ip>:8020/user/test/parquet_test): FileSystem#open((Lorg/apache/hadoop/fs/Path;I)Lorg/apache/hadoop/fs/FSDataInputStream;) error:
RemoteException: Path is not a file: /user/test/parquet_test
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:70)
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:56)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:2092)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:2062)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1975)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:575)
at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.getBlockLocations(AuthorizationProviderProxyClientProtocol.java:92)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:376)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2226)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2222)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1917)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2220)
java.io.FileNotFoundException: Path is not a file: /user/test/parquet_test
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:70)
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:56)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:2092)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:2062)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1975)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:575)
at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.getBlockLocations(AuthorizationProviderProxyClientProtocol.java:92)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:376)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2226)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2222)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1917)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2220)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:73)
at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:1289)
at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:1274)
at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:1262)
at org.apache.hadoop.hdfs.DFSInputStream.fetchLocatedBlocksAndGetLastBlockLength(DFSInputStream.java:307)
at org.apache.hadoop.hdfs.DFSInputStream.openInfo(DFSInputStream.java:273)
at org.apache.hadoop.hdfs.DFSInputStream.<init>(DFSInputStream.java:265)
at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:1593)
at org.apache.hadoop.hdfs.DistributedFileSystem$4.doCall(DistributedFileSystem.java:338)
at org.apache.hadoop.hdfs.DistributedFileSystem$4.doCall(DistributedFileSystem.java:334)
at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
at org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:334)
Caused by: org.apache.hadoop.ipc.RemoteException(java.io.FileNotFoundException): Path is not a file: /user/test/parquet_test
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:70)
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:56)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:2092)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:2062)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1975)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:575)
at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.getBlockLocations(AuthorizationProviderProxyClientProtocol.java:92)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:376)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2226)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2222)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1917)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2220)
at org.apache.hadoop.ipc.Client.call(Client.java:1504)
at org.apache.hadoop.ipc.Client.call(Client.java:1441)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:230)
at com.sun.proxy.$Proxy10.getBlockLocations(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getBlockLocations(ClientNamenodeProtocolTranslatorPB.java:266)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:260)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:104)
at com.sun.proxy.$Proxy11.getBlockLocations(Unknown Source)
at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:1287)
... 10 more
Traceback (most recent call last):
File "read_hdfs_dir.py", line 12, in <module>
df = md.read_parquet('hdfs://<hdfs_ip>:8020/user/test/parquet_test')
File "/home/test/lib/anaconda3/lib/python3.7/site-packages/mars/dataframe/datasource/read_parquet.py", line 394, in read_parquet
with open_file(file_path, storage_options=storage_options) as f:
File "/home/test/lib/anaconda3/lib/python3.7/site-packages/mars/filesystem.py", line 383, in open_file
f = fs.open(path, mode=mode)
File "pyarrow/io-hdfs.pxi", line 409, in pyarrow.lib.HadoopFileSystem.open
File "pyarrow/error.pxi", line 100, in pyarrow.lib.check_status
OSError: HDFS path exists, but opening file failed: hdfs://<hdfs_ip>:8020/user/test/parquet_test
|
OSError
|
def read_csv(
path,
names=None,
sep=",",
index_col=None,
compression=None,
header="infer",
dtype=None,
usecols=None,
nrows=None,
chunk_bytes="64M",
gpu=None,
head_bytes="100k",
head_lines=None,
incremental_index=False,
use_arrow_dtype=None,
storage_options=None,
**kwargs,
):
r"""
Read a comma-separated values (csv) file into DataFrame.
Also supports optionally iterating or breaking of the file
into chunks.
Parameters
----------
path : str
Any valid string path is acceptable. The string could be a URL. Valid
URL schemes include http, ftp, s3, and file. For file URLs, a host is
expected. A local file could be: file://localhost/path/to/table.csv,
you can alos read from external resources using a URL like:
hdfs://localhost:8020/test.csv.
If you want to pass in a path object, pandas accepts any ``os.PathLike``.
By file-like object, we refer to objects with a ``read()`` method, such as
a file handler (e.g. via builtin ``open`` function) or ``StringIO``.
sep : str, default ','
Delimiter to use. If sep is None, the C engine cannot automatically detect
the separator, but the Python parsing engine can, meaning the latter will
be used and automatically detect the separator by Python's builtin sniffer
tool, ``csv.Sniffer``. In addition, separators longer than 1 character and
different from ``'\s+'`` will be interpreted as regular expressions and
will also force the use of the Python parsing engine. Note that regex
delimiters are prone to ignoring quoted data. Regex example: ``'\r\t'``.
delimiter : str, default ``None``
Alias for sep.
header : int, list of int, default 'infer'
Row number(s) to use as the column names, and the start of the
data. Default behavior is to infer the column names: if no names
are passed the behavior is identical to ``header=0`` and column
names are inferred from the first line of the file, if column
names are passed explicitly then the behavior is identical to
``header=None``. Explicitly pass ``header=0`` to be able to
replace existing names. The header can be a list of integers that
specify row locations for a multi-index on the columns
e.g. [0,1,3]. Intervening rows that are not specified will be
skipped (e.g. 2 in this example is skipped). Note that this
parameter ignores commented lines and empty lines if
``skip_blank_lines=True``, so ``header=0`` denotes the first line of
data rather than the first line of the file.
names : array-like, optional
List of column names to use. If the file contains a header row,
then you should explicitly pass ``header=0`` to override the column names.
Duplicates in this list are not allowed.
index_col : int, str, sequence of int / str, or False, default ``None``
Column(s) to use as the row labels of the ``DataFrame``, either given as
string name or column index. If a sequence of int / str is given, a
MultiIndex is used.
Note: ``index_col=False`` can be used to force pandas to *not* use the first
column as the index, e.g. when you have a malformed file with delimiters at
the end of each line.
usecols : list-like or callable, optional
Return a subset of the columns. If list-like, all elements must either
be positional (i.e. integer indices into the document columns) or strings
that correspond to column names provided either by the user in `names` or
inferred from the document header row(s). For example, a valid list-like
`usecols` parameter would be ``[0, 1, 2]`` or ``['foo', 'bar', 'baz']``.
Element order is ignored, so ``usecols=[0, 1]`` is the same as ``[1, 0]``.
To instantiate a DataFrame from ``data`` with element order preserved use
``pd.read_csv(data, usecols=['foo', 'bar'])[['foo', 'bar']]`` for columns
in ``['foo', 'bar']`` order or
``pd.read_csv(data, usecols=['foo', 'bar'])[['bar', 'foo']]``
for ``['bar', 'foo']`` order.
If callable, the callable function will be evaluated against the column
names, returning names where the callable function evaluates to True. An
example of a valid callable argument would be ``lambda x: x.upper() in
['AAA', 'BBB', 'DDD']``. Using this parameter results in much faster
parsing time and lower memory usage.
squeeze : bool, default False
If the parsed data only contains one column then return a Series.
prefix : str, optional
Prefix to add to column numbers when no header, e.g. 'X' for X0, X1, ...
mangle_dupe_cols : bool, default True
Duplicate columns will be specified as 'X', 'X.1', ...'X.N', rather than
'X'...'X'. Passing in False will cause data to be overwritten if there
are duplicate names in the columns.
dtype : Type name or dict of column -> type, optional
Data type for data or columns. E.g. {'a': np.float64, 'b': np.int32,
'c': 'Int64'}
Use `str` or `object` together with suitable `na_values` settings
to preserve and not interpret dtype.
If converters are specified, they will be applied INSTEAD
of dtype conversion.
engine : {'c', 'python'}, optional
Parser engine to use. The C engine is faster while the python engine is
currently more feature-complete.
converters : dict, optional
Dict of functions for converting values in certain columns. Keys can either
be integers or column labels.
true_values : list, optional
Values to consider as True.
false_values : list, optional
Values to consider as False.
skipinitialspace : bool, default False
Skip spaces after delimiter.
skiprows : list-like, int or callable, optional
Line numbers to skip (0-indexed) or number of lines to skip (int)
at the start of the file.
If callable, the callable function will be evaluated against the row
indices, returning True if the row should be skipped and False otherwise.
An example of a valid callable argument would be ``lambda x: x in [0, 2]``.
skipfooter : int, default 0
Number of lines at bottom of file to skip (Unsupported with engine='c').
nrows : int, optional
Number of rows of file to read. Useful for reading pieces of large files.
na_values : scalar, str, list-like, or dict, optional
Additional strings to recognize as NA/NaN. If dict passed, specific
per-column NA values. By default the following values are interpreted as
NaN: '', '#N/A', '#N/A N/A', '#NA', '-1.#IND', '-1.#QNAN', '-NaN', '-nan',
'1.#IND', '1.#QNAN', '<NA>', 'N/A', 'NA', 'NULL', 'NaN', 'n/a',
'nan', 'null'.
keep_default_na : bool, default True
Whether or not to include the default NaN values when parsing the data.
Depending on whether `na_values` is passed in, the behavior is as follows:
* If `keep_default_na` is True, and `na_values` are specified, `na_values`
is appended to the default NaN values used for parsing.
* If `keep_default_na` is True, and `na_values` are not specified, only
the default NaN values are used for parsing.
* If `keep_default_na` is False, and `na_values` are specified, only
the NaN values specified `na_values` are used for parsing.
* If `keep_default_na` is False, and `na_values` are not specified, no
strings will be parsed as NaN.
Note that if `na_filter` is passed in as False, the `keep_default_na` and
`na_values` parameters will be ignored.
na_filter : bool, default True
Detect missing value markers (empty strings and the value of na_values). In
data without any NAs, passing na_filter=False can improve the performance
of reading a large file.
verbose : bool, default False
Indicate number of NA values placed in non-numeric columns.
skip_blank_lines : bool, default True
If True, skip over blank lines rather than interpreting as NaN values.
parse_dates : bool or list of int or names or list of lists or dict, default False
The behavior is as follows:
* boolean. If True -> try parsing the index.
* list of int or names. e.g. If [1, 2, 3] -> try parsing columns 1, 2, 3
each as a separate date column.
* list of lists. e.g. If [[1, 3]] -> combine columns 1 and 3 and parse as
a single date column.
* dict, e.g. {'foo' : [1, 3]} -> parse columns 1, 3 as date and call
result 'foo'
If a column or index cannot be represented as an array of datetimes,
say because of an unparseable value or a mixture of timezones, the column
or index will be returned unaltered as an object data type. For
non-standard datetime parsing, use ``pd.to_datetime`` after
``pd.read_csv``. To parse an index or column with a mixture of timezones,
specify ``date_parser`` to be a partially-applied
:func:`pandas.to_datetime` with ``utc=True``. See
:ref:`io.csv.mixed_timezones` for more.
Note: A fast-path exists for iso8601-formatted dates.
infer_datetime_format : bool, default False
If True and `parse_dates` is enabled, pandas will attempt to infer the
format of the datetime strings in the columns, and if it can be inferred,
switch to a faster method of parsing them. In some cases this can increase
the parsing speed by 5-10x.
keep_date_col : bool, default False
If True and `parse_dates` specifies combining multiple columns then
keep the original columns.
date_parser : function, optional
Function to use for converting a sequence of string columns to an array of
datetime instances. The default uses ``dateutil.parser.parser`` to do the
conversion. Pandas will try to call `date_parser` in three different ways,
advancing to the next if an exception occurs: 1) Pass one or more arrays
(as defined by `parse_dates`) as arguments; 2) concatenate (row-wise) the
string values from the columns defined by `parse_dates` into a single array
and pass that; and 3) call `date_parser` once for each row using one or
more strings (corresponding to the columns defined by `parse_dates`) as
arguments.
dayfirst : bool, default False
DD/MM format dates, international and European format.
cache_dates : bool, default True
If True, use a cache of unique, converted dates to apply the datetime
conversion. May produce significant speed-up when parsing duplicate
date strings, especially ones with timezone offsets.
.. versionadded:: 0.25.0
iterator : bool, default False
Return TextFileReader object for iteration or getting chunks with
``get_chunk()``.
chunksize : int, optional
Return TextFileReader object for iteration.
See the `IO Tools docs
<https://pandas.pydata.org/pandas-docs/stable/io.html#io-chunking>`_
for more information on ``iterator`` and ``chunksize``.
compression : {'infer', 'gzip', 'bz2', 'zip', 'xz', None}, default 'infer'
For on-the-fly decompression of on-disk data. If 'infer' and
`filepath_or_buffer` is path-like, then detect compression from the
following extensions: '.gz', '.bz2', '.zip', or '.xz' (otherwise no
decompression). If using 'zip', the ZIP file must contain only one data
file to be read in. Set to None for no decompression.
thousands : str, optional
Thousands separator.
decimal : str, default '.'
Character to recognize as decimal point (e.g. use ',' for European data).
lineterminator : str (length 1), optional
Character to break file into lines. Only valid with C parser.
quotechar : str (length 1), optional
The character used to denote the start and end of a quoted item. Quoted
items can include the delimiter and it will be ignored.
quoting : int or csv.QUOTE_* instance, default 0
Control field quoting behavior per ``csv.QUOTE_*`` constants. Use one of
QUOTE_MINIMAL (0), QUOTE_ALL (1), QUOTE_NONNUMERIC (2) or QUOTE_NONE (3).
doublequote : bool, default ``True``
When quotechar is specified and quoting is not ``QUOTE_NONE``, indicate
whether or not to interpret two consecutive quotechar elements INSIDE a
field as a single ``quotechar`` element.
escapechar : str (length 1), optional
One-character string used to escape other characters.
comment : str, optional
Indicates remainder of line should not be parsed. If found at the beginning
of a line, the line will be ignored altogether. This parameter must be a
single character. Like empty lines (as long as ``skip_blank_lines=True``),
fully commented lines are ignored by the parameter `header` but not by
`skiprows`. For example, if ``comment='#'``, parsing
``#empty\na,b,c\n1,2,3`` with ``header=0`` will result in 'a,b,c' being
treated as the header.
encoding : str, optional
Encoding to use for UTF when reading/writing (ex. 'utf-8'). `List of Python
standard encodings
<https://docs.python.org/3/library/codecs.html#standard-encodings>`_ .
dialect : str or csv.Dialect, optional
If provided, this parameter will override values (default or not) for the
following parameters: `delimiter`, `doublequote`, `escapechar`,
`skipinitialspace`, `quotechar`, and `quoting`. If it is necessary to
override values, a ParserWarning will be issued. See csv.Dialect
documentation for more details.
error_bad_lines : bool, default True
Lines with too many fields (e.g. a csv line with too many commas) will by
default cause an exception to be raised, and no DataFrame will be returned.
If False, then these "bad lines" will dropped from the DataFrame that is
returned.
warn_bad_lines : bool, default True
If error_bad_lines is False, and warn_bad_lines is True, a warning for each
"bad line" will be output.
delim_whitespace : bool, default False
Specifies whether or not whitespace (e.g. ``' '`` or ``' '``) will be
used as the sep. Equivalent to setting ``sep='\s+'``. If this option
is set to True, nothing should be passed in for the ``delimiter``
parameter.
low_memory : bool, default True
Internally process the file in chunks, resulting in lower memory use
while parsing, but possibly mixed type inference. To ensure no mixed
types either set False, or specify the type with the `dtype` parameter.
Note that the entire file is read into a single DataFrame regardless,
use the `chunksize` or `iterator` parameter to return the data in chunks.
(Only valid with C parser).
float_precision : str, optional
Specifies which converter the C engine should use for floating-point
values. The options are `None` for the ordinary converter,
`high` for the high-precision converter, and `round_trip` for the
round-trip converter.
chunk_bytes: int, float or str, optional
Number of chunk bytes.
gpu: bool, default False
If read into cudf DataFrame.
head_bytes: int, float or str, optional
Number of bytes to use in the head of file, mainly for data inference.
head_lines: int, optional
Number of lines to use in the head of file, mainly for data inference.
incremental_index: bool, default False
Create a new RangeIndex if csv doesn't contain index columns.
use_arrow_dtype: bool, default None
If True, use arrow dtype to store columns.
storage_options: dict, optional
Options for storage connection.
Returns
-------
DataFrame
A comma-separated values (csv) file is returned as two-dimensional
data structure with labeled axes.
See Also
--------
to_csv : Write DataFrame to a comma-separated values (csv) file.
Examples
--------
>>> import mars.dataframe as md
>>> md.read_csv('data.csv') # doctest: +SKIP
>>> # read from HDFS
>>> md.read_csv('hdfs://localhost:8020/test.csv') # doctest: +SKIP
"""
# infer dtypes and columns
if isinstance(path, (list, tuple)):
file_path = path[0]
elif get_fs(path, storage_options).isdir(path):
parsed_path = urlparse(path)
if parsed_path.scheme.lower() == "hdfs":
path_prefix = f"{parsed_path.scheme}://{parsed_path.netloc}"
file_path = path_prefix + get_fs(path, storage_options).ls(path)[0]
else:
file_path = glob(path.rstrip("/") + "/*", storage_options)[0]
else:
file_path = glob(path, storage_options)[0]
with open_file(
file_path, compression=compression, storage_options=storage_options
) as f:
if head_lines is not None:
b = b"".join([f.readline() for _ in range(head_lines)])
else:
head_bytes = int(parse_readable_size(head_bytes)[0])
head_start, head_end = _find_chunk_start_end(f, 0, head_bytes)
f.seek(head_start)
b = f.read(head_end - head_start)
mini_df = pd.read_csv(
BytesIO(b),
sep=sep,
index_col=index_col,
dtype=dtype,
names=names,
header=header,
)
if names is None:
names = list(mini_df.columns)
else:
# if names specified, header should be None
header = None
if usecols:
usecols = usecols if isinstance(usecols, list) else [usecols]
col_index = sorted(mini_df.columns.get_indexer(usecols))
mini_df = mini_df.iloc[:, col_index]
if isinstance(mini_df.index, pd.RangeIndex):
index_value = parse_index(pd.RangeIndex(-1))
else:
index_value = parse_index(mini_df.index)
columns_value = parse_index(mini_df.columns, store_data=True)
if index_col and not isinstance(index_col, int):
index_col = list(mini_df.columns).index(index_col)
op = DataFrameReadCSV(
path=path,
names=names,
sep=sep,
header=header,
index_col=index_col,
usecols=usecols,
compression=compression,
gpu=gpu,
incremental_index=incremental_index,
use_arrow_dtype=use_arrow_dtype,
storage_options=storage_options,
**kwargs,
)
chunk_bytes = chunk_bytes or options.chunk_store_limit
dtypes = mini_df.dtypes
if use_arrow_dtype is None:
use_arrow_dtype = options.dataframe.use_arrow_dtype
if not gpu and use_arrow_dtype:
dtypes = to_arrow_dtypes(dtypes, test_df=mini_df)
ret = op(
index_value=index_value,
columns_value=columns_value,
dtypes=dtypes,
chunk_bytes=chunk_bytes,
)
if nrows is not None:
return ret.head(nrows)
return ret
|
def read_csv(
path,
names=None,
sep=",",
index_col=None,
compression=None,
header="infer",
dtype=None,
usecols=None,
nrows=None,
chunk_bytes="64M",
gpu=None,
head_bytes="100k",
head_lines=None,
incremental_index=False,
use_arrow_dtype=None,
storage_options=None,
**kwargs,
):
r"""
Read a comma-separated values (csv) file into DataFrame.
Also supports optionally iterating or breaking of the file
into chunks.
Parameters
----------
path : str
Any valid string path is acceptable. The string could be a URL. Valid
URL schemes include http, ftp, s3, and file. For file URLs, a host is
expected. A local file could be: file://localhost/path/to/table.csv,
you can alos read from external resources using a URL like:
hdfs://localhost:8020/test.csv.
If you want to pass in a path object, pandas accepts any ``os.PathLike``.
By file-like object, we refer to objects with a ``read()`` method, such as
a file handler (e.g. via builtin ``open`` function) or ``StringIO``.
sep : str, default ','
Delimiter to use. If sep is None, the C engine cannot automatically detect
the separator, but the Python parsing engine can, meaning the latter will
be used and automatically detect the separator by Python's builtin sniffer
tool, ``csv.Sniffer``. In addition, separators longer than 1 character and
different from ``'\s+'`` will be interpreted as regular expressions and
will also force the use of the Python parsing engine. Note that regex
delimiters are prone to ignoring quoted data. Regex example: ``'\r\t'``.
delimiter : str, default ``None``
Alias for sep.
header : int, list of int, default 'infer'
Row number(s) to use as the column names, and the start of the
data. Default behavior is to infer the column names: if no names
are passed the behavior is identical to ``header=0`` and column
names are inferred from the first line of the file, if column
names are passed explicitly then the behavior is identical to
``header=None``. Explicitly pass ``header=0`` to be able to
replace existing names. The header can be a list of integers that
specify row locations for a multi-index on the columns
e.g. [0,1,3]. Intervening rows that are not specified will be
skipped (e.g. 2 in this example is skipped). Note that this
parameter ignores commented lines and empty lines if
``skip_blank_lines=True``, so ``header=0`` denotes the first line of
data rather than the first line of the file.
names : array-like, optional
List of column names to use. If the file contains a header row,
then you should explicitly pass ``header=0`` to override the column names.
Duplicates in this list are not allowed.
index_col : int, str, sequence of int / str, or False, default ``None``
Column(s) to use as the row labels of the ``DataFrame``, either given as
string name or column index. If a sequence of int / str is given, a
MultiIndex is used.
Note: ``index_col=False`` can be used to force pandas to *not* use the first
column as the index, e.g. when you have a malformed file with delimiters at
the end of each line.
usecols : list-like or callable, optional
Return a subset of the columns. If list-like, all elements must either
be positional (i.e. integer indices into the document columns) or strings
that correspond to column names provided either by the user in `names` or
inferred from the document header row(s). For example, a valid list-like
`usecols` parameter would be ``[0, 1, 2]`` or ``['foo', 'bar', 'baz']``.
Element order is ignored, so ``usecols=[0, 1]`` is the same as ``[1, 0]``.
To instantiate a DataFrame from ``data`` with element order preserved use
``pd.read_csv(data, usecols=['foo', 'bar'])[['foo', 'bar']]`` for columns
in ``['foo', 'bar']`` order or
``pd.read_csv(data, usecols=['foo', 'bar'])[['bar', 'foo']]``
for ``['bar', 'foo']`` order.
If callable, the callable function will be evaluated against the column
names, returning names where the callable function evaluates to True. An
example of a valid callable argument would be ``lambda x: x.upper() in
['AAA', 'BBB', 'DDD']``. Using this parameter results in much faster
parsing time and lower memory usage.
squeeze : bool, default False
If the parsed data only contains one column then return a Series.
prefix : str, optional
Prefix to add to column numbers when no header, e.g. 'X' for X0, X1, ...
mangle_dupe_cols : bool, default True
Duplicate columns will be specified as 'X', 'X.1', ...'X.N', rather than
'X'...'X'. Passing in False will cause data to be overwritten if there
are duplicate names in the columns.
dtype : Type name or dict of column -> type, optional
Data type for data or columns. E.g. {'a': np.float64, 'b': np.int32,
'c': 'Int64'}
Use `str` or `object` together with suitable `na_values` settings
to preserve and not interpret dtype.
If converters are specified, they will be applied INSTEAD
of dtype conversion.
engine : {'c', 'python'}, optional
Parser engine to use. The C engine is faster while the python engine is
currently more feature-complete.
converters : dict, optional
Dict of functions for converting values in certain columns. Keys can either
be integers or column labels.
true_values : list, optional
Values to consider as True.
false_values : list, optional
Values to consider as False.
skipinitialspace : bool, default False
Skip spaces after delimiter.
skiprows : list-like, int or callable, optional
Line numbers to skip (0-indexed) or number of lines to skip (int)
at the start of the file.
If callable, the callable function will be evaluated against the row
indices, returning True if the row should be skipped and False otherwise.
An example of a valid callable argument would be ``lambda x: x in [0, 2]``.
skipfooter : int, default 0
Number of lines at bottom of file to skip (Unsupported with engine='c').
nrows : int, optional
Number of rows of file to read. Useful for reading pieces of large files.
na_values : scalar, str, list-like, or dict, optional
Additional strings to recognize as NA/NaN. If dict passed, specific
per-column NA values. By default the following values are interpreted as
NaN: '', '#N/A', '#N/A N/A', '#NA', '-1.#IND', '-1.#QNAN', '-NaN', '-nan',
'1.#IND', '1.#QNAN', '<NA>', 'N/A', 'NA', 'NULL', 'NaN', 'n/a',
'nan', 'null'.
keep_default_na : bool, default True
Whether or not to include the default NaN values when parsing the data.
Depending on whether `na_values` is passed in, the behavior is as follows:
* If `keep_default_na` is True, and `na_values` are specified, `na_values`
is appended to the default NaN values used for parsing.
* If `keep_default_na` is True, and `na_values` are not specified, only
the default NaN values are used for parsing.
* If `keep_default_na` is False, and `na_values` are specified, only
the NaN values specified `na_values` are used for parsing.
* If `keep_default_na` is False, and `na_values` are not specified, no
strings will be parsed as NaN.
Note that if `na_filter` is passed in as False, the `keep_default_na` and
`na_values` parameters will be ignored.
na_filter : bool, default True
Detect missing value markers (empty strings and the value of na_values). In
data without any NAs, passing na_filter=False can improve the performance
of reading a large file.
verbose : bool, default False
Indicate number of NA values placed in non-numeric columns.
skip_blank_lines : bool, default True
If True, skip over blank lines rather than interpreting as NaN values.
parse_dates : bool or list of int or names or list of lists or dict, default False
The behavior is as follows:
* boolean. If True -> try parsing the index.
* list of int or names. e.g. If [1, 2, 3] -> try parsing columns 1, 2, 3
each as a separate date column.
* list of lists. e.g. If [[1, 3]] -> combine columns 1 and 3 and parse as
a single date column.
* dict, e.g. {'foo' : [1, 3]} -> parse columns 1, 3 as date and call
result 'foo'
If a column or index cannot be represented as an array of datetimes,
say because of an unparseable value or a mixture of timezones, the column
or index will be returned unaltered as an object data type. For
non-standard datetime parsing, use ``pd.to_datetime`` after
``pd.read_csv``. To parse an index or column with a mixture of timezones,
specify ``date_parser`` to be a partially-applied
:func:`pandas.to_datetime` with ``utc=True``. See
:ref:`io.csv.mixed_timezones` for more.
Note: A fast-path exists for iso8601-formatted dates.
infer_datetime_format : bool, default False
If True and `parse_dates` is enabled, pandas will attempt to infer the
format of the datetime strings in the columns, and if it can be inferred,
switch to a faster method of parsing them. In some cases this can increase
the parsing speed by 5-10x.
keep_date_col : bool, default False
If True and `parse_dates` specifies combining multiple columns then
keep the original columns.
date_parser : function, optional
Function to use for converting a sequence of string columns to an array of
datetime instances. The default uses ``dateutil.parser.parser`` to do the
conversion. Pandas will try to call `date_parser` in three different ways,
advancing to the next if an exception occurs: 1) Pass one or more arrays
(as defined by `parse_dates`) as arguments; 2) concatenate (row-wise) the
string values from the columns defined by `parse_dates` into a single array
and pass that; and 3) call `date_parser` once for each row using one or
more strings (corresponding to the columns defined by `parse_dates`) as
arguments.
dayfirst : bool, default False
DD/MM format dates, international and European format.
cache_dates : bool, default True
If True, use a cache of unique, converted dates to apply the datetime
conversion. May produce significant speed-up when parsing duplicate
date strings, especially ones with timezone offsets.
.. versionadded:: 0.25.0
iterator : bool, default False
Return TextFileReader object for iteration or getting chunks with
``get_chunk()``.
chunksize : int, optional
Return TextFileReader object for iteration.
See the `IO Tools docs
<https://pandas.pydata.org/pandas-docs/stable/io.html#io-chunking>`_
for more information on ``iterator`` and ``chunksize``.
compression : {'infer', 'gzip', 'bz2', 'zip', 'xz', None}, default 'infer'
For on-the-fly decompression of on-disk data. If 'infer' and
`filepath_or_buffer` is path-like, then detect compression from the
following extensions: '.gz', '.bz2', '.zip', or '.xz' (otherwise no
decompression). If using 'zip', the ZIP file must contain only one data
file to be read in. Set to None for no decompression.
thousands : str, optional
Thousands separator.
decimal : str, default '.'
Character to recognize as decimal point (e.g. use ',' for European data).
lineterminator : str (length 1), optional
Character to break file into lines. Only valid with C parser.
quotechar : str (length 1), optional
The character used to denote the start and end of a quoted item. Quoted
items can include the delimiter and it will be ignored.
quoting : int or csv.QUOTE_* instance, default 0
Control field quoting behavior per ``csv.QUOTE_*`` constants. Use one of
QUOTE_MINIMAL (0), QUOTE_ALL (1), QUOTE_NONNUMERIC (2) or QUOTE_NONE (3).
doublequote : bool, default ``True``
When quotechar is specified and quoting is not ``QUOTE_NONE``, indicate
whether or not to interpret two consecutive quotechar elements INSIDE a
field as a single ``quotechar`` element.
escapechar : str (length 1), optional
One-character string used to escape other characters.
comment : str, optional
Indicates remainder of line should not be parsed. If found at the beginning
of a line, the line will be ignored altogether. This parameter must be a
single character. Like empty lines (as long as ``skip_blank_lines=True``),
fully commented lines are ignored by the parameter `header` but not by
`skiprows`. For example, if ``comment='#'``, parsing
``#empty\na,b,c\n1,2,3`` with ``header=0`` will result in 'a,b,c' being
treated as the header.
encoding : str, optional
Encoding to use for UTF when reading/writing (ex. 'utf-8'). `List of Python
standard encodings
<https://docs.python.org/3/library/codecs.html#standard-encodings>`_ .
dialect : str or csv.Dialect, optional
If provided, this parameter will override values (default or not) for the
following parameters: `delimiter`, `doublequote`, `escapechar`,
`skipinitialspace`, `quotechar`, and `quoting`. If it is necessary to
override values, a ParserWarning will be issued. See csv.Dialect
documentation for more details.
error_bad_lines : bool, default True
Lines with too many fields (e.g. a csv line with too many commas) will by
default cause an exception to be raised, and no DataFrame will be returned.
If False, then these "bad lines" will dropped from the DataFrame that is
returned.
warn_bad_lines : bool, default True
If error_bad_lines is False, and warn_bad_lines is True, a warning for each
"bad line" will be output.
delim_whitespace : bool, default False
Specifies whether or not whitespace (e.g. ``' '`` or ``' '``) will be
used as the sep. Equivalent to setting ``sep='\s+'``. If this option
is set to True, nothing should be passed in for the ``delimiter``
parameter.
low_memory : bool, default True
Internally process the file in chunks, resulting in lower memory use
while parsing, but possibly mixed type inference. To ensure no mixed
types either set False, or specify the type with the `dtype` parameter.
Note that the entire file is read into a single DataFrame regardless,
use the `chunksize` or `iterator` parameter to return the data in chunks.
(Only valid with C parser).
float_precision : str, optional
Specifies which converter the C engine should use for floating-point
values. The options are `None` for the ordinary converter,
`high` for the high-precision converter, and `round_trip` for the
round-trip converter.
chunk_bytes: int, float or str, optional
Number of chunk bytes.
gpu: bool, default False
If read into cudf DataFrame.
head_bytes: int, float or str, optional
Number of bytes to use in the head of file, mainly for data inference.
head_lines: int, optional
Number of lines to use in the head of file, mainly for data inference.
incremental_index: bool, default False
Create a new RangeIndex if csv doesn't contain index columns.
use_arrow_dtype: bool, default None
If True, use arrow dtype to store columns.
storage_options: dict, optional
Options for storage connection.
Returns
-------
DataFrame
A comma-separated values (csv) file is returned as two-dimensional
data structure with labeled axes.
See Also
--------
to_csv : Write DataFrame to a comma-separated values (csv) file.
Examples
--------
>>> import mars.dataframe as md
>>> md.read_csv('data.csv') # doctest: +SKIP
>>> # read from HDFS
>>> md.read_csv('hdfs://localhost:8020/test.csv') # doctest: +SKIP
"""
# infer dtypes and columns
if isinstance(path, (list, tuple)):
file_path = path[0]
else:
file_path = glob(path)[0]
with open_file(
file_path, compression=compression, storage_options=storage_options
) as f:
if head_lines is not None:
b = b"".join([f.readline() for _ in range(head_lines)])
else:
head_bytes = int(parse_readable_size(head_bytes)[0])
head_start, head_end = _find_chunk_start_end(f, 0, head_bytes)
f.seek(head_start)
b = f.read(head_end - head_start)
mini_df = pd.read_csv(
BytesIO(b),
sep=sep,
index_col=index_col,
dtype=dtype,
names=names,
header=header,
)
if names is None:
names = list(mini_df.columns)
else:
# if names specified, header should be None
header = None
if usecols:
usecols = usecols if isinstance(usecols, list) else [usecols]
col_index = sorted(mini_df.columns.get_indexer(usecols))
mini_df = mini_df.iloc[:, col_index]
if isinstance(mini_df.index, pd.RangeIndex):
index_value = parse_index(pd.RangeIndex(-1))
else:
index_value = parse_index(mini_df.index)
columns_value = parse_index(mini_df.columns, store_data=True)
if index_col and not isinstance(index_col, int):
index_col = list(mini_df.columns).index(index_col)
op = DataFrameReadCSV(
path=path,
names=names,
sep=sep,
header=header,
index_col=index_col,
usecols=usecols,
compression=compression,
gpu=gpu,
incremental_index=incremental_index,
use_arrow_dtype=use_arrow_dtype,
storage_options=storage_options,
**kwargs,
)
chunk_bytes = chunk_bytes or options.chunk_store_limit
dtypes = mini_df.dtypes
if use_arrow_dtype is None:
use_arrow_dtype = options.dataframe.use_arrow_dtype
if not gpu and use_arrow_dtype:
dtypes = to_arrow_dtypes(dtypes, test_df=mini_df)
ret = op(
index_value=index_value,
columns_value=columns_value,
dtypes=dtypes,
chunk_bytes=chunk_bytes,
)
if nrows is not None:
return ret.head(nrows)
return ret
|
https://github.com/mars-project/mars/issues/1780
|
20/12/13 13:13:40 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
hdfsOpenFile(hdfs://<hdfs_ip>:8020/user/test/parquet_test): FileSystem#open((Lorg/apache/hadoop/fs/Path;I)Lorg/apache/hadoop/fs/FSDataInputStream;) error:
RemoteException: Path is not a file: /user/test/parquet_test
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:70)
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:56)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:2092)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:2062)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1975)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:575)
at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.getBlockLocations(AuthorizationProviderProxyClientProtocol.java:92)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:376)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2226)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2222)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1917)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2220)
java.io.FileNotFoundException: Path is not a file: /user/test/parquet_test
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:70)
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:56)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:2092)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:2062)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1975)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:575)
at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.getBlockLocations(AuthorizationProviderProxyClientProtocol.java:92)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:376)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2226)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2222)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1917)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2220)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:73)
at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:1289)
at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:1274)
at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:1262)
at org.apache.hadoop.hdfs.DFSInputStream.fetchLocatedBlocksAndGetLastBlockLength(DFSInputStream.java:307)
at org.apache.hadoop.hdfs.DFSInputStream.openInfo(DFSInputStream.java:273)
at org.apache.hadoop.hdfs.DFSInputStream.<init>(DFSInputStream.java:265)
at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:1593)
at org.apache.hadoop.hdfs.DistributedFileSystem$4.doCall(DistributedFileSystem.java:338)
at org.apache.hadoop.hdfs.DistributedFileSystem$4.doCall(DistributedFileSystem.java:334)
at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
at org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:334)
Caused by: org.apache.hadoop.ipc.RemoteException(java.io.FileNotFoundException): Path is not a file: /user/test/parquet_test
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:70)
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:56)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:2092)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:2062)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1975)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:575)
at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.getBlockLocations(AuthorizationProviderProxyClientProtocol.java:92)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:376)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2226)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2222)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1917)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2220)
at org.apache.hadoop.ipc.Client.call(Client.java:1504)
at org.apache.hadoop.ipc.Client.call(Client.java:1441)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:230)
at com.sun.proxy.$Proxy10.getBlockLocations(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getBlockLocations(ClientNamenodeProtocolTranslatorPB.java:266)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:260)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:104)
at com.sun.proxy.$Proxy11.getBlockLocations(Unknown Source)
at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:1287)
... 10 more
Traceback (most recent call last):
File "read_hdfs_dir.py", line 12, in <module>
df = md.read_parquet('hdfs://<hdfs_ip>:8020/user/test/parquet_test')
File "/home/test/lib/anaconda3/lib/python3.7/site-packages/mars/dataframe/datasource/read_parquet.py", line 394, in read_parquet
with open_file(file_path, storage_options=storage_options) as f:
File "/home/test/lib/anaconda3/lib/python3.7/site-packages/mars/filesystem.py", line 383, in open_file
f = fs.open(path, mode=mode)
File "pyarrow/io-hdfs.pxi", line 409, in pyarrow.lib.HadoopFileSystem.open
File "pyarrow/error.pxi", line 100, in pyarrow.lib.check_status
OSError: HDFS path exists, but opening file failed: hdfs://<hdfs_ip>:8020/user/test/parquet_test
|
OSError
|
def _tile_partitioned(cls, op):
out_df = op.outputs[0]
shape = (np.nan, out_df.shape[1])
dtypes = cls._to_arrow_dtypes(out_df.dtypes, op)
dataset = pq.ParquetDataset(op.path)
parsed_path = urlparse(op.path)
if not os.path.exists(op.path) and parsed_path.scheme:
path_prefix = f"{parsed_path.scheme}://{parsed_path.netloc}"
else:
path_prefix = ""
chunk_index = 0
out_chunks = []
for piece in dataset.pieces:
chunk_op = op.copy().reset_key()
chunk_op._path = path_prefix + piece.path
chunk_op._partitions = pickle.dumps(dataset.partitions)
chunk_op._partition_keys = piece.partition_keys
new_chunk = chunk_op.new_chunk(
None,
shape=shape,
index=(chunk_index, 0),
index_value=out_df.index_value,
columns_value=out_df.columns_value,
dtypes=dtypes,
)
out_chunks.append(new_chunk)
chunk_index += 1
new_op = op.copy()
nsplits = ((np.nan,) * len(out_chunks), (out_df.shape[1],))
return new_op.new_dataframes(
None,
out_df.shape,
dtypes=dtypes,
index_value=out_df.index_value,
columns_value=out_df.columns_value,
chunks=out_chunks,
nsplits=nsplits,
)
|
def _tile_partitioned(cls, op):
out_df = op.outputs[0]
shape = (np.nan, out_df.shape[1])
dtypes = cls._to_arrow_dtypes(out_df.dtypes, op)
dataset = pq.ParquetDataset(op.path)
chunk_index = 0
out_chunks = []
for piece in dataset.pieces:
chunk_op = op.copy().reset_key()
chunk_op._path = piece.path
chunk_op._partitions = pickle.dumps(dataset.partitions)
chunk_op._partition_keys = piece.partition_keys
new_chunk = chunk_op.new_chunk(
None,
shape=shape,
index=(chunk_index, 0),
index_value=out_df.index_value,
columns_value=out_df.columns_value,
dtypes=dtypes,
)
out_chunks.append(new_chunk)
chunk_index += 1
new_op = op.copy()
nsplits = ((np.nan,) * len(out_chunks), (out_df.shape[1],))
return new_op.new_dataframes(
None,
out_df.shape,
dtypes=dtypes,
index_value=out_df.index_value,
columns_value=out_df.columns_value,
chunks=out_chunks,
nsplits=nsplits,
)
|
https://github.com/mars-project/mars/issues/1780
|
20/12/13 13:13:40 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
hdfsOpenFile(hdfs://<hdfs_ip>:8020/user/test/parquet_test): FileSystem#open((Lorg/apache/hadoop/fs/Path;I)Lorg/apache/hadoop/fs/FSDataInputStream;) error:
RemoteException: Path is not a file: /user/test/parquet_test
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:70)
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:56)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:2092)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:2062)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1975)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:575)
at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.getBlockLocations(AuthorizationProviderProxyClientProtocol.java:92)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:376)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2226)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2222)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1917)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2220)
java.io.FileNotFoundException: Path is not a file: /user/test/parquet_test
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:70)
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:56)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:2092)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:2062)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1975)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:575)
at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.getBlockLocations(AuthorizationProviderProxyClientProtocol.java:92)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:376)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2226)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2222)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1917)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2220)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:73)
at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:1289)
at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:1274)
at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:1262)
at org.apache.hadoop.hdfs.DFSInputStream.fetchLocatedBlocksAndGetLastBlockLength(DFSInputStream.java:307)
at org.apache.hadoop.hdfs.DFSInputStream.openInfo(DFSInputStream.java:273)
at org.apache.hadoop.hdfs.DFSInputStream.<init>(DFSInputStream.java:265)
at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:1593)
at org.apache.hadoop.hdfs.DistributedFileSystem$4.doCall(DistributedFileSystem.java:338)
at org.apache.hadoop.hdfs.DistributedFileSystem$4.doCall(DistributedFileSystem.java:334)
at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
at org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:334)
Caused by: org.apache.hadoop.ipc.RemoteException(java.io.FileNotFoundException): Path is not a file: /user/test/parquet_test
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:70)
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:56)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:2092)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:2062)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1975)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:575)
at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.getBlockLocations(AuthorizationProviderProxyClientProtocol.java:92)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:376)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2226)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2222)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1917)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2220)
at org.apache.hadoop.ipc.Client.call(Client.java:1504)
at org.apache.hadoop.ipc.Client.call(Client.java:1441)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:230)
at com.sun.proxy.$Proxy10.getBlockLocations(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getBlockLocations(ClientNamenodeProtocolTranslatorPB.java:266)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:260)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:104)
at com.sun.proxy.$Proxy11.getBlockLocations(Unknown Source)
at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:1287)
... 10 more
Traceback (most recent call last):
File "read_hdfs_dir.py", line 12, in <module>
df = md.read_parquet('hdfs://<hdfs_ip>:8020/user/test/parquet_test')
File "/home/test/lib/anaconda3/lib/python3.7/site-packages/mars/dataframe/datasource/read_parquet.py", line 394, in read_parquet
with open_file(file_path, storage_options=storage_options) as f:
File "/home/test/lib/anaconda3/lib/python3.7/site-packages/mars/filesystem.py", line 383, in open_file
f = fs.open(path, mode=mode)
File "pyarrow/io-hdfs.pxi", line 409, in pyarrow.lib.HadoopFileSystem.open
File "pyarrow/error.pxi", line 100, in pyarrow.lib.check_status
OSError: HDFS path exists, but opening file failed: hdfs://<hdfs_ip>:8020/user/test/parquet_test
|
OSError
|
def tile(cls, op):
if get_fs(op.path, op.storage_options).isdir(op.path):
return cls._tile_partitioned(op)
else:
return cls._tile_no_partitioned(op)
|
def tile(cls, op):
if os.path.isdir(op.path):
return cls._tile_partitioned(op)
else:
return cls._tile_no_partitioned(op)
|
https://github.com/mars-project/mars/issues/1780
|
20/12/13 13:13:40 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
hdfsOpenFile(hdfs://<hdfs_ip>:8020/user/test/parquet_test): FileSystem#open((Lorg/apache/hadoop/fs/Path;I)Lorg/apache/hadoop/fs/FSDataInputStream;) error:
RemoteException: Path is not a file: /user/test/parquet_test
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:70)
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:56)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:2092)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:2062)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1975)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:575)
at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.getBlockLocations(AuthorizationProviderProxyClientProtocol.java:92)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:376)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2226)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2222)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1917)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2220)
java.io.FileNotFoundException: Path is not a file: /user/test/parquet_test
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:70)
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:56)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:2092)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:2062)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1975)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:575)
at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.getBlockLocations(AuthorizationProviderProxyClientProtocol.java:92)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:376)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2226)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2222)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1917)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2220)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:73)
at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:1289)
at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:1274)
at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:1262)
at org.apache.hadoop.hdfs.DFSInputStream.fetchLocatedBlocksAndGetLastBlockLength(DFSInputStream.java:307)
at org.apache.hadoop.hdfs.DFSInputStream.openInfo(DFSInputStream.java:273)
at org.apache.hadoop.hdfs.DFSInputStream.<init>(DFSInputStream.java:265)
at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:1593)
at org.apache.hadoop.hdfs.DistributedFileSystem$4.doCall(DistributedFileSystem.java:338)
at org.apache.hadoop.hdfs.DistributedFileSystem$4.doCall(DistributedFileSystem.java:334)
at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
at org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:334)
Caused by: org.apache.hadoop.ipc.RemoteException(java.io.FileNotFoundException): Path is not a file: /user/test/parquet_test
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:70)
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:56)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:2092)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:2062)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1975)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:575)
at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.getBlockLocations(AuthorizationProviderProxyClientProtocol.java:92)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:376)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2226)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2222)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1917)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2220)
at org.apache.hadoop.ipc.Client.call(Client.java:1504)
at org.apache.hadoop.ipc.Client.call(Client.java:1441)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:230)
at com.sun.proxy.$Proxy10.getBlockLocations(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getBlockLocations(ClientNamenodeProtocolTranslatorPB.java:266)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:260)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:104)
at com.sun.proxy.$Proxy11.getBlockLocations(Unknown Source)
at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:1287)
... 10 more
Traceback (most recent call last):
File "read_hdfs_dir.py", line 12, in <module>
df = md.read_parquet('hdfs://<hdfs_ip>:8020/user/test/parquet_test')
File "/home/test/lib/anaconda3/lib/python3.7/site-packages/mars/dataframe/datasource/read_parquet.py", line 394, in read_parquet
with open_file(file_path, storage_options=storage_options) as f:
File "/home/test/lib/anaconda3/lib/python3.7/site-packages/mars/filesystem.py", line 383, in open_file
f = fs.open(path, mode=mode)
File "pyarrow/io-hdfs.pxi", line 409, in pyarrow.lib.HadoopFileSystem.open
File "pyarrow/error.pxi", line 100, in pyarrow.lib.check_status
OSError: HDFS path exists, but opening file failed: hdfs://<hdfs_ip>:8020/user/test/parquet_test
|
OSError
|
def read_parquet(
path,
engine: str = "auto",
columns=None,
groups_as_chunks=False,
use_arrow_dtype=None,
incremental_index=False,
storage_options=None,
**kwargs,
):
"""
Load a parquet object from the file path, returning a DataFrame.
Parameters
----------
path : str, path object or file-like object
Any valid string path is acceptable. The string could be a URL.
For file URLs, a host is expected. A local file could be:
``file://localhost/path/to/table.parquet``.
A file URL can also be a path to a directory that contains multiple
partitioned parquet files. Both pyarrow and fastparquet support
paths to directories as well as file URLs. A directory path could be:
``file://localhost/path/to/tables``.
By file-like object, we refer to objects with a ``read()`` method,
such as a file handler (e.g. via builtin ``open`` function)
or ``StringIO``.
engine : {'auto', 'pyarrow', 'fastparquet'}, default 'auto'
Parquet library to use. The default behavior is to try 'pyarrow',
falling back to 'fastparquet' if 'pyarrow' is unavailable.
columns : list, default=None
If not None, only these columns will be read from the file.
groups_as_chunks : bool, default False
if True, each row group correspond to a chunk.
if False, each file correspond to a chunk.
Only available for 'pyarrow' engine.
incremental_index: bool, default False
Create a new RangeIndex if csv doesn't contain index columns.
use_arrow_dtype: bool, default None
If True, use arrow dtype to store columns.
storage_options: dict, optional
Options for storage connection.
**kwargs
Any additional kwargs are passed to the engine.
Returns
-------
Mars DataFrame
"""
engine_type = check_engine(engine)
engine = get_engine(engine_type)
if get_fs(path, storage_options).isdir(path):
# If path is a directory, we will read as a partitioned datasets.
if engine_type != "pyarrow":
raise TypeError(
"Only support pyarrow engine when reading frompartitioned datasets."
)
dataset = pq.ParquetDataset(path)
dtypes = dataset.schema.to_arrow_schema().empty_table().to_pandas().dtypes
for partition in dataset.partitions:
dtypes[partition.name] = pd.CategoricalDtype()
else:
if not isinstance(path, list):
file_path = glob(path, storage_options=storage_options)[0]
else:
file_path = path[0]
with open_file(file_path, storage_options=storage_options) as f:
dtypes = engine.read_dtypes(f)
if columns:
dtypes = dtypes[columns]
if use_arrow_dtype is None:
use_arrow_dtype = options.dataframe.use_arrow_dtype
if use_arrow_dtype:
dtypes = to_arrow_dtypes(dtypes)
index_value = parse_index(pd.RangeIndex(-1))
columns_value = parse_index(dtypes.index, store_data=True)
op = DataFrameReadParquet(
path=path,
engine=engine_type,
columns=columns,
groups_as_chunks=groups_as_chunks,
use_arrow_dtype=use_arrow_dtype,
read_kwargs=kwargs,
incremental_index=incremental_index,
storage_options=storage_options,
)
return op(index_value=index_value, columns_value=columns_value, dtypes=dtypes)
|
def read_parquet(
path,
engine: str = "auto",
columns=None,
groups_as_chunks=False,
use_arrow_dtype=None,
incremental_index=False,
storage_options=None,
**kwargs,
):
"""
Load a parquet object from the file path, returning a DataFrame.
Parameters
----------
path : str, path object or file-like object
Any valid string path is acceptable. The string could be a URL.
For file URLs, a host is expected. A local file could be:
``file://localhost/path/to/table.parquet``.
A file URL can also be a path to a directory that contains multiple
partitioned parquet files. Both pyarrow and fastparquet support
paths to directories as well as file URLs. A directory path could be:
``file://localhost/path/to/tables``.
By file-like object, we refer to objects with a ``read()`` method,
such as a file handler (e.g. via builtin ``open`` function)
or ``StringIO``.
engine : {'auto', 'pyarrow', 'fastparquet'}, default 'auto'
Parquet library to use. The default behavior is to try 'pyarrow',
falling back to 'fastparquet' if 'pyarrow' is unavailable.
columns : list, default=None
If not None, only these columns will be read from the file.
groups_as_chunks : bool, default False
if True, each row group correspond to a chunk.
if False, each file correspond to a chunk.
Only available for 'pyarrow' engine.
incremental_index: bool, default False
Create a new RangeIndex if csv doesn't contain index columns.
use_arrow_dtype: bool, default None
If True, use arrow dtype to store columns.
storage_options: dict, optional
Options for storage connection.
**kwargs
Any additional kwargs are passed to the engine.
Returns
-------
Mars DataFrame
"""
engine_type = check_engine(engine)
engine = get_engine(engine_type)
if os.path.isdir(path):
# If path is a directory, we will read as a partitioned datasets.
if engine_type != "pyarrow":
raise TypeError(
"Only support pyarrow engine when reading frompartitioned datasets."
)
dataset = pq.ParquetDataset(path)
dtypes = dataset.schema.to_arrow_schema().empty_table().to_pandas().dtypes
for partition in dataset.partitions:
dtypes[partition.name] = pd.CategoricalDtype()
else:
if not isinstance(path, list):
file_path = glob(path, storage_options=storage_options)[0]
else:
file_path = path[0]
with open_file(file_path, storage_options=storage_options) as f:
dtypes = engine.read_dtypes(f)
if columns:
dtypes = dtypes[columns]
if use_arrow_dtype is None:
use_arrow_dtype = options.dataframe.use_arrow_dtype
if use_arrow_dtype:
dtypes = to_arrow_dtypes(dtypes)
index_value = parse_index(pd.RangeIndex(-1))
columns_value = parse_index(dtypes.index, store_data=True)
op = DataFrameReadParquet(
path=path,
engine=engine_type,
columns=columns,
groups_as_chunks=groups_as_chunks,
use_arrow_dtype=use_arrow_dtype,
read_kwargs=kwargs,
incremental_index=incremental_index,
storage_options=storage_options,
)
return op(index_value=index_value, columns_value=columns_value, dtypes=dtypes)
|
https://github.com/mars-project/mars/issues/1780
|
20/12/13 13:13:40 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
hdfsOpenFile(hdfs://<hdfs_ip>:8020/user/test/parquet_test): FileSystem#open((Lorg/apache/hadoop/fs/Path;I)Lorg/apache/hadoop/fs/FSDataInputStream;) error:
RemoteException: Path is not a file: /user/test/parquet_test
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:70)
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:56)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:2092)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:2062)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1975)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:575)
at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.getBlockLocations(AuthorizationProviderProxyClientProtocol.java:92)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:376)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2226)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2222)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1917)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2220)
java.io.FileNotFoundException: Path is not a file: /user/test/parquet_test
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:70)
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:56)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:2092)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:2062)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1975)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:575)
at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.getBlockLocations(AuthorizationProviderProxyClientProtocol.java:92)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:376)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2226)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2222)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1917)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2220)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:73)
at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:1289)
at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:1274)
at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:1262)
at org.apache.hadoop.hdfs.DFSInputStream.fetchLocatedBlocksAndGetLastBlockLength(DFSInputStream.java:307)
at org.apache.hadoop.hdfs.DFSInputStream.openInfo(DFSInputStream.java:273)
at org.apache.hadoop.hdfs.DFSInputStream.<init>(DFSInputStream.java:265)
at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:1593)
at org.apache.hadoop.hdfs.DistributedFileSystem$4.doCall(DistributedFileSystem.java:338)
at org.apache.hadoop.hdfs.DistributedFileSystem$4.doCall(DistributedFileSystem.java:334)
at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
at org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:334)
Caused by: org.apache.hadoop.ipc.RemoteException(java.io.FileNotFoundException): Path is not a file: /user/test/parquet_test
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:70)
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:56)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:2092)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:2062)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1975)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:575)
at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.getBlockLocations(AuthorizationProviderProxyClientProtocol.java:92)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:376)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2226)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2222)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1917)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2220)
at org.apache.hadoop.ipc.Client.call(Client.java:1504)
at org.apache.hadoop.ipc.Client.call(Client.java:1441)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:230)
at com.sun.proxy.$Proxy10.getBlockLocations(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getBlockLocations(ClientNamenodeProtocolTranslatorPB.java:266)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:260)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:104)
at com.sun.proxy.$Proxy11.getBlockLocations(Unknown Source)
at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:1287)
... 10 more
Traceback (most recent call last):
File "read_hdfs_dir.py", line 12, in <module>
df = md.read_parquet('hdfs://<hdfs_ip>:8020/user/test/parquet_test')
File "/home/test/lib/anaconda3/lib/python3.7/site-packages/mars/dataframe/datasource/read_parquet.py", line 394, in read_parquet
with open_file(file_path, storage_options=storage_options) as f:
File "/home/test/lib/anaconda3/lib/python3.7/site-packages/mars/filesystem.py", line 383, in open_file
f = fs.open(path, mode=mode)
File "pyarrow/io-hdfs.pxi", line 409, in pyarrow.lib.HadoopFileSystem.open
File "pyarrow/error.pxi", line 100, in pyarrow.lib.check_status
OSError: HDFS path exists, but opening file failed: hdfs://<hdfs_ip>:8020/user/test/parquet_test
|
OSError
|
def fetch_data(
self,
session_id,
tileable_key,
index_obj=None,
serial=True,
serial_type=None,
compressions=None,
pickle_protocol=None,
):
logger.debug("Fetching tileable data %s", tileable_key)
session_uid = SessionActor.gen_uid(session_id)
session_ref = self.get_actor_ref(session_uid)
graph_ref = self.actor_client.actor_ref(
session_ref.get_graph_ref_by_tileable_key(tileable_key)
)
nsplits, chunk_keys, chunk_indexes = graph_ref.get_tileable_metas([tileable_key])[0]
return self.fetch_chunks_data(
session_id,
chunk_indexes,
chunk_keys,
nsplits,
index_obj=index_obj,
serial=serial,
serial_type=serial_type,
compressions=compressions,
pickle_protocol=pickle_protocol,
)
|
def fetch_data(
self,
session_id,
tileable_key,
index_obj=None,
serial=True,
serial_type=None,
compressions=None,
pickle_protocol=None,
):
session_uid = SessionActor.gen_uid(session_id)
session_ref = self.get_actor_ref(session_uid)
graph_ref = self.actor_client.actor_ref(
session_ref.get_graph_ref_by_tileable_key(tileable_key)
)
nsplits, chunk_keys, chunk_indexes = graph_ref.get_tileable_metas([tileable_key])[0]
return self.fetch_chunks_data(
session_id,
chunk_indexes,
chunk_keys,
nsplits,
index_obj=index_obj,
serial=serial,
serial_type=serial_type,
compressions=compressions,
pickle_protocol=pickle_protocol,
)
|
https://github.com/mars-project/mars/issues/1771
|
Traceback (most recent call last):
File "/Users/wenjun.swj/miniconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 3417, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-9-60fd09690beb>", line 1, in <module>
df[df[0] > 0.5].execute()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 643, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 639, in run
self.data.execute(session, **kw)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 379, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 860, in execute_tileables
chunk_graph = chunk_graph_builder.build(
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 347, in build
chunk_graph = super().build(
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 201, in _tile
tds[0]._inplace_tile()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 168, in _inplace_tile
return handler.inplace_tile(self)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 136, in inplace_tile
dispatched = self.dispatch(to_tile.op)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/indexing/getitem.py", line 273, in tile
return cls.tile_with_mask(op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/indexing/getitem.py", line 287, in tile_with_mask
align_dataframe_series(in_df, mask, axis='index')
File "/Users/wenjun.swj/Code/mars/mars/dataframe/align.py", line 713, in align_dataframe_series
index_splits, index_nsplits = _calc_axis_splits(left.index_value, right.index_value,
File "/Users/wenjun.swj/Code/mars/mars/dataframe/align.py", line 490, in _calc_axis_splits
right_splits = left_splits = [[c] for c in left_chunk_index_min_max]
TypeError: 'NoneType' object is not iterable
|
TypeError
|
def fetch_chunk_data(self, session_id, chunk_key, index_obj=None):
endpoints = self.chunk_meta_client.get_workers(session_id, chunk_key)
if endpoints is None:
raise KeyError(f"Chunk key {chunk_key} not exist in cluster")
source_endpoint = random.choice(endpoints)
logger.debug("Fetching chunk %s from worker %s", chunk_key, source_endpoint)
sender_ref = self.actor_client.actor_ref(
ResultSenderActor.default_uid(), address=source_endpoint
)
return sender_ref.fetch_data(session_id, chunk_key, index_obj, _wait=False)
|
def fetch_chunk_data(self, session_id, chunk_key, index_obj=None):
endpoints = self.chunk_meta_client.get_workers(session_id, chunk_key)
if endpoints is None:
raise KeyError(f"Chunk key {chunk_key} not exist in cluster")
sender_ref = self.actor_client.actor_ref(
ResultSenderActor.default_uid(), address=random.choice(endpoints)
)
return sender_ref.fetch_data(session_id, chunk_key, index_obj, _wait=False)
|
https://github.com/mars-project/mars/issues/1771
|
Traceback (most recent call last):
File "/Users/wenjun.swj/miniconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 3417, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-9-60fd09690beb>", line 1, in <module>
df[df[0] > 0.5].execute()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 643, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 639, in run
self.data.execute(session, **kw)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 379, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 860, in execute_tileables
chunk_graph = chunk_graph_builder.build(
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 347, in build
chunk_graph = super().build(
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 201, in _tile
tds[0]._inplace_tile()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 168, in _inplace_tile
return handler.inplace_tile(self)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 136, in inplace_tile
dispatched = self.dispatch(to_tile.op)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/indexing/getitem.py", line 273, in tile
return cls.tile_with_mask(op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/indexing/getitem.py", line 287, in tile_with_mask
align_dataframe_series(in_df, mask, axis='index')
File "/Users/wenjun.swj/Code/mars/mars/dataframe/align.py", line 713, in align_dataframe_series
index_splits, index_nsplits = _calc_axis_splits(left.index_value, right.index_value,
File "/Users/wenjun.swj/Code/mars/mars/dataframe/align.py", line 490, in _calc_axis_splits
right_splits = left_splits = [[c] for c in left_chunk_index_min_max]
TypeError: 'NoneType' object is not iterable
|
TypeError
|
def _get_chunk_index_min_max(index_chunks):
chunk_index_min_max = []
for chunk in index_chunks:
min_val = chunk.min_val
min_val_close = chunk.min_val_close
max_val = chunk.max_val
max_val_close = chunk.max_val_close
if min_val is None or max_val is None:
chunk_index_min_max.append((None, True, None, True))
else:
chunk_index_min_max.append((min_val, min_val_close, max_val, max_val_close))
return chunk_index_min_max
|
def _get_chunk_index_min_max(index_chunks):
chunk_index_min_max = []
for chunk in index_chunks:
min_val = chunk.min_val
min_val_close = chunk.min_val_close
max_val = chunk.max_val
max_val_close = chunk.max_val_close
if min_val is None or max_val is None:
return
chunk_index_min_max.append((min_val, min_val_close, max_val, max_val_close))
return chunk_index_min_max
|
https://github.com/mars-project/mars/issues/1771
|
Traceback (most recent call last):
File "/Users/wenjun.swj/miniconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 3417, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-9-60fd09690beb>", line 1, in <module>
df[df[0] > 0.5].execute()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 643, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 639, in run
self.data.execute(session, **kw)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 379, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 860, in execute_tileables
chunk_graph = chunk_graph_builder.build(
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 347, in build
chunk_graph = super().build(
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 201, in _tile
tds[0]._inplace_tile()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 168, in _inplace_tile
return handler.inplace_tile(self)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 136, in inplace_tile
dispatched = self.dispatch(to_tile.op)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/indexing/getitem.py", line 273, in tile
return cls.tile_with_mask(op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/indexing/getitem.py", line 287, in tile_with_mask
align_dataframe_series(in_df, mask, axis='index')
File "/Users/wenjun.swj/Code/mars/mars/dataframe/align.py", line 713, in align_dataframe_series
index_splits, index_nsplits = _calc_axis_splits(left.index_value, right.index_value,
File "/Users/wenjun.swj/Code/mars/mars/dataframe/align.py", line 490, in _calc_axis_splits
right_splits = left_splits = [[c] for c in left_chunk_index_min_max]
TypeError: 'NoneType' object is not iterable
|
TypeError
|
def _need_align_map(
input_chunk,
index_min_max,
column_min_max,
dummy_index_splits=False,
dummy_column_splits=False,
):
if isinstance(input_chunk, SERIES_CHUNK_TYPE):
if input_chunk.index_value is None:
return True
if input_chunk.index_value.min_max != index_min_max:
return True
else:
if not dummy_index_splits:
if (
input_chunk.index_value is None
or input_chunk.index_value.min_max != index_min_max
):
return True
if not dummy_column_splits:
if (
input_chunk.columns_value is None
or input_chunk.columns_value.min_max != column_min_max
):
return True
return False
|
def _need_align_map(
input_chunk,
index_min_max,
column_min_max,
dummy_index_splits=False,
dummy_column_splits=False,
):
if not dummy_index_splits:
assert not index_min_max[0] is None and not index_min_max[2] is None
if isinstance(input_chunk, SERIES_CHUNK_TYPE):
if input_chunk.index_value is None:
return True
if input_chunk.index_value.min_max != index_min_max:
return True
else:
if not dummy_index_splits:
if (
input_chunk.index_value is None
or input_chunk.index_value.min_max != index_min_max
):
return True
if not dummy_column_splits:
if (
input_chunk.columns_value is None
or input_chunk.columns_value.min_max != column_min_max
):
return True
return False
|
https://github.com/mars-project/mars/issues/1771
|
Traceback (most recent call last):
File "/Users/wenjun.swj/miniconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 3417, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-9-60fd09690beb>", line 1, in <module>
df[df[0] > 0.5].execute()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 643, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 639, in run
self.data.execute(session, **kw)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 379, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 860, in execute_tileables
chunk_graph = chunk_graph_builder.build(
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 347, in build
chunk_graph = super().build(
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 201, in _tile
tds[0]._inplace_tile()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 168, in _inplace_tile
return handler.inplace_tile(self)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 136, in inplace_tile
dispatched = self.dispatch(to_tile.op)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/indexing/getitem.py", line 273, in tile
return cls.tile_with_mask(op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/indexing/getitem.py", line 287, in tile_with_mask
align_dataframe_series(in_df, mask, axis='index')
File "/Users/wenjun.swj/Code/mars/mars/dataframe/align.py", line 713, in align_dataframe_series
index_splits, index_nsplits = _calc_axis_splits(left.index_value, right.index_value,
File "/Users/wenjun.swj/Code/mars/mars/dataframe/align.py", line 490, in _calc_axis_splits
right_splits = left_splits = [[c] for c in left_chunk_index_min_max]
TypeError: 'NoneType' object is not iterable
|
TypeError
|
def _install():
from ..core import DATAFRAME_TYPE, SERIES_TYPE, INDEX_TYPE
def _register_method(cls, name, func, wrapper=None):
if wrapper is None:
@functools.wraps(func)
def wrapper(df, *args, **kwargs):
return func(df, *args, **kwargs)
try:
if issubclass(cls, DATAFRAME_TYPE):
wrapper.__doc__ = func.__frame_doc__
elif issubclass(cls, SERIES_TYPE):
wrapper.__doc__ = func.__series_doc__
else:
wrapper = func
except AttributeError:
wrapper = func
wrapper.__name__ = func.__name__
setattr(cls, name, wrapper)
def _register_bin_method(cls, name, func):
def call_df_fill(df, other, axis="columns", level=None, fill_value=None):
return func(df, other, axis=axis, level=level, fill_value=fill_value)
def call_df_no_fill(df, other, axis="columns", level=None):
return func(df, other, axis=axis, level=level)
def call_series_fill(df, other, level=None, fill_value=None, axis=0):
return func(df, other, axis=axis, level=level, fill_value=fill_value)
def call_series_no_fill(df, other, level=None, axis=0):
return func(df, other, axis=axis, level=level)
if issubclass(cls, DATAFRAME_TYPE):
call = (
call_df_fill
if "fill_value" in func.__code__.co_varnames
else call_df_no_fill
)
elif issubclass(cls, SERIES_TYPE):
call = (
call_series_fill
if "fill_value" in func.__code__.co_varnames
else call_series_no_fill
)
else:
call = None
return _register_method(cls, name, func, wrapper=call)
# register mars tensor ufuncs
ufunc_ops = [
# unary
DataFrameAbs,
DataFrameLog,
DataFrameLog2,
DataFrameLog10,
DataFrameSin,
DataFrameCos,
DataFrameTan,
DataFrameSinh,
DataFrameCosh,
DataFrameTanh,
DataFrameArcsin,
DataFrameArccos,
DataFrameArctan,
DataFrameArcsinh,
DataFrameArccosh,
DataFrameArctanh,
DataFrameRadians,
DataFrameDegrees,
DataFrameCeil,
DataFrameFloor,
DataFrameAround,
DataFrameExp,
DataFrameExp2,
DataFrameExpm1,
DataFrameSqrt,
DataFrameNot,
DataFrameIsNan,
DataFrameIsInf,
DataFrameIsFinite,
DataFrameNegative,
# binary
DataFrameAdd,
DataFrameEqual,
DataFrameFloorDiv,
DataFrameGreater,
DataFrameGreaterEqual,
DataFrameLess,
DataFrameLessEqual,
DataFrameAnd,
DataFrameOr,
DataFrameXor,
DataFrameMod,
DataFrameMul,
DataFrameNotEqual,
DataFramePower,
DataFrameSubtract,
DataFrameTrueDiv,
]
for ufunc_op in ufunc_ops:
register_tensor_ufunc(ufunc_op)
for entity in DATAFRAME_TYPE + SERIES_TYPE:
setattr(entity, "__abs__", abs_)
setattr(entity, "abs", abs_)
_register_method(entity, "round", around)
setattr(entity, "__invert__", invert)
setattr(entity, "__add__", wrap_notimplemented_exception(add))
setattr(entity, "__radd__", wrap_notimplemented_exception(radd))
_register_bin_method(entity, "add", add)
_register_bin_method(entity, "radd", radd)
setattr(entity, "__sub__", wrap_notimplemented_exception(subtract))
setattr(entity, "__rsub__", wrap_notimplemented_exception(rsubtract))
_register_bin_method(entity, "sub", subtract)
_register_bin_method(entity, "rsub", rsubtract)
setattr(entity, "__mul__", wrap_notimplemented_exception(mul))
setattr(entity, "__rmul__", wrap_notimplemented_exception(rmul))
_register_bin_method(entity, "mul", mul)
_register_bin_method(entity, "multiply", mul)
_register_bin_method(entity, "rmul", rmul)
setattr(entity, "__floordiv__", wrap_notimplemented_exception(floordiv))
setattr(entity, "__rfloordiv__", wrap_notimplemented_exception(rfloordiv))
setattr(entity, "__truediv__", wrap_notimplemented_exception(truediv))
setattr(entity, "__rtruediv__", wrap_notimplemented_exception(rtruediv))
setattr(entity, "__div__", wrap_notimplemented_exception(truediv))
setattr(entity, "__rdiv__", wrap_notimplemented_exception(rtruediv))
_register_bin_method(entity, "floordiv", floordiv)
_register_bin_method(entity, "rfloordiv", rfloordiv)
_register_bin_method(entity, "truediv", truediv)
_register_bin_method(entity, "rtruediv", rtruediv)
_register_bin_method(entity, "div", truediv)
_register_bin_method(entity, "rdiv", rtruediv)
setattr(entity, "__mod__", wrap_notimplemented_exception(mod))
setattr(entity, "__rmod__", wrap_notimplemented_exception(rmod))
_register_bin_method(entity, "mod", mod)
_register_bin_method(entity, "rmod", rmod)
setattr(entity, "__pow__", wrap_notimplemented_exception(power))
setattr(entity, "__rpow__", wrap_notimplemented_exception(rpower))
_register_bin_method(entity, "pow", power)
_register_bin_method(entity, "rpow", rpower)
setattr(entity, "__eq__", _wrap_eq())
setattr(entity, "__ne__", _wrap_comparison(ne))
setattr(entity, "__lt__", _wrap_comparison(lt))
setattr(entity, "__gt__", _wrap_comparison(gt))
setattr(entity, "__ge__", _wrap_comparison(ge))
setattr(entity, "__le__", _wrap_comparison(le))
_register_bin_method(entity, "eq", eq)
_register_bin_method(entity, "ne", ne)
_register_bin_method(entity, "lt", lt)
_register_bin_method(entity, "gt", gt)
_register_bin_method(entity, "ge", ge)
_register_bin_method(entity, "le", le)
setattr(entity, "__matmul__", dot)
_register_method(entity, "dot", dot)
setattr(entity, "__and__", wrap_notimplemented_exception(bitand))
setattr(entity, "__rand__", wrap_notimplemented_exception(rbitand))
setattr(entity, "__or__", wrap_notimplemented_exception(bitor))
setattr(entity, "__ror__", wrap_notimplemented_exception(rbitor))
setattr(entity, "__xor__", wrap_notimplemented_exception(bitxor))
setattr(entity, "__rxor__", wrap_notimplemented_exception(rbitxor))
setattr(entity, "__neg__", wrap_notimplemented_exception(negative))
for entity in INDEX_TYPE:
setattr(entity, "__eq__", _wrap_eq())
|
def _install():
from ..core import DATAFRAME_TYPE, SERIES_TYPE, INDEX_TYPE
def _register_method(cls, name, func, wrapper=None):
if wrapper is None:
@functools.wraps(func)
def wrapper(df, *args, **kwargs):
return func(df, *args, **kwargs)
try:
if issubclass(cls, DATAFRAME_TYPE):
wrapper.__doc__ = func.__frame_doc__
elif issubclass(cls, SERIES_TYPE):
wrapper.__doc__ = func.__series_doc__
else:
wrapper = func
except AttributeError:
wrapper = func
wrapper.__name__ = func.__name__
setattr(cls, name, wrapper)
def _register_bin_method(cls, name, func):
def call_df_fill(df, other, axis="columns", level=None, fill_value=None):
return func(df, other, axis=axis, level=level, fill_value=fill_value)
def call_df_no_fill(df, other, axis="columns", level=None):
return func(df, other, axis=axis, level=level)
def call_series_fill(df, other, level=None, fill_value=None, axis=0):
return func(df, other, axis=axis, level=level, fill_value=fill_value)
def call_series_no_fill(df, other, level=None, axis=0):
return func(df, other, axis=axis, level=level)
if issubclass(cls, DATAFRAME_TYPE):
call = (
call_df_fill
if "fill_value" in func.__code__.co_varnames
else call_df_no_fill
)
elif issubclass(cls, SERIES_TYPE):
call = (
call_series_fill
if "fill_value" in func.__code__.co_varnames
else call_series_no_fill
)
else:
call = None
return _register_method(cls, name, func, wrapper=call)
# register mars unary ufuncs
unary_ops = [
DataFrameAbs,
DataFrameLog,
DataFrameLog2,
DataFrameLog10,
DataFrameSin,
DataFrameCos,
DataFrameTan,
DataFrameSinh,
DataFrameCosh,
DataFrameTanh,
DataFrameArcsin,
DataFrameArccos,
DataFrameArctan,
DataFrameArcsinh,
DataFrameArccosh,
DataFrameArctanh,
DataFrameRadians,
DataFrameDegrees,
DataFrameCeil,
DataFrameFloor,
DataFrameAround,
DataFrameExp,
DataFrameExp2,
DataFrameExpm1,
DataFrameSqrt,
DataFrameNot,
DataFrameIsNan,
DataFrameIsInf,
DataFrameIsFinite,
DataFrameNegative,
]
for unary_op in unary_ops:
register_tensor_unary_ufunc(unary_op)
for entity in DATAFRAME_TYPE + SERIES_TYPE:
setattr(entity, "__abs__", abs_)
setattr(entity, "abs", abs_)
_register_method(entity, "round", around)
setattr(entity, "__invert__", logical_not)
setattr(entity, "__add__", wrap_notimplemented_exception(add))
setattr(entity, "__radd__", wrap_notimplemented_exception(radd))
_register_bin_method(entity, "add", add)
_register_bin_method(entity, "radd", radd)
setattr(entity, "__sub__", wrap_notimplemented_exception(subtract))
setattr(entity, "__rsub__", wrap_notimplemented_exception(rsubtract))
_register_bin_method(entity, "sub", subtract)
_register_bin_method(entity, "rsub", rsubtract)
setattr(entity, "__mul__", wrap_notimplemented_exception(mul))
setattr(entity, "__rmul__", wrap_notimplemented_exception(rmul))
_register_bin_method(entity, "mul", mul)
_register_bin_method(entity, "multiply", mul)
_register_bin_method(entity, "rmul", rmul)
setattr(entity, "__floordiv__", wrap_notimplemented_exception(floordiv))
setattr(entity, "__rfloordiv__", wrap_notimplemented_exception(rfloordiv))
setattr(entity, "__truediv__", wrap_notimplemented_exception(truediv))
setattr(entity, "__rtruediv__", wrap_notimplemented_exception(rtruediv))
setattr(entity, "__div__", wrap_notimplemented_exception(truediv))
setattr(entity, "__rdiv__", wrap_notimplemented_exception(rtruediv))
_register_bin_method(entity, "floordiv", floordiv)
_register_bin_method(entity, "rfloordiv", rfloordiv)
_register_bin_method(entity, "truediv", truediv)
_register_bin_method(entity, "rtruediv", rtruediv)
_register_bin_method(entity, "div", truediv)
_register_bin_method(entity, "rdiv", rtruediv)
setattr(entity, "__mod__", wrap_notimplemented_exception(mod))
setattr(entity, "__rmod__", wrap_notimplemented_exception(rmod))
_register_bin_method(entity, "mod", mod)
_register_bin_method(entity, "rmod", rmod)
setattr(entity, "__pow__", wrap_notimplemented_exception(power))
setattr(entity, "__rpow__", wrap_notimplemented_exception(rpower))
_register_bin_method(entity, "pow", power)
_register_bin_method(entity, "rpow", rpower)
setattr(entity, "__eq__", _wrap_eq())
setattr(entity, "__ne__", _wrap_comparison(ne))
setattr(entity, "__lt__", _wrap_comparison(lt))
setattr(entity, "__gt__", _wrap_comparison(gt))
setattr(entity, "__ge__", _wrap_comparison(ge))
setattr(entity, "__le__", _wrap_comparison(le))
_register_bin_method(entity, "eq", eq)
_register_bin_method(entity, "ne", ne)
_register_bin_method(entity, "lt", lt)
_register_bin_method(entity, "gt", gt)
_register_bin_method(entity, "ge", ge)
_register_bin_method(entity, "le", le)
setattr(entity, "__matmul__", dot)
_register_method(entity, "dot", dot)
setattr(entity, "__and__", wrap_notimplemented_exception(logical_and))
setattr(entity, "__rand__", wrap_notimplemented_exception(logical_rand))
setattr(entity, "__or__", wrap_notimplemented_exception(logical_or))
setattr(entity, "__ror__", wrap_notimplemented_exception(logical_ror))
setattr(entity, "__xor__", wrap_notimplemented_exception(logical_xor))
setattr(entity, "__rxor__", wrap_notimplemented_exception(logical_rxor))
setattr(entity, "__neg__", wrap_notimplemented_exception(negative))
for entity in INDEX_TYPE:
setattr(entity, "__eq__", _wrap_eq())
|
https://github.com/mars-project/mars/issues/1771
|
Traceback (most recent call last):
File "/Users/wenjun.swj/miniconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 3417, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-9-60fd09690beb>", line 1, in <module>
df[df[0] > 0.5].execute()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 643, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 639, in run
self.data.execute(session, **kw)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 379, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 860, in execute_tileables
chunk_graph = chunk_graph_builder.build(
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 347, in build
chunk_graph = super().build(
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 201, in _tile
tds[0]._inplace_tile()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 168, in _inplace_tile
return handler.inplace_tile(self)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 136, in inplace_tile
dispatched = self.dispatch(to_tile.op)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/indexing/getitem.py", line 273, in tile
return cls.tile_with_mask(op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/indexing/getitem.py", line 287, in tile_with_mask
align_dataframe_series(in_df, mask, axis='index')
File "/Users/wenjun.swj/Code/mars/mars/dataframe/align.py", line 713, in align_dataframe_series
index_splits, index_nsplits = _calc_axis_splits(left.index_value, right.index_value,
File "/Users/wenjun.swj/Code/mars/mars/dataframe/align.py", line 490, in _calc_axis_splits
right_splits = left_splits = [[c] for c in left_chunk_index_min_max]
TypeError: 'NoneType' object is not iterable
|
TypeError
|
def _tile_with_tensor(cls, op):
out = op.outputs[0]
axis = op.axis
if axis is None:
axis = 0
rhs_is_tensor = isinstance(op.rhs, TENSOR_TYPE)
tensor, other = (op.rhs, op.lhs) if rhs_is_tensor else (op.lhs, op.rhs)
if tensor.shape == other.shape:
tensor = tensor.rechunk(other.nsplits)._inplace_tile()
else:
# shape differs only when dataframe add 1-d tensor, we need rechunk on columns axis.
if axis in ["columns", 1] and other.ndim == 1:
# force axis == 0 if it's Series other than DataFrame
axis = 0
rechunk_size = (
other.nsplits[1] if axis == "columns" or axis == 1 else other.nsplits[0]
)
if tensor.ndim > 0:
tensor = tensor.rechunk((rechunk_size,))._inplace_tile()
cum_splits = [0] + np.cumsum(other.nsplits[axis]).tolist()
out_chunks = []
for out_index in itertools.product(*(map(range, other.chunk_shape))):
tensor_chunk = tensor.cix[out_index[: tensor.ndim]]
other_chunk = other.cix[out_index]
out_op = op.copy().reset_key()
inputs = (
[other_chunk, tensor_chunk]
if rhs_is_tensor
else [tensor_chunk, other_chunk]
)
if isinstance(other_chunk, DATAFRAME_CHUNK_TYPE):
start = cum_splits[out_index[axis]]
end = cum_splits[out_index[axis] + 1]
chunk_dtypes = out.dtypes.iloc[start:end]
out_chunk = out_op.new_chunk(
inputs,
shape=other_chunk.shape,
index=other_chunk.index,
dtypes=chunk_dtypes,
index_value=other_chunk.index_value,
columns_value=other.columns_value,
)
else:
out_chunk = out_op.new_chunk(
inputs,
shape=other_chunk.shape,
index=other_chunk.index,
dtype=out.dtype,
index_value=other_chunk.index_value,
name=other_chunk.name,
)
out_chunks.append(out_chunk)
new_op = op.copy()
if isinstance(other, SERIES_TYPE):
return new_op.new_seriess(
op.inputs,
other.shape,
nsplits=other.nsplits,
dtype=out.dtype,
index_value=other.index_value,
chunks=out_chunks,
)
else:
return new_op.new_dataframes(
op.inputs,
other.shape,
nsplits=other.nsplits,
dtypes=out.dtypes,
index_value=other.index_value,
columns_value=other.columns_value,
chunks=out_chunks,
)
|
def _tile_with_tensor(cls, op):
out = op.outputs[0]
axis = op.axis
rhs_is_tensor = isinstance(op.rhs, TENSOR_TYPE)
tensor, other = (op.rhs, op.lhs) if rhs_is_tensor else (op.lhs, op.rhs)
if tensor.shape == other.shape:
tensor = tensor.rechunk(other.nsplits)._inplace_tile()
else:
# shape differs only when dataframe add 1-d tensor, we need rechunk on columns axis.
if op.axis in ["columns", 1] and other.ndim == 1:
# force axis == 0 if it's Series other than DataFrame
axis = 0
rechunk_size = (
other.nsplits[1] if axis == "columns" or axis == 1 else other.nsplits[0]
)
if tensor.ndim > 0:
tensor = tensor.rechunk((rechunk_size,))._inplace_tile()
cum_splits = [0] + np.cumsum(other.nsplits[axis]).tolist()
out_chunks = []
for out_index in itertools.product(*(map(range, other.chunk_shape))):
tensor_chunk = tensor.cix[out_index[: tensor.ndim]]
other_chunk = other.cix[out_index]
out_op = op.copy().reset_key()
inputs = (
[other_chunk, tensor_chunk]
if rhs_is_tensor
else [tensor_chunk, other_chunk]
)
if isinstance(other_chunk, DATAFRAME_CHUNK_TYPE):
start = cum_splits[out_index[axis]]
end = cum_splits[out_index[axis] + 1]
chunk_dtypes = out.dtypes.iloc[start:end]
out_chunk = out_op.new_chunk(
inputs,
shape=other_chunk.shape,
index=other_chunk.index,
dtypes=chunk_dtypes,
index_value=other_chunk.index_value,
columns_value=other.columns_value,
)
else:
out_chunk = out_op.new_chunk(
inputs,
shape=other_chunk.shape,
index=other_chunk.index,
dtype=out.dtype,
index_value=other_chunk.index_value,
name=other_chunk.name,
)
out_chunks.append(out_chunk)
new_op = op.copy()
if isinstance(other, SERIES_TYPE):
return new_op.new_seriess(
op.inputs,
other.shape,
nsplits=other.nsplits,
dtype=out.dtype,
index_value=other.index_value,
chunks=out_chunks,
)
else:
return new_op.new_dataframes(
op.inputs,
other.shape,
nsplits=other.nsplits,
dtypes=out.dtypes,
index_value=other.index_value,
columns_value=other.columns_value,
chunks=out_chunks,
)
|
https://github.com/mars-project/mars/issues/1771
|
Traceback (most recent call last):
File "/Users/wenjun.swj/miniconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 3417, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-9-60fd09690beb>", line 1, in <module>
df[df[0] > 0.5].execute()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 643, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 639, in run
self.data.execute(session, **kw)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 379, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 860, in execute_tileables
chunk_graph = chunk_graph_builder.build(
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 347, in build
chunk_graph = super().build(
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 201, in _tile
tds[0]._inplace_tile()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 168, in _inplace_tile
return handler.inplace_tile(self)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 136, in inplace_tile
dispatched = self.dispatch(to_tile.op)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/indexing/getitem.py", line 273, in tile
return cls.tile_with_mask(op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/indexing/getitem.py", line 287, in tile_with_mask
align_dataframe_series(in_df, mask, axis='index')
File "/Users/wenjun.swj/Code/mars/mars/dataframe/align.py", line 713, in align_dataframe_series
index_splits, index_nsplits = _calc_axis_splits(left.index_value, right.index_value,
File "/Users/wenjun.swj/Code/mars/mars/dataframe/align.py", line 490, in _calc_axis_splits
right_splits = left_splits = [[c] for c in left_chunk_index_min_max]
TypeError: 'NoneType' object is not iterable
|
TypeError
|
def __call__(self, df):
if self.col_names is not None:
# if col_names is a list, return a DataFrame, else return a Series
if isinstance(self._col_names, list):
dtypes = df.dtypes[self._col_names]
columns = parse_index(pd.Index(self._col_names), store_data=True)
return self.new_dataframe(
[df],
shape=(df.shape[0], len(self._col_names)),
dtypes=dtypes,
index_value=df.index_value,
columns_value=columns,
)
else:
dtype = df.dtypes[self._col_names]
return self.new_series(
[df],
shape=(df.shape[0],),
dtype=dtype,
index_value=df.index_value,
name=self._col_names,
)
else:
if isinstance(self.mask, (SERIES_TYPE, DATAFRAME_TYPE, TENSOR_TYPE)):
index_value = parse_index(
pd.Index(
[], dtype=df.index_value.to_pandas().dtype, name=df.index_value.name
),
df,
self._mask,
)
return self.new_dataframe(
[df, self._mask],
shape=(np.nan, df.shape[1]),
dtypes=df.dtypes,
index_value=index_value,
columns_value=df.columns_value,
)
else:
index_value = parse_index(
pd.Index(
[], dtype=df.index_value.to_pandas().dtype, name=df.index_value.name
),
df,
self._mask,
)
return self.new_dataframe(
[df],
shape=(np.nan, df.shape[1]),
dtypes=df.dtypes,
index_value=index_value,
columns_value=df.columns_value,
)
|
def __call__(self, df):
if self.col_names is not None:
# if col_names is a list, return a DataFrame, else return a Series
if isinstance(self._col_names, list):
dtypes = df.dtypes[self._col_names]
columns = parse_index(pd.Index(self._col_names), store_data=True)
return self.new_dataframe(
[df],
shape=(df.shape[0], len(self._col_names)),
dtypes=dtypes,
index_value=df.index_value,
columns_value=columns,
)
else:
dtype = df.dtypes[self._col_names]
return self.new_series(
[df],
shape=(df.shape[0],),
dtype=dtype,
index_value=df.index_value,
name=self._col_names,
)
else:
if isinstance(self.mask, (SERIES_TYPE, DATAFRAME_TYPE)):
index_value = parse_index(
pd.Index(
[], dtype=df.index_value.to_pandas().dtype, name=df.index_value.name
),
df,
self._mask,
)
return self.new_dataframe(
[df, self._mask],
shape=(np.nan, df.shape[1]),
dtypes=df.dtypes,
index_value=index_value,
columns_value=df.columns_value,
)
else:
index_value = parse_index(
pd.Index(
[], dtype=df.index_value.to_pandas().dtype, name=df.index_value.name
),
df,
self._mask,
)
return self.new_dataframe(
[df],
shape=(np.nan, df.shape[1]),
dtypes=df.dtypes,
index_value=index_value,
columns_value=df.columns_value,
)
|
https://github.com/mars-project/mars/issues/1771
|
Traceback (most recent call last):
File "/Users/wenjun.swj/miniconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 3417, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-9-60fd09690beb>", line 1, in <module>
df[df[0] > 0.5].execute()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 643, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 639, in run
self.data.execute(session, **kw)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 379, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 860, in execute_tileables
chunk_graph = chunk_graph_builder.build(
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 347, in build
chunk_graph = super().build(
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 201, in _tile
tds[0]._inplace_tile()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 168, in _inplace_tile
return handler.inplace_tile(self)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 136, in inplace_tile
dispatched = self.dispatch(to_tile.op)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/indexing/getitem.py", line 273, in tile
return cls.tile_with_mask(op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/indexing/getitem.py", line 287, in tile_with_mask
align_dataframe_series(in_df, mask, axis='index')
File "/Users/wenjun.swj/Code/mars/mars/dataframe/align.py", line 713, in align_dataframe_series
index_splits, index_nsplits = _calc_axis_splits(left.index_value, right.index_value,
File "/Users/wenjun.swj/Code/mars/mars/dataframe/align.py", line 490, in _calc_axis_splits
right_splits = left_splits = [[c] for c in left_chunk_index_min_max]
TypeError: 'NoneType' object is not iterable
|
TypeError
|
def tile_with_mask(cls, op):
in_df = op.inputs[0]
out_df = op.outputs[0]
out_chunks = []
if isinstance(op.mask, (SERIES_TYPE, DATAFRAME_TYPE, TENSOR_TYPE)):
mask = op.inputs[1]
if isinstance(op.mask, SERIES_TYPE):
nsplits, out_shape, df_chunks, mask_chunks = align_dataframe_series(
in_df, mask, axis="index"
)
elif isinstance(op.mask, DATAFRAME_TYPE):
nsplits, out_shape, df_chunks, mask_chunks = align_dataframe_dataframe(
in_df, mask
)
else:
# tensor
nsplits = in_df.nsplits
mask = mask.rechunk(nsplits[: mask.ndim])._inplace_tile()
out_shape = in_df.chunk_shape
df_chunks = in_df.chunks
mask_chunks = mask.chunks
out_chunk_indexes = itertools.product(*(range(s) for s in out_shape))
out_chunks = []
for i, idx, df_chunk in zip(itertools.count(), out_chunk_indexes, df_chunks):
if op.mask.ndim == 1:
mask_chunk = mask_chunks[df_chunk.index[0]]
else:
mask_chunk = mask_chunks[i]
index_value = parse_index(out_df.index_value.to_pandas(), df_chunk)
out_chunk = (
op.copy()
.reset_key()
.new_chunk(
[df_chunk, mask_chunk],
index=idx,
shape=(np.nan, df_chunk.shape[1]),
dtypes=df_chunk.dtypes,
index_value=index_value,
columns_value=df_chunk.columns_value,
)
)
out_chunks.append(out_chunk)
else:
check_chunks_unknown_shape([in_df], TilesError)
nsplits_acc = np.cumsum((0,) + in_df.nsplits[0])
for idx in range(in_df.chunk_shape[0]):
for idxj in range(in_df.chunk_shape[1]):
in_chunk = in_df.cix[idx, idxj]
chunk_op = op.copy().reset_key()
chunk_op._mask = op.mask.iloc[nsplits_acc[idx] : nsplits_acc[idx + 1]]
out_chunk = chunk_op.new_chunk(
[in_chunk],
index=in_chunk.index,
shape=(np.nan, in_chunk.shape[1]),
dtypes=in_chunk.dtypes,
index_value=in_df.index_value,
columns_value=in_chunk.columns_value,
)
out_chunks.append(out_chunk)
nsplits_on_columns = tuple(c.shape[1] for c in out_chunks if c.index[0] == 0)
row_chunk_num = len([c.shape[0] for c in out_chunks if c.index[1] == 0])
nsplits = ((np.nan,) * row_chunk_num, nsplits_on_columns)
new_op = op.copy()
return new_op.new_dataframes(
op.inputs,
shape=out_df.shape,
dtypes=out_df.dtypes,
index_value=out_df.index_value,
columns_value=out_df.columns_value,
chunks=out_chunks,
nsplits=nsplits,
)
|
def tile_with_mask(cls, op):
in_df = op.inputs[0]
out_df = op.outputs[0]
out_chunks = []
if isinstance(op.mask, (SERIES_TYPE, DATAFRAME_TYPE)):
mask = op.inputs[1]
if isinstance(op.mask, SERIES_TYPE):
nsplits, out_shape, df_chunks, mask_chunks = align_dataframe_series(
in_df, mask, axis="index"
)
else:
nsplits, out_shape, df_chunks, mask_chunks = align_dataframe_dataframe(
in_df, mask
)
out_chunk_indexes = itertools.product(*(range(s) for s in out_shape))
out_chunks = []
for i, idx, df_chunk in zip(itertools.count(), out_chunk_indexes, df_chunks):
if op.mask.ndim == 1:
mask_chunk = mask_chunks[df_chunk.index[0]]
else:
mask_chunk = mask_chunks[i]
index_value = parse_index(out_df.index_value.to_pandas(), df_chunk)
out_chunk = (
op.copy()
.reset_key()
.new_chunk(
[df_chunk, mask_chunk],
index=idx,
shape=(np.nan, df_chunk.shape[1]),
dtypes=df_chunk.dtypes,
index_value=index_value,
columns_value=df_chunk.columns_value,
)
)
out_chunks.append(out_chunk)
nsplits = ((np.nan,) * len(nsplits[0]), nsplits[1])
else:
check_chunks_unknown_shape([in_df], TilesError)
nsplits_acc = np.cumsum((0,) + in_df.nsplits[0])
for idx in range(in_df.chunk_shape[0]):
for idxj in range(in_df.chunk_shape[1]):
in_chunk = in_df.cix[idx, idxj]
chunk_op = op.copy().reset_key()
chunk_op._mask = op.mask.iloc[nsplits_acc[idx] : nsplits_acc[idx + 1]]
out_chunk = chunk_op.new_chunk(
[in_chunk],
index=in_chunk.index,
shape=(np.nan, in_chunk.shape[1]),
dtypes=in_chunk.dtypes,
index_value=in_df.index_value,
columns_value=in_chunk.columns_value,
)
out_chunks.append(out_chunk)
nsplits_on_columns = tuple(c.shape[1] for c in out_chunks if c.index[0] == 0)
row_chunk_num = len([c.shape[0] for c in out_chunks if c.index[1] == 0])
nsplits = ((np.nan,) * row_chunk_num, nsplits_on_columns)
new_op = op.copy()
return new_op.new_dataframes(
op.inputs,
shape=out_df.shape,
dtypes=out_df.dtypes,
index_value=out_df.index_value,
columns_value=out_df.columns_value,
chunks=out_chunks,
nsplits=nsplits,
)
|
https://github.com/mars-project/mars/issues/1771
|
Traceback (most recent call last):
File "/Users/wenjun.swj/miniconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 3417, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-9-60fd09690beb>", line 1, in <module>
df[df[0] > 0.5].execute()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 643, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 639, in run
self.data.execute(session, **kw)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 379, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 860, in execute_tileables
chunk_graph = chunk_graph_builder.build(
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 347, in build
chunk_graph = super().build(
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 201, in _tile
tds[0]._inplace_tile()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 168, in _inplace_tile
return handler.inplace_tile(self)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 136, in inplace_tile
dispatched = self.dispatch(to_tile.op)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/indexing/getitem.py", line 273, in tile
return cls.tile_with_mask(op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/indexing/getitem.py", line 287, in tile_with_mask
align_dataframe_series(in_df, mask, axis='index')
File "/Users/wenjun.swj/Code/mars/mars/dataframe/align.py", line 713, in align_dataframe_series
index_splits, index_nsplits = _calc_axis_splits(left.index_value, right.index_value,
File "/Users/wenjun.swj/Code/mars/mars/dataframe/align.py", line 490, in _calc_axis_splits
right_splits = left_splits = [[c] for c in left_chunk_index_min_max]
TypeError: 'NoneType' object is not iterable
|
TypeError
|
def execute(cls, ctx, op):
if op.mask is None:
df = ctx[op.inputs[0].key]
ctx[op.outputs[0].key] = df[op.col_names]
else:
df = ctx[op.inputs[0].key]
if isinstance(
op.mask, (SERIES_CHUNK_TYPE, DATAFRAME_CHUNK_TYPE, TENSOR_CHUNK_TYPE)
):
mask = ctx[op.inputs[1].key]
else:
mask = op.mask
if hasattr(mask, "reindex_like"):
mask = mask.reindex_like(df).fillna(False)
if mask.ndim == 2:
mask = mask[df.columns.tolist()]
ctx[op.outputs[0].key] = df[mask]
|
def execute(cls, ctx, op):
if op.mask is None:
df = ctx[op.inputs[0].key]
ctx[op.outputs[0].key] = df[op.col_names]
else:
df = ctx[op.inputs[0].key]
if isinstance(op.mask, (SERIES_CHUNK_TYPE, DATAFRAME_CHUNK_TYPE)):
mask = ctx[op.inputs[1].key]
else:
mask = op.mask
mask = mask.reindex_like(df).fillna(False)
if mask.ndim == 2:
mask = mask[df.columns.tolist()]
ctx[op.outputs[0].key] = df[mask]
|
https://github.com/mars-project/mars/issues/1771
|
Traceback (most recent call last):
File "/Users/wenjun.swj/miniconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 3417, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-9-60fd09690beb>", line 1, in <module>
df[df[0] > 0.5].execute()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 643, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 639, in run
self.data.execute(session, **kw)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 379, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 860, in execute_tileables
chunk_graph = chunk_graph_builder.build(
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 347, in build
chunk_graph = super().build(
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 201, in _tile
tds[0]._inplace_tile()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 168, in _inplace_tile
return handler.inplace_tile(self)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 136, in inplace_tile
dispatched = self.dispatch(to_tile.op)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/indexing/getitem.py", line 273, in tile
return cls.tile_with_mask(op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/indexing/getitem.py", line 287, in tile_with_mask
align_dataframe_series(in_df, mask, axis='index')
File "/Users/wenjun.swj/Code/mars/mars/dataframe/align.py", line 713, in align_dataframe_series
index_splits, index_nsplits = _calc_axis_splits(left.index_value, right.index_value,
File "/Users/wenjun.swj/Code/mars/mars/dataframe/align.py", line 490, in _calc_axis_splits
right_splits = left_splits = [[c] for c in left_chunk_index_min_max]
TypeError: 'NoneType' object is not iterable
|
TypeError
|
def tile(cls, op: "DataFrameDropNA"):
in_df = op.inputs[0]
out_df = op.outputs[0]
# series tiling will go here
if len(in_df.chunk_shape) == 1 or in_df.chunk_shape[1] == 1:
return cls._tile_drop_directly(op)
subset_df = in_df
if op.subset:
subset_df = in_df[op.subset]._inplace_tile()
count_series = subset_df.agg(
"count", axis=1, _use_inf_as_na=op.use_inf_as_na
)._inplace_tile()
nsplits, out_shape, left_chunks, right_chunks = align_dataframe_series(
in_df, count_series, axis=0
)
out_chunk_indexes = itertools.product(*(range(s) for s in out_shape))
out_chunks = []
for out_idx, df_chunk in zip(out_chunk_indexes, left_chunks):
series_chunk = right_chunks[out_idx[0]]
kw = dict(
shape=(np.nan, nsplits[1][out_idx[1]]),
dtypes=df_chunk.dtypes,
index_value=df_chunk.index_value,
columns_value=df_chunk.columns_value,
)
new_op = op.copy().reset_key()
new_op._drop_directly = False
new_op._subset_size = len(op.subset) if op.subset else len(in_df.dtypes)
out_chunks.append(
new_op.new_chunk([df_chunk, series_chunk], index=out_idx, **kw)
)
new_op = op.copy().reset_key()
params = out_df.params.copy()
new_nsplits = list(tuple(ns) for ns in nsplits)
new_nsplits[0] = (np.nan,) * len(new_nsplits[0])
params.update(dict(nsplits=tuple(new_nsplits), chunks=out_chunks))
return new_op.new_tileables(op.inputs, **params)
|
def tile(cls, op: "DataFrameDropNA"):
in_df = op.inputs[0]
out_df = op.outputs[0]
if len(in_df.chunk_shape) == 1 or in_df.chunk_shape[1] == 1:
return cls._tile_drop_directly(op)
subset_df = in_df
if op.subset:
subset_df = in_df[op.subset]._inplace_tile()
count_series = subset_df.agg(
"count", axis=1, _use_inf_as_na=op.use_inf_as_na
)._inplace_tile()
nsplits, out_shape, left_chunks, right_chunks = align_dataframe_series(
in_df, count_series, axis=0
)
out_chunk_indexes = itertools.product(*(range(s) for s in out_shape))
out_chunks = []
for out_idx, df_chunk in zip(out_chunk_indexes, left_chunks):
series_chunk = right_chunks[out_idx[0]]
kw = dict(
shape=(np.nan, nsplits[1][out_idx[1]]),
index_value=df_chunk.index_value,
columns_value=df_chunk.columns_value,
)
new_op = op.copy().reset_key()
new_op._drop_directly = False
new_op._subset_size = len(op.subset) if op.subset else len(in_df.dtypes)
out_chunks.append(
new_op.new_chunk([df_chunk, series_chunk], index=out_idx, **kw)
)
new_op = op.copy().reset_key()
params = out_df.params.copy()
new_nsplits = list(tuple(ns) for ns in nsplits)
new_nsplits[0] = (np.nan,) * len(new_nsplits[0])
params.update(dict(nsplits=tuple(new_nsplits), chunks=out_chunks))
return new_op.new_tileables(op.inputs, **params)
|
https://github.com/mars-project/mars/issues/1771
|
Traceback (most recent call last):
File "/Users/wenjun.swj/miniconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 3417, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-9-60fd09690beb>", line 1, in <module>
df[df[0] > 0.5].execute()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 643, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 639, in run
self.data.execute(session, **kw)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 379, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 860, in execute_tileables
chunk_graph = chunk_graph_builder.build(
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 347, in build
chunk_graph = super().build(
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 201, in _tile
tds[0]._inplace_tile()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 168, in _inplace_tile
return handler.inplace_tile(self)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 136, in inplace_tile
dispatched = self.dispatch(to_tile.op)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/indexing/getitem.py", line 273, in tile
return cls.tile_with_mask(op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/indexing/getitem.py", line 287, in tile_with_mask
align_dataframe_series(in_df, mask, axis='index')
File "/Users/wenjun.swj/Code/mars/mars/dataframe/align.py", line 713, in align_dataframe_series
index_splits, index_nsplits = _calc_axis_splits(left.index_value, right.index_value,
File "/Users/wenjun.swj/Code/mars/mars/dataframe/align.py", line 490, in _calc_axis_splits
right_splits = left_splits = [[c] for c in left_chunk_index_min_max]
TypeError: 'NoneType' object is not iterable
|
TypeError
|
def _call(self, x1, x2, out=None, where=None):
# check tensor ufunc, if x1 or x2 is not a tensor, e.g. Mars DataFrame
# which implements tensor ufunc, will delegate the computation
# to it if possible
ret = self._call_tensor_ufunc(x1, x2, out=out, where=where)
if ret is not None:
return ret
x1, x2, out, where = self._process_inputs(x1, x2, out, where)
# check broadcast
x1_shape = () if np.isscalar(x1) else x1.shape
x2_shape = () if np.isscalar(x2) else x2.shape
shape = broadcast_shape(x1_shape, x2_shape)
order = self._calc_order(x1, x2, out)
inputs = filter_inputs([x1, x2, out, where])
t = self.new_tensor(inputs, shape, order=order)
if out is None:
return t
check_out_param(out, t, getattr(self, "_casting"))
out_shape, out_dtype = out.shape, out.dtype
# if `out` is specified, use out's dtype and shape
if t.shape != out_shape:
t = self.new_tensor(inputs, out_shape, order=order)
setattr(self, "_dtype", out_dtype)
out.data = t.data
return out
|
def _call(self, x1, x2, out=None, where=None):
x1, x2, out, where = self._process_inputs(x1, x2, out, where)
# check broadcast
x1_shape = () if np.isscalar(x1) else x1.shape
x2_shape = () if np.isscalar(x2) else x2.shape
shape = broadcast_shape(x1_shape, x2_shape)
order = self._calc_order(x1, x2, out)
inputs = filter_inputs([x1, x2, out, where])
t = self.new_tensor(inputs, shape, order=order)
if out is None:
return t
check_out_param(out, t, getattr(self, "_casting"))
out_shape, out_dtype = out.shape, out.dtype
# if `out` is specified, use out's dtype and shape
if t.shape != out_shape:
t = self.new_tensor(inputs, out_shape, order=order)
setattr(self, "_dtype", out_dtype)
out.data = t.data
return out
|
https://github.com/mars-project/mars/issues/1771
|
Traceback (most recent call last):
File "/Users/wenjun.swj/miniconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 3417, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-9-60fd09690beb>", line 1, in <module>
df[df[0] > 0.5].execute()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 643, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 639, in run
self.data.execute(session, **kw)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 379, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 860, in execute_tileables
chunk_graph = chunk_graph_builder.build(
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 347, in build
chunk_graph = super().build(
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 201, in _tile
tds[0]._inplace_tile()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 168, in _inplace_tile
return handler.inplace_tile(self)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 136, in inplace_tile
dispatched = self.dispatch(to_tile.op)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/indexing/getitem.py", line 273, in tile
return cls.tile_with_mask(op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/indexing/getitem.py", line 287, in tile_with_mask
align_dataframe_series(in_df, mask, axis='index')
File "/Users/wenjun.swj/Code/mars/mars/dataframe/align.py", line 713, in align_dataframe_series
index_splits, index_nsplits = _calc_axis_splits(left.index_value, right.index_value,
File "/Users/wenjun.swj/Code/mars/mars/dataframe/align.py", line 490, in _calc_axis_splits
right_splits = left_splits = [[c] for c in left_chunk_index_min_max]
TypeError: 'NoneType' object is not iterable
|
TypeError
|
def fetch_data(self, session_id, chunk_key, index_obj=None, compression_type=None):
logger.debug("Sending data %s from %s", chunk_key, self.uid)
if compression_type is None:
compression_type = dataserializer.CompressType(
options.worker.transfer_compression
)
if index_obj is None:
if options.vineyard.socket:
target_devs = [
DataStorageDevice.VINEYARD,
DataStorageDevice.DISK,
] # pragma: no cover
else:
target_devs = [DataStorageDevice.SHARED_MEMORY, DataStorageDevice.DISK]
ev = self._result_copy_ref.start_copy(session_id, chunk_key, target_devs)
if ev:
ev.wait(options.worker.prepare_data_timeout)
reader = self.storage_client.create_reader(
session_id,
chunk_key,
target_devs,
packed=True,
packed_compression=compression_type,
_promise=False,
)
with reader:
pool = reader.get_io_pool()
return pool.submit(reader.read).result()
else:
try:
if options.vineyard.socket:
memory_device = DataStorageDevice.VINEYARD # pragma: no cover
else:
memory_device = DataStorageDevice.SHARED_MEMORY
value = self.storage_client.get_object(
session_id, chunk_key, [memory_device], _promise=False
)
except IOError:
reader = self.storage_client.create_reader(
session_id,
chunk_key,
[DataStorageDevice.DISK],
packed=False,
_promise=False,
)
with reader:
pool = reader.get_io_pool()
value = dataserializer.deserialize(pool.submit(reader.read).result())
try:
sliced_value = value.iloc[tuple(index_obj)]
except AttributeError:
sliced_value = value[tuple(index_obj)]
return self._serialize_pool.submit(
dataserializer.dumps, sliced_value, compress=compression_type
).result()
|
def fetch_data(self, session_id, chunk_key, index_obj=None, compression_type=None):
if compression_type is None:
compression_type = dataserializer.CompressType(
options.worker.transfer_compression
)
if index_obj is None:
if options.vineyard.socket:
target_devs = [
DataStorageDevice.VINEYARD,
DataStorageDevice.DISK,
] # pragma: no cover
else:
target_devs = [DataStorageDevice.SHARED_MEMORY, DataStorageDevice.DISK]
ev = self._result_copy_ref.start_copy(session_id, chunk_key, target_devs)
if ev:
ev.wait(options.worker.prepare_data_timeout)
reader = self.storage_client.create_reader(
session_id,
chunk_key,
target_devs,
packed=True,
packed_compression=compression_type,
_promise=False,
)
with reader:
pool = reader.get_io_pool()
return pool.submit(reader.read).result()
else:
try:
if options.vineyard.socket:
memory_device = DataStorageDevice.VINEYARD # pragma: no cover
else:
memory_device = DataStorageDevice.SHARED_MEMORY
value = self.storage_client.get_object(
session_id, chunk_key, [memory_device], _promise=False
)
except IOError:
reader = self.storage_client.create_reader(
session_id,
chunk_key,
[DataStorageDevice.DISK],
packed=False,
_promise=False,
)
with reader:
pool = reader.get_io_pool()
value = dataserializer.deserialize(pool.submit(reader.read).result())
try:
sliced_value = value.iloc[tuple(index_obj)]
except AttributeError:
sliced_value = value[tuple(index_obj)]
return self._serialize_pool.submit(
dataserializer.dumps, sliced_value, compress=compression_type
).result()
|
https://github.com/mars-project/mars/issues/1771
|
Traceback (most recent call last):
File "/Users/wenjun.swj/miniconda3/lib/python3.8/site-packages/IPython/core/interactiveshell.py", line 3417, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-9-60fd09690beb>", line 1, in <module>
df[df[0] > 0.5].execute()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 643, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 639, in run
self.data.execute(session, **kw)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 379, in execute
return run()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 374, in run
session.run(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 505, in run
result = self._sess.run(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 111, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/executor.py", line 860, in execute_tileables
chunk_graph = chunk_graph_builder.build(
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 347, in build
chunk_graph = super().build(
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 262, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 301, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 242, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 337, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 201, in _tile
tds[0]._inplace_tile()
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 168, in _inplace_tile
return handler.inplace_tile(self)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 136, in inplace_tile
dispatched = self.dispatch(to_tile.op)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 451, in _inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/indexing/getitem.py", line 273, in tile
return cls.tile_with_mask(op)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/indexing/getitem.py", line 287, in tile_with_mask
align_dataframe_series(in_df, mask, axis='index')
File "/Users/wenjun.swj/Code/mars/mars/dataframe/align.py", line 713, in align_dataframe_series
index_splits, index_nsplits = _calc_axis_splits(left.index_value, right.index_value,
File "/Users/wenjun.swj/Code/mars/mars/dataframe/align.py", line 490, in _calc_axis_splits
right_splits = left_splits = [[c] for c in left_chunk_index_min_max]
TypeError: 'NoneType' object is not iterable
|
TypeError
|
def series_from_tensor(tensor, index=None, name=None, gpu=None, sparse=False):
if tensor.ndim > 1 or tensor.ndim <= 0:
raise TypeError(f"Not support create Series from {tensor.ndim} dims tensor")
gpu = tensor.op.gpu if gpu is None else gpu
op = SeriesFromTensor(dtype=tensor.dtype, gpu=gpu, sparse=sparse)
return op(tensor, index, name)
|
def series_from_tensor(tensor, index=None, name=None, gpu=None, sparse=False):
if tensor.ndim > 1 or tensor.ndim <= 0:
raise TypeError(f"Not support create DataFrame from {tensor.ndim} dims tensor")
gpu = tensor.op.gpu if gpu is None else gpu
op = SeriesFromTensor(dtype=tensor.dtype, gpu=gpu, sparse=sparse)
return op(tensor, index, name)
|
https://github.com/mars-project/mars/issues/1754
|
In [1]: import mars.dataframe as md
In [2]: import mars.tensor as mt
In [3]: df = md.DataFrame(mt.random.rand(10 ,3))
In [4]: df['a'] = df[0].mean()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-4-fdfc58da199a> in <module>
----> 1 df['a'] = df[0].mean()
~/Workspace/mars/mars/dataframe/indexing/setitem.py in dataframe_setitem(df, col, value)
172 def dataframe_setitem(df, col, value):
173 op = DataFrameSetitem(target=df, indexes=col, value=value)
--> 174 return op(df, value)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
449 def _inner(*args, **kwargs):
450 with self:
--> 451 return func(*args, **kwargs)
452
453 return _inner
~/Workspace/mars/mars/dataframe/indexing/setitem.py in __call__(self, target, value)
69 value_dtype = value.dtype
70 elif is_list_like(value) or isinstance(value, TENSOR_TYPE):
---> 71 value = asseries(value, index=target.index)
72 inputs.append(value)
73 value_dtype = value.dtype
~/Workspace/mars/mars/dataframe/initializer.py in __init__(self, data, index, dtype, name, copy, chunk_size, gpu, sparse)
66 if chunk_size is not None:
67 data = data.rechunk(chunk_size)
---> 68 series = series_from_tensor(data, index=index, name=name, gpu=gpu, sparse=sparse)
69 elif isinstance(index, INDEX_TYPE):
70 series = series_from_tensor(astensor(data, chunk_size=chunk_size), index=index,
~/Workspace/mars/mars/dataframe/datasource/from_tensor.py in series_from_tensor(tensor, index, name, gpu, sparse)
467 def series_from_tensor(tensor, index=None, name=None, gpu=None, sparse=False):
468 if tensor.ndim > 1 or tensor.ndim <= 0:
--> 469 raise TypeError(f'Not support create DataFrame from {tensor.ndim} dims tensor')
470 gpu = tensor.op.gpu if gpu is None else gpu
471 op = SeriesFromTensor(dtype=tensor.dtype, gpu=gpu, sparse=sparse)
TypeError: Not support create DataFrame from 0 dims tensor
|
TypeError
|
def __call__(self, target: DataFrame, value):
inputs = [target]
if np.isscalar(value):
value_dtype = np.array(value).dtype
elif self._is_scalar_tensor(value):
inputs.append(value)
value_dtype = value.dtype
else:
if isinstance(value, (pd.Series, SERIES_TYPE)):
value = asseries(value)
inputs.append(value)
value_dtype = value.dtype
elif is_list_like(value) or isinstance(value, TENSOR_TYPE):
value = asseries(value, index=target.index)
inputs.append(value)
value_dtype = value.dtype
else: # pragma: no cover
raise TypeError(
"Wrong value type, could be one of scalar, Series or tensor"
)
if value.index_value.key != target.index_value.key: # pragma: no cover
raise NotImplementedError(
"Does not support setting value with different index for now"
)
index_value = target.index_value
dtypes = target.dtypes.copy(deep=True)
dtypes.loc[self._indexes] = value_dtype
columns_value = parse_index(dtypes.index, store_data=True)
ret = self.new_dataframe(
inputs,
shape=(target.shape[0], len(dtypes)),
dtypes=dtypes,
index_value=index_value,
columns_value=columns_value,
)
target.data = ret.data
|
def __call__(self, target: DataFrame, value):
inputs = [target]
if np.isscalar(value):
value_dtype = np.array(value).dtype
else:
if isinstance(value, (pd.Series, SERIES_TYPE)):
value = asseries(value)
inputs.append(value)
value_dtype = value.dtype
elif is_list_like(value) or isinstance(value, TENSOR_TYPE):
value = asseries(value, index=target.index)
inputs.append(value)
value_dtype = value.dtype
else: # pragma: no cover
raise TypeError(
"Wrong value type, could be one of scalar, Series or tensor"
)
if value.index_value.key != target.index_value.key: # pragma: no cover
raise NotImplementedError(
"Does not support setting value with different index for now"
)
index_value = target.index_value
dtypes = target.dtypes.copy(deep=True)
dtypes.loc[self._indexes] = value_dtype
columns_value = parse_index(dtypes.index, store_data=True)
ret = self.new_dataframe(
inputs,
shape=(target.shape[0], len(dtypes)),
dtypes=dtypes,
index_value=index_value,
columns_value=columns_value,
)
target.data = ret.data
|
https://github.com/mars-project/mars/issues/1754
|
In [1]: import mars.dataframe as md
In [2]: import mars.tensor as mt
In [3]: df = md.DataFrame(mt.random.rand(10 ,3))
In [4]: df['a'] = df[0].mean()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-4-fdfc58da199a> in <module>
----> 1 df['a'] = df[0].mean()
~/Workspace/mars/mars/dataframe/indexing/setitem.py in dataframe_setitem(df, col, value)
172 def dataframe_setitem(df, col, value):
173 op = DataFrameSetitem(target=df, indexes=col, value=value)
--> 174 return op(df, value)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
449 def _inner(*args, **kwargs):
450 with self:
--> 451 return func(*args, **kwargs)
452
453 return _inner
~/Workspace/mars/mars/dataframe/indexing/setitem.py in __call__(self, target, value)
69 value_dtype = value.dtype
70 elif is_list_like(value) or isinstance(value, TENSOR_TYPE):
---> 71 value = asseries(value, index=target.index)
72 inputs.append(value)
73 value_dtype = value.dtype
~/Workspace/mars/mars/dataframe/initializer.py in __init__(self, data, index, dtype, name, copy, chunk_size, gpu, sparse)
66 if chunk_size is not None:
67 data = data.rechunk(chunk_size)
---> 68 series = series_from_tensor(data, index=index, name=name, gpu=gpu, sparse=sparse)
69 elif isinstance(index, INDEX_TYPE):
70 series = series_from_tensor(astensor(data, chunk_size=chunk_size), index=index,
~/Workspace/mars/mars/dataframe/datasource/from_tensor.py in series_from_tensor(tensor, index, name, gpu, sparse)
467 def series_from_tensor(tensor, index=None, name=None, gpu=None, sparse=False):
468 if tensor.ndim > 1 or tensor.ndim <= 0:
--> 469 raise TypeError(f'Not support create DataFrame from {tensor.ndim} dims tensor')
470 gpu = tensor.op.gpu if gpu is None else gpu
471 op = SeriesFromTensor(dtype=tensor.dtype, gpu=gpu, sparse=sparse)
TypeError: Not support create DataFrame from 0 dims tensor
|
TypeError
|
def tile(cls, op):
out = op.outputs[0]
target = op.target
value = op.value
col = op.indexes
columns = target.columns_value.to_pandas()
is_value_scalar = np.isscalar(value) or cls._is_scalar_tensor(value)
if not is_value_scalar:
# check if all chunk's index_value are identical
target_chunk_index_values = [
c.index_value for c in target.chunks if c.index[1] == 0
]
value_chunk_index_values = [v.index_value for v in value.chunks]
is_identical = len(target_chunk_index_values) == len(
target_chunk_index_values
) and all(
c.key == v.key
for c, v in zip(target_chunk_index_values, value_chunk_index_values)
)
if not is_identical:
# do rechunk
if any(np.isnan(s) for s in target.nsplits[0]) or any(
np.isnan(s) for s in value.nsplits[0]
): # pragma: no cover
raise TilesError("target or value has unknown chunk shape")
value = value.rechunk({0: target.nsplits[0]})._inplace_tile()
out_chunks = []
nsplits = [list(ns) for ns in target.nsplits]
if col not in columns:
nsplits[1][-1] += 1
column_chunk_shape = target.chunk_shape[1]
# append to the last chunk on columns axis direction
for c in target.chunks:
if c.index[-1] != column_chunk_shape - 1:
# not effected, just output
out_chunks.append(c)
else:
chunk_op = op.copy().reset_key()
if pd.api.types.is_scalar(value):
chunk_inputs = [c]
elif is_value_scalar:
chunk_inputs = [c, value.chunks[0]]
else:
value_chunk = value.cix[c.index[0],]
chunk_inputs = [c, value_chunk]
dtypes = c.dtypes.copy(deep=True)
dtypes.loc[out.dtypes.index[-1]] = out.dtypes.iloc[-1]
chunk = chunk_op.new_chunk(
chunk_inputs,
shape=(c.shape[0], c.shape[1] + 1),
dtypes=dtypes,
index_value=c.index_value,
columns_value=parse_index(dtypes.index, store_data=True),
index=c.index,
)
out_chunks.append(chunk)
else:
# replace exist column
for c in target.chunks:
if col in c.dtypes:
chunk_inputs = [c]
if not np.isscalar(value):
chunk_inputs.append(value.cix[c.index[0],])
chunk_op = op.copy().reset_key()
chunk = chunk_op.new_chunk(
chunk_inputs,
shape=c.shape,
dtypes=c.dtypes,
index_value=c.index_value,
columns_value=c.columns_value,
index=c.index,
)
out_chunks.append(chunk)
else:
out_chunks.append(c)
params = out.params
params["nsplits"] = tuple(tuple(ns) for ns in nsplits)
params["chunks"] = out_chunks
new_op = op.copy()
return new_op.new_tileables(op.inputs, kws=[params])
|
def tile(cls, op):
out = op.outputs[0]
target = op.target
value = op.value
col = op.indexes
columns = target.columns_value.to_pandas()
if not np.isscalar(value):
# check if all chunk's index_value are identical
target_chunk_index_values = [
c.index_value for c in target.chunks if c.index[1] == 0
]
value_chunk_index_values = [v.index_value for v in value.chunks]
is_identical = len(target_chunk_index_values) == len(
target_chunk_index_values
) and all(
c.key == v.key
for c, v in zip(target_chunk_index_values, value_chunk_index_values)
)
if not is_identical:
# do rechunk
if any(np.isnan(s) for s in target.nsplits[0]) or any(
np.isnan(s) for s in value.nsplits[0]
): # pragma: no cover
raise TilesError("target or value has unknown chunk shape")
value = value.rechunk({0: target.nsplits[0]})._inplace_tile()
out_chunks = []
nsplits = [list(ns) for ns in target.nsplits]
if col not in columns:
nsplits[1][-1] += 1
column_chunk_shape = target.chunk_shape[1]
# append to the last chunk on columns axis direction
for c in target.chunks:
if c.index[-1] != column_chunk_shape - 1:
# not effected, just output
out_chunks.append(c)
else:
chunk_op = op.copy().reset_key()
if np.isscalar(value):
chunk_inputs = [c]
else:
value_chunk = value.cix[c.index[0],]
chunk_inputs = [c, value_chunk]
dtypes = c.dtypes.copy(deep=True)
dtypes.loc[out.dtypes.index[-1]] = out.dtypes.iloc[-1]
chunk = chunk_op.new_chunk(
chunk_inputs,
shape=(c.shape[0], c.shape[1] + 1),
dtypes=dtypes,
index_value=c.index_value,
columns_value=parse_index(dtypes.index, store_data=True),
index=c.index,
)
out_chunks.append(chunk)
else:
# replace exist column
for c in target.chunks:
if col in c.dtypes:
chunk_inputs = [c]
if not np.isscalar(value):
chunk_inputs.append(value.cix[c.index[0],])
chunk_op = op.copy().reset_key()
chunk = chunk_op.new_chunk(
chunk_inputs,
shape=c.shape,
dtypes=c.dtypes,
index_value=c.index_value,
columns_value=c.columns_value,
index=c.index,
)
out_chunks.append(chunk)
else:
out_chunks.append(c)
params = out.params
params["nsplits"] = tuple(tuple(ns) for ns in nsplits)
params["chunks"] = out_chunks
new_op = op.copy()
return new_op.new_tileables(op.inputs, kws=[params])
|
https://github.com/mars-project/mars/issues/1754
|
In [1]: import mars.dataframe as md
In [2]: import mars.tensor as mt
In [3]: df = md.DataFrame(mt.random.rand(10 ,3))
In [4]: df['a'] = df[0].mean()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-4-fdfc58da199a> in <module>
----> 1 df['a'] = df[0].mean()
~/Workspace/mars/mars/dataframe/indexing/setitem.py in dataframe_setitem(df, col, value)
172 def dataframe_setitem(df, col, value):
173 op = DataFrameSetitem(target=df, indexes=col, value=value)
--> 174 return op(df, value)
~/Workspace/mars/mars/utils.py in _inner(*args, **kwargs)
449 def _inner(*args, **kwargs):
450 with self:
--> 451 return func(*args, **kwargs)
452
453 return _inner
~/Workspace/mars/mars/dataframe/indexing/setitem.py in __call__(self, target, value)
69 value_dtype = value.dtype
70 elif is_list_like(value) or isinstance(value, TENSOR_TYPE):
---> 71 value = asseries(value, index=target.index)
72 inputs.append(value)
73 value_dtype = value.dtype
~/Workspace/mars/mars/dataframe/initializer.py in __init__(self, data, index, dtype, name, copy, chunk_size, gpu, sparse)
66 if chunk_size is not None:
67 data = data.rechunk(chunk_size)
---> 68 series = series_from_tensor(data, index=index, name=name, gpu=gpu, sparse=sparse)
69 elif isinstance(index, INDEX_TYPE):
70 series = series_from_tensor(astensor(data, chunk_size=chunk_size), index=index,
~/Workspace/mars/mars/dataframe/datasource/from_tensor.py in series_from_tensor(tensor, index, name, gpu, sparse)
467 def series_from_tensor(tensor, index=None, name=None, gpu=None, sparse=False):
468 if tensor.ndim > 1 or tensor.ndim <= 0:
--> 469 raise TypeError(f'Not support create DataFrame from {tensor.ndim} dims tensor')
470 gpu = tensor.op.gpu if gpu is None else gpu
471 op = SeriesFromTensor(dtype=tensor.dtype, gpu=gpu, sparse=sparse)
TypeError: Not support create DataFrame from 0 dims tensor
|
TypeError
|
def _gen_shuffle_chunks(cls, op, in_df, chunks):
# generate map chunks
map_chunks = []
chunk_shape = (in_df.chunk_shape[0], 1)
for chunk in chunks:
# no longer consider as_index=False for the intermediate phases,
# will do reset_index at last if so
map_op = DataFrameGroupByOperand(
stage=OperandStage.map,
shuffle_size=chunk_shape[0],
output_types=[OutputType.dataframe_groupby],
)
map_chunks.append(
map_op.new_chunk(
[chunk],
shape=(np.nan, np.nan),
index=chunk.index,
index_value=op.outputs[0].index_value,
)
)
proxy_chunk = DataFrameShuffleProxy(output_types=[OutputType.dataframe]).new_chunk(
map_chunks, shape=()
)
# generate reduce chunks
reduce_chunks = []
for out_idx in itertools.product(*(range(s) for s in chunk_shape)):
reduce_op = DataFrameGroupByOperand(
stage=OperandStage.reduce,
shuffle_key=",".join(str(idx) for idx in out_idx),
output_types=[OutputType.dataframe_groupby],
)
reduce_chunks.append(
reduce_op.new_chunk(
[proxy_chunk], shape=(np.nan, np.nan), index=out_idx, index_value=None
)
)
return reduce_chunks
|
def _gen_shuffle_chunks(cls, op, in_df, chunks):
# generate map chunks
map_chunks = []
chunk_shape = (in_df.chunk_shape[0], 1)
for chunk in chunks:
# no longer consider as_index=False for the intermediate phases,
# will do reset_index at last if so
map_op = DataFrameGroupByOperand(
stage=OperandStage.map,
shuffle_size=chunk_shape[0],
output_types=[OutputType.dataframe_groupby],
)
map_chunks.append(
map_op.new_chunk(
[chunk],
shape=(np.nan, np.nan),
index=chunk.index,
index_value=op.outputs[0].index_value,
)
)
proxy_chunk = DataFrameShuffleProxy(output_types=[OutputType.dataframe]).new_chunk(
map_chunks, shape=()
)
# generate reduce chunks
reduce_chunks = []
for out_idx in itertools.product(*(range(s) for s in chunk_shape)):
reduce_op = DataFrameGroupByOperand(
stage=OperandStage.reduce,
shuffle_key=",".join(str(idx) for idx in out_idx),
output_types=[OutputType.dataframe_groupby],
)
reduce_chunks.append(
reduce_op.new_chunk(
[proxy_chunk],
shape=(np.nan, np.nan),
index=out_idx,
index_value=op.outputs[0].index_value,
)
)
return reduce_chunks
|
https://github.com/mars-project/mars/issues/1741
|
2020-12-02 11:19:40,309 mars.scheduler.operands.common 87 ERROR Attempt 1: Unexpected error KeyError occurred in executing operand 5c7a3b06d448300987640036d2f5a34e in 11.238.145.234:49708
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.5.5.zip/mars/promise.py", line 378, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.5.5.zip/mars/utils.py", line 377, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.5.5.zip/mars/worker/execution.py", line 564, in execute_graph
quota_request = self._prepare_quota_request(session_id, graph_key)
File "/home/admin/work/_public-mars-0.5.5.zip/mars/worker/execution.py", line 249, in _prepare_quota_request
memory_estimations = self._estimate_calc_memory(session_id, graph_key)
File "/home/admin/work/_public-mars-0.5.5.zip/mars/worker/execution.py", line 213, in _estimate_calc_memory
res = executor.execute_graph(graph_record.graph, graph_record.chunk_targets, mock=True)
File "/home/admin/work/_public-mars-0.5.5.zip/mars/executor.py", line 690, in execute_graph
res = graph_execution.execute(retval)
File "/home/admin/work/_public-mars-0.5.5.zip/mars/executor.py", line 574, in execute
return [self._chunk_results[key] for key in self._keys]
File "/home/admin/work/_public-mars-0.5.5.zip/mars/executor.py", line 574, in <listcomp>
return [self._chunk_results[key] for key in self._keys]
KeyError: '3990ec90331559138b6ecbc6d76fbd0d'
|
KeyError
|
def execute_map(cls, ctx, op):
is_dataframe_obj = op.is_dataframe_obj
by = op.by
chunk = op.outputs[0]
df = ctx[op.inputs[0].key]
deliver_by = False # output by for the upcoming process
if isinstance(by, list):
new_by = []
for v in by:
if isinstance(v, Base):
deliver_by = True
new_by.append(ctx[v.key])
else:
new_by.append(v)
by = new_by
if isinstance(by, list) or callable(by):
on = by
else:
on = None
if isinstance(df, tuple):
filters = hash_dataframe_on(df[0], on, op.shuffle_size, level=op.level)
else:
filters = hash_dataframe_on(df, on, op.shuffle_size, level=op.level)
def _take_index(src, f):
result = src.loc[f]
if src.index.names:
result.index.names = src.index.names
return result
for index_idx, index_filter in enumerate(filters):
if is_dataframe_obj:
group_key = ",".join([str(index_idx), str(chunk.index[1])])
else:
group_key = str(index_idx)
if deliver_by:
filtered_by = []
for v in by:
if isinstance(v, pd.Series):
filtered_by.append(_take_index(v, index_filter))
else:
filtered_by.append(v)
if isinstance(df, tuple):
ctx[(chunk.key, group_key)] = tuple(
_take_index(x, index_filter) for x in df
) + (filtered_by, deliver_by)
else:
ctx[(chunk.key, group_key)] = (
_take_index(df, index_filter),
filtered_by,
deliver_by,
)
else:
if isinstance(df, tuple):
ctx[(chunk.key, group_key)] = tuple(
_take_index(x, index_filter) for x in df
) + (deliver_by,)
else:
ctx[(chunk.key, group_key)] = _take_index(df, index_filter)
|
def execute_map(cls, ctx, op):
is_dataframe_obj = op.is_dataframe_obj
by = op.by
chunk = op.outputs[0]
df = ctx[op.inputs[0].key]
deliver_by = False # output by for the upcoming process
if isinstance(by, list):
new_by = []
for v in by:
if isinstance(v, Base):
deliver_by = True
new_by.append(ctx[v.key])
else:
new_by.append(v)
by = new_by
if isinstance(by, list) or callable(by):
on = by
else:
on = None
if isinstance(df, tuple):
filters = hash_dataframe_on(df[0], on, op.shuffle_size, level=op.level)
else:
filters = hash_dataframe_on(df, on, op.shuffle_size, level=op.level)
for index_idx, index_filter in enumerate(filters):
if is_dataframe_obj:
group_key = ",".join([str(index_idx), str(chunk.index[1])])
else:
group_key = str(index_idx)
if deliver_by:
filtered_by = []
for v in by:
if isinstance(v, pd.Series):
filtered_by.append(v.loc[index_filter])
else:
filtered_by.append(v)
if isinstance(df, tuple):
ctx[(chunk.key, group_key)] = tuple(x.loc[index_filter] for x in df) + (
filtered_by,
deliver_by,
)
else:
ctx[(chunk.key, group_key)] = (
df.loc[index_filter],
filtered_by,
deliver_by,
)
else:
if isinstance(df, tuple):
ctx[(chunk.key, group_key)] = tuple(x.loc[index_filter] for x in df) + (
deliver_by,
)
else:
ctx[(chunk.key, group_key)] = df.loc[index_filter]
|
https://github.com/mars-project/mars/issues/1741
|
2020-12-02 11:19:40,309 mars.scheduler.operands.common 87 ERROR Attempt 1: Unexpected error KeyError occurred in executing operand 5c7a3b06d448300987640036d2f5a34e in 11.238.145.234:49708
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.5.5.zip/mars/promise.py", line 378, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.5.5.zip/mars/utils.py", line 377, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.5.5.zip/mars/worker/execution.py", line 564, in execute_graph
quota_request = self._prepare_quota_request(session_id, graph_key)
File "/home/admin/work/_public-mars-0.5.5.zip/mars/worker/execution.py", line 249, in _prepare_quota_request
memory_estimations = self._estimate_calc_memory(session_id, graph_key)
File "/home/admin/work/_public-mars-0.5.5.zip/mars/worker/execution.py", line 213, in _estimate_calc_memory
res = executor.execute_graph(graph_record.graph, graph_record.chunk_targets, mock=True)
File "/home/admin/work/_public-mars-0.5.5.zip/mars/executor.py", line 690, in execute_graph
res = graph_execution.execute(retval)
File "/home/admin/work/_public-mars-0.5.5.zip/mars/executor.py", line 574, in execute
return [self._chunk_results[key] for key in self._keys]
File "/home/admin/work/_public-mars-0.5.5.zip/mars/executor.py", line 574, in <listcomp>
return [self._chunk_results[key] for key in self._keys]
KeyError: '3990ec90331559138b6ecbc6d76fbd0d'
|
KeyError
|
def _tile_head_tail(cls, op):
from ..merge import DataFrameConcat
inp = op.input
out = op.outputs[0]
combine_size = options.combine_size
chunks = inp.chunks
new_chunks = []
for c in chunks:
chunk_op = op.copy().reset_key()
params = out.params
params["index"] = c.index
params["shape"] = c.shape if np.isnan(c.shape[0]) else out.shape
new_chunks.append(chunk_op.new_chunk([c], kws=[params]))
chunks = new_chunks
while len(chunks) > 1:
new_size = ceildiv(len(chunks), combine_size)
new_chunks = []
for i in range(new_size):
in_chunks = chunks[combine_size * i : combine_size * (i + 1)]
chunk_index = (i, 0) if in_chunks[0].ndim == 2 else (i,)
if len(inp.shape) == 1:
shape = (sum(c.shape[0] for c in in_chunks),)
else:
shape = (sum(c.shape[0] for c in in_chunks), in_chunks[0].shape[1])
concat_chunk = DataFrameConcat(
axis=0, output_types=in_chunks[0].op.output_types
).new_chunk(in_chunks, index=chunk_index, shape=shape)
chunk_op = op.copy().reset_key()
params = out.params
params["index"] = chunk_index
params["shape"] = (
in_chunks[0].shape if np.isnan(in_chunks[0].shape[0]) else out.shape
)
new_chunks.append(chunk_op.new_chunk([concat_chunk], kws=[params]))
chunks = new_chunks
new_op = op.copy()
params = out.params
params["nsplits"] = tuple((s,) for s in out.shape)
params["chunks"] = chunks
return new_op.new_tileables(op.inputs, kws=[params])
|
def _tile_head_tail(cls, op):
from ..merge import DataFrameConcat
inp = op.input
out = op.outputs[0]
combine_size = options.combine_size
chunks = inp.chunks
new_chunks = []
for c in chunks:
chunk_op = op.copy().reset_key()
params = out.params
params["index"] = c.index
new_chunks.append(chunk_op.new_chunk([c], kws=[params]))
chunks = new_chunks
while len(chunks) > 1:
new_size = ceildiv(len(chunks), combine_size)
new_chunks = []
for i in range(new_size):
in_chunks = chunks[combine_size * i : combine_size * (i + 1)]
chunk_index = (i, 0) if in_chunks[0].ndim == 2 else (i,)
if len(inp.shape) == 1:
shape = (sum(c.shape[0] for c in in_chunks),)
else:
shape = (sum(c.shape[0] for c in in_chunks), in_chunks[0].shape[1])
concat_chunk = DataFrameConcat(
axis=0, output_types=in_chunks[0].op.output_types
).new_chunk(in_chunks, index=chunk_index, shape=shape)
chunk_op = op.copy().reset_key()
params = out.params
params["index"] = chunk_index
new_chunks.append(chunk_op.new_chunk([concat_chunk], kws=[params]))
chunks = new_chunks
new_op = op.copy()
params = out.params
params["nsplits"] = tuple((s,) for s in out.shape)
params["chunks"] = chunks
return new_op.new_tileables(op.inputs, kws=[params])
|
https://github.com/mars-project/mars/issues/1741
|
2020-12-02 11:19:40,309 mars.scheduler.operands.common 87 ERROR Attempt 1: Unexpected error KeyError occurred in executing operand 5c7a3b06d448300987640036d2f5a34e in 11.238.145.234:49708
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.5.5.zip/mars/promise.py", line 378, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.5.5.zip/mars/utils.py", line 377, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.5.5.zip/mars/worker/execution.py", line 564, in execute_graph
quota_request = self._prepare_quota_request(session_id, graph_key)
File "/home/admin/work/_public-mars-0.5.5.zip/mars/worker/execution.py", line 249, in _prepare_quota_request
memory_estimations = self._estimate_calc_memory(session_id, graph_key)
File "/home/admin/work/_public-mars-0.5.5.zip/mars/worker/execution.py", line 213, in _estimate_calc_memory
res = executor.execute_graph(graph_record.graph, graph_record.chunk_targets, mock=True)
File "/home/admin/work/_public-mars-0.5.5.zip/mars/executor.py", line 690, in execute_graph
res = graph_execution.execute(retval)
File "/home/admin/work/_public-mars-0.5.5.zip/mars/executor.py", line 574, in execute
return [self._chunk_results[key] for key in self._keys]
File "/home/admin/work/_public-mars-0.5.5.zip/mars/executor.py", line 574, in <listcomp>
return [self._chunk_results[key] for key in self._keys]
KeyError: '3990ec90331559138b6ecbc6d76fbd0d'
|
KeyError
|
def add_finished_terminal(self, op_key, final_state=None, exc=None):
"""
Add a terminal operand to finished set. Calling this method
will change graph state if all terminals are in finished states.
:param op_key: operand key
:param final_state: state of the operand
"""
if self._state not in (GraphState.RUNNING, GraphState.CANCELLING):
return
if exc is not None:
self._graph_meta_ref.set_exc_info(exc, _tell=True, _wait=False)
tileable_keys = self._terminal_chunk_op_key_to_tileable_key[op_key]
is_failed = final_state in (GraphState.CANCELLED, GraphState.FAILED)
terminal_tileable_count = len(self._terminal_tileable_key_to_chunk_op_keys)
try:
for tileable_key in tileable_keys:
self._target_tileable_finished[tileable_key].add(op_key)
if final_state == GraphState.FAILED:
if self.final_state != GraphState.CANCELLED:
self.final_state = GraphState.FAILED
elif final_state == GraphState.CANCELLED:
self.final_state = final_state
if (
self._target_tileable_finished[tileable_key]
== self._terminal_tileable_key_to_chunk_op_keys[tileable_key]
):
self._terminated_tileable_keys.add(tileable_key)
self._all_terminated_tileable_keys.add(tileable_key)
if (
not is_failed
and len(self._terminated_tileable_keys) == terminal_tileable_count
):
# update shape if tileable or its chunks have unknown shape
self._update_tileable_and_its_chunk_shapes()
except:
for tileable_key in tileable_keys:
self._target_tileable_finished[tileable_key].remove(op_key)
raise
terminated_chunks = self._op_key_to_chunk[op_key]
self._terminated_chunk_keys.update(
[c.key for c in terminated_chunks if c.key in self._terminal_chunk_keys]
)
if self._terminated_chunk_keys == self._terminal_chunk_keys:
if self._chunk_graph_builder.done or is_failed:
if self._chunk_graph_builder.prev_tileable_graph is not None:
# if failed before, clear intermediate data
to_free_tileable_keys = self._all_terminated_tileable_keys - set(
self._target_tileable_keys
)
skip_chunk_keys = set()
for target_tileable_data in self._target_tileable_datas:
tiled_target_tileable_data = self._tileable_key_opid_to_tiled[
target_tileable_data.key, target_tileable_data.op.id
][-1]
skip_chunk_keys.update(
[c.key for c in tiled_target_tileable_data.chunks]
)
[
self.free_tileable_data(k, skip_chunk_keys=skip_chunk_keys)
for k in to_free_tileable_keys
]
self.state = (
self.final_state
if self.final_state is not None
else GraphState.SUCCEEDED
)
self._graph_meta_ref.set_graph_end(_tell=True)
else:
self._execute_graph(compose=self._chunk_graph_builder.is_compose)
|
def add_finished_terminal(self, op_key, final_state=None, exc=None):
"""
Add a terminal operand to finished set. Calling this method
will change graph state if all terminals are in finished states.
:param op_key: operand key
:param final_state: state of the operand
"""
if self._state not in (GraphState.RUNNING, GraphState.CANCELLING):
return
if exc is not None:
self._graph_meta_ref.set_exc_info(exc, _tell=True, _wait=False)
tileable_keys = self._terminal_chunk_op_key_to_tileable_key[op_key]
is_failed = final_state in (GraphState.CANCELLED, GraphState.FAILED)
terminal_tileable_count = len(self._terminal_tileable_key_to_chunk_op_keys)
for tileable_key in tileable_keys:
self._target_tileable_finished[tileable_key].add(op_key)
if final_state == GraphState.FAILED:
if self.final_state != GraphState.CANCELLED:
self.final_state = GraphState.FAILED
elif final_state == GraphState.CANCELLED:
self.final_state = final_state
if (
self._target_tileable_finished[tileable_key]
== self._terminal_tileable_key_to_chunk_op_keys[tileable_key]
):
self._terminated_tileable_keys.add(tileable_key)
self._all_terminated_tileable_keys.add(tileable_key)
if (
not is_failed
and len(self._terminated_tileable_keys) == terminal_tileable_count
):
# update shape if tileable or its chunks have unknown shape
self._update_tileable_and_its_chunk_shapes()
terminated_chunks = self._op_key_to_chunk[op_key]
self._terminated_chunk_keys.update(
[c.key for c in terminated_chunks if c.key in self._terminal_chunk_keys]
)
if self._terminated_chunk_keys == self._terminal_chunk_keys:
if self._chunk_graph_builder.done or is_failed:
if self._chunk_graph_builder.prev_tileable_graph is not None:
# if failed before, clear intermediate data
to_free_tileable_keys = self._all_terminated_tileable_keys - set(
self._target_tileable_keys
)
skip_chunk_keys = set()
for target_tileable_data in self._target_tileable_datas:
tiled_target_tileable_data = self._tileable_key_opid_to_tiled[
target_tileable_data.key, target_tileable_data.op.id
][-1]
skip_chunk_keys.update(
[c.key for c in tiled_target_tileable_data.chunks]
)
[
self.free_tileable_data(k, skip_chunk_keys=skip_chunk_keys)
for k in to_free_tileable_keys
]
self.state = (
self.final_state
if self.final_state is not None
else GraphState.SUCCEEDED
)
self._graph_meta_ref.set_graph_end(_tell=True)
else:
self._execute_graph(compose=self._chunk_graph_builder.is_compose)
|
https://github.com/mars-project/mars/issues/1741
|
2020-12-02 11:19:40,309 mars.scheduler.operands.common 87 ERROR Attempt 1: Unexpected error KeyError occurred in executing operand 5c7a3b06d448300987640036d2f5a34e in 11.238.145.234:49708
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.5.5.zip/mars/promise.py", line 378, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.5.5.zip/mars/utils.py", line 377, in _wrapped
return func(*args, **kwargs)
File "/home/admin/work/_public-mars-0.5.5.zip/mars/worker/execution.py", line 564, in execute_graph
quota_request = self._prepare_quota_request(session_id, graph_key)
File "/home/admin/work/_public-mars-0.5.5.zip/mars/worker/execution.py", line 249, in _prepare_quota_request
memory_estimations = self._estimate_calc_memory(session_id, graph_key)
File "/home/admin/work/_public-mars-0.5.5.zip/mars/worker/execution.py", line 213, in _estimate_calc_memory
res = executor.execute_graph(graph_record.graph, graph_record.chunk_targets, mock=True)
File "/home/admin/work/_public-mars-0.5.5.zip/mars/executor.py", line 690, in execute_graph
res = graph_execution.execute(retval)
File "/home/admin/work/_public-mars-0.5.5.zip/mars/executor.py", line 574, in execute
return [self._chunk_results[key] for key in self._keys]
File "/home/admin/work/_public-mars-0.5.5.zip/mars/executor.py", line 574, in <listcomp>
return [self._chunk_results[key] for key in self._keys]
KeyError: '3990ec90331559138b6ecbc6d76fbd0d'
|
KeyError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.