after_merge
stringlengths 28
79.6k
| before_merge
stringlengths 20
79.6k
| url
stringlengths 38
71
| full_traceback
stringlengths 43
922k
| traceback_type
stringclasses 555
values |
|---|---|---|---|---|
def create_pool(self, *args, **kwargs):
self._service = SchedulerService(disable_failover=self.args.disable_failover)
self.n_process = int(self.args.nproc or resource.cpu_count())
kwargs["distributor"] = MarsDistributor(self.n_process, "s:h1:")
return super().create_pool(*args, **kwargs)
|
def create_pool(self, *args, **kwargs):
self._service = SchedulerService()
self.n_process = int(self.args.nproc or resource.cpu_count())
kwargs["distributor"] = MarsDistributor(self.n_process, "s:h1:")
return super().create_pool(*args, **kwargs)
|
https://github.com/mars-project/mars/issues/1479
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 129, in __repr__
return self._data.__repr__()
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1083, in __repr__
return self._to_str(representation=True)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1053, in _to_str
self, session=self._executed_sessions[-1])
File "/Users/wenjun.swj/Code/mars/mars/dataframe/utils.py", line 773, in fetch_corner_data
return df_or_series.fetch(session=session)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 376, in fetch
return session.fetch(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 491, in fetch
result = self._sess.fetch(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/web/session.py", line 265, in fetch
result_data = dataserializer.loads(resp.content)
File "/Users/wenjun.swj/Code/mars/mars/serialize/dataserializer.py", line 259, in loads
return pickle.loads(data)
ModuleNotFoundError: No module named 'pyarrow'
|
ModuleNotFoundError
|
def __init__(self, **kwargs):
self._cluster_info_ref = None
self._session_manager_ref = None
self._assigner_ref = None
self._resource_ref = None
self._chunk_meta_ref = None
self._kv_store_ref = None
self._node_info_ref = None
self._result_receiver_ref = None
options.scheduler.enable_failover = not (
kwargs.pop("disable_failover", None) or False
)
if kwargs: # pragma: no cover
raise TypeError(
"Keyword arguments %r cannot be recognized." % ", ".join(kwargs)
)
|
def __init__(self):
self._cluster_info_ref = None
self._session_manager_ref = None
self._assigner_ref = None
self._resource_ref = None
self._chunk_meta_ref = None
self._kv_store_ref = None
self._node_info_ref = None
self._result_receiver_ref = None
|
https://github.com/mars-project/mars/issues/1479
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 129, in __repr__
return self._data.__repr__()
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1083, in __repr__
return self._to_str(representation=True)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1053, in _to_str
self, session=self._executed_sessions[-1])
File "/Users/wenjun.swj/Code/mars/mars/dataframe/utils.py", line 773, in fetch_corner_data
return df_or_series.fetch(session=session)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 376, in fetch
return session.fetch(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 491, in fetch
result = self._sess.fetch(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/web/session.py", line 265, in fetch
result_data = dataserializer.loads(resp.content)
File "/Users/wenjun.swj/Code/mars/mars/serialize/dataserializer.py", line 259, in loads
return pickle.loads(data)
ModuleNotFoundError: No module named 'pyarrow'
|
ModuleNotFoundError
|
def config_args(self, parser):
super().config_args(parser)
parser.add_argument("--cpu-procs", help="number of processes used for cpu")
parser.add_argument(
"--cuda-device", help="CUDA device to use, if not specified, will use CPU only"
)
parser.add_argument("--net-procs", help="number of processes used for networking")
parser.add_argument(
"--io-procs",
help=argparse.SUPPRESS,
action=arg_deprecated_action("--net-procs"),
)
parser.add_argument("--phy-mem", help="physical memory size limit")
parser.add_argument(
"--ignore-avail-mem", action="store_true", help="ignore available memory"
)
parser.add_argument("--cache-mem", help="cache memory size limit")
parser.add_argument(
"--min-mem", help="minimal free memory required to start worker"
)
parser.add_argument("--spill-dir", help="spill directory")
parser.add_argument(
"--io-parallel-num",
help="make file io lock free, add this when using a mounted dfs",
)
parser.add_argument(
"--disable-proc-recover",
action="store_true",
help="disable recovering failed processes",
)
parser.add_argument(
"--plasma-dir",
help="path of plasma directory. When specified, the size "
"of plasma store will not be taken into account when "
"managing host memory",
)
compress_types = ", ".join(v.value for v in CompressType.__members__.values())
parser.add_argument(
"--disk-compression",
default=options.worker.disk_compression,
help="compression type used for disks, "
"can be selected from %s. %s by default"
% (compress_types, options.worker.disk_compression),
)
parser.add_argument(
"--transfer-compression",
default=options.worker.transfer_compression,
help="compression type used for network transfer, "
"can be selected from %s. %s by default"
% (compress_types, options.worker.transfer_compression),
)
|
def config_args(self, parser):
super().config_args(parser)
parser.add_argument("--cpu-procs", help="number of processes used for cpu")
parser.add_argument(
"--cuda-device", help="CUDA device to use, if not specified, will use CPU only"
)
parser.add_argument("--net-procs", help="number of processes used for networking")
parser.add_argument(
"--io-procs",
help=argparse.SUPPRESS,
action=arg_deprecated_action("--net-procs"),
)
parser.add_argument("--phy-mem", help="physical memory size limit")
parser.add_argument(
"--ignore-avail-mem", action="store_true", help="ignore available memory"
)
parser.add_argument("--cache-mem", help="cache memory size limit")
parser.add_argument(
"--min-mem", help="minimal free memory required to start worker"
)
parser.add_argument("--spill-dir", help="spill directory")
parser.add_argument(
"--lock-free-fileio",
action="store_true",
help="make file io lock free, add this when using a mounted dfs",
)
parser.add_argument(
"--plasma-dir",
help="path of plasma directory. When specified, the size "
"of plasma store will not be taken into account when "
"managing host memory",
)
compress_types = ", ".join(v.value for v in CompressType.__members__.values())
parser.add_argument(
"--disk-compression",
default=options.worker.disk_compression,
help="compression type used for disks, "
"can be selected from %s. %s by default"
% (compress_types, options.worker.disk_compression),
)
parser.add_argument(
"--transfer-compression",
default=options.worker.transfer_compression,
help="compression type used for network transfer, "
"can be selected from %s. %s by default"
% (compress_types, options.worker.transfer_compression),
)
|
https://github.com/mars-project/mars/issues/1479
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 129, in __repr__
return self._data.__repr__()
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1083, in __repr__
return self._to_str(representation=True)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1053, in _to_str
self, session=self._executed_sessions[-1])
File "/Users/wenjun.swj/Code/mars/mars/dataframe/utils.py", line 773, in fetch_corner_data
return df_or_series.fetch(session=session)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 376, in fetch
return session.fetch(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 491, in fetch
result = self._sess.fetch(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/web/session.py", line 265, in fetch
result_data = dataserializer.loads(resp.content)
File "/Users/wenjun.swj/Code/mars/mars/serialize/dataserializer.py", line 259, in loads
return pickle.loads(data)
ModuleNotFoundError: No module named 'pyarrow'
|
ModuleNotFoundError
|
def parse_args(self, parser, argv, environ=None):
args = super().parse_args(parser, argv)
environ = environ or os.environ
args.plasma_dir = args.plasma_dir or environ.get("MARS_PLASMA_DIRS")
args.spill_dir = args.spill_dir or environ.get("MARS_SPILL_DIRS")
args.cache_mem = args.cache_mem or environ.get("MARS_CACHE_MEM_SIZE")
args.disable_proc_recover = args.disable_proc_recover or bool(
int(environ.get("MARS_DISABLE_PROC_RECOVER", "0"))
)
args.io_parallel_num = args.io_parallel_num or int(
environ.get("MARS_IO_PARALLEL_NUM", "1")
)
if args.io_parallel_num == 1 and bool(
int(environ.get("MARS_LOCK_FREE_FILEIO", "0"))
):
args.io_parallel_num = 2**16
return args
|
def parse_args(self, parser, argv, environ=None):
args = super().parse_args(parser, argv)
args.plasma_dir = args.plasma_dir or os.environ.get("MARS_PLASMA_DIRS")
args.spill_dir = args.spill_dir or os.environ.get("MARS_SPILL_DIRS")
args.cache_mem = args.cache_mem or os.environ.get("MARS_CACHE_MEM_SIZE")
args.lock_free_fileio = args.lock_free_fileio or bool(
int(os.environ.get("MARS_LOCK_FREE_FILEIO", "0"))
)
return args
|
https://github.com/mars-project/mars/issues/1479
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 129, in __repr__
return self._data.__repr__()
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1083, in __repr__
return self._to_str(representation=True)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1053, in _to_str
self, session=self._executed_sessions[-1])
File "/Users/wenjun.swj/Code/mars/mars/dataframe/utils.py", line 773, in fetch_corner_data
return df_or_series.fetch(session=session)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 376, in fetch
return session.fetch(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 491, in fetch
result = self._sess.fetch(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/web/session.py", line 265, in fetch
result_data = dataserializer.loads(resp.content)
File "/Users/wenjun.swj/Code/mars/mars/serialize/dataserializer.py", line 259, in loads
return pickle.loads(data)
ModuleNotFoundError: No module named 'pyarrow'
|
ModuleNotFoundError
|
def create_pool(self, *args, **kwargs):
# here we create necessary actors on worker
# and distribute them over processes
cuda_devices = [self.args.cuda_device] if self.args.cuda_device else None
self._service = WorkerService(
advertise_addr=self.args.advertise,
n_cpu_process=self.args.cpu_procs,
n_net_process=self.args.net_procs or self.args.io_procs,
cuda_devices=cuda_devices,
spill_dirs=self.args.spill_dir,
io_parallel_num=self.args.io_parallel_num,
total_mem=self.args.phy_mem,
cache_mem_limit=self.args.cache_mem,
ignore_avail_mem=self.args.ignore_avail_mem,
min_mem_size=self.args.min_mem,
disk_compression=self.args.disk_compression.lower(),
transfer_compression=self.args.transfer_compression.lower(),
plasma_dir=self.args.plasma_dir,
use_ext_plasma_dir=bool(self.args.plasma_dir),
disable_proc_recover=self.args.disable_proc_recover,
)
# start plasma
self._service.start_plasma()
self.n_process = self._service.n_process
kwargs["distributor"] = MarsDistributor(self.n_process, "w:0:")
return super().create_pool(*args, **kwargs)
|
def create_pool(self, *args, **kwargs):
# here we create necessary actors on worker
# and distribute them over processes
cuda_devices = [self.args.cuda_device] if self.args.cuda_device else None
self._service = WorkerService(
advertise_addr=self.args.advertise,
n_cpu_process=self.args.cpu_procs,
n_net_process=self.args.net_procs or self.args.io_procs,
cuda_devices=cuda_devices,
spill_dirs=self.args.spill_dir,
lock_free_fileio=self.args.lock_free_fileio,
total_mem=self.args.phy_mem,
cache_mem_limit=self.args.cache_mem,
ignore_avail_mem=self.args.ignore_avail_mem,
min_mem_size=self.args.min_mem,
disk_compression=self.args.disk_compression.lower(),
transfer_compression=self.args.transfer_compression.lower(),
plasma_dir=self.args.plasma_dir,
use_ext_plasma_dir=bool(self.args.plasma_dir),
)
# start plasma
self._service.start_plasma()
self.n_process = self._service.n_process
kwargs["distributor"] = MarsDistributor(self.n_process, "w:0:")
return super().create_pool(*args, **kwargs)
|
https://github.com/mars-project/mars/issues/1479
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 129, in __repr__
return self._data.__repr__()
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1083, in __repr__
return self._to_str(representation=True)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1053, in _to_str
self, session=self._executed_sessions[-1])
File "/Users/wenjun.swj/Code/mars/mars/dataframe/utils.py", line 773, in fetch_corner_data
return df_or_series.fetch(session=session)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 376, in fetch
return session.fetch(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 491, in fetch
result = self._sess.fetch(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/web/session.py", line 265, in fetch
result_data = dataserializer.loads(resp.content)
File "/Users/wenjun.swj/Code/mars/mars/serialize/dataserializer.py", line 259, in loads
return pickle.loads(data)
ModuleNotFoundError: No module named 'pyarrow'
|
ModuleNotFoundError
|
def __init__(self, **kwargs):
self._plasma_store = None
self._storage_manager_ref = None
self._shared_holder_ref = None
self._task_queue_ref = None
self._mem_quota_ref = None
self._dispatch_ref = None
self._events_ref = None
self._status_ref = None
self._execution_ref = None
self._daemon_ref = None
self._receiver_manager_ref = None
self._cluster_info_ref = None
self._cpu_calc_actors = []
self._inproc_holder_actors = []
self._inproc_io_runner_actors = []
self._cuda_calc_actors = []
self._cuda_holder_actors = []
self._sender_actors = []
self._receiver_actors = []
self._spill_actors = []
self._process_helper_actors = []
self._result_sender_ref = None
self._advertise_addr = kwargs.pop("advertise_addr", None)
cuda_devices = kwargs.pop("cuda_devices", None) or os.environ.get(
"CUDA_VISIBLE_DEVICES"
)
if not cuda_devices:
self._n_cuda_process = 0
else:
cuda_devices = os.environ["CUDA_VISIBLE_DEVICES"] = ",".join(
str(d) for d in cuda_devices
)
if cuda_devices:
logger.info("Started Mars worker with CUDA cards %s", cuda_devices)
self._n_cuda_process = resource.cuda_count()
self._n_cpu_process = int(kwargs.pop("n_cpu_process", None) or resource.cpu_count())
self._n_net_process = int(kwargs.pop("n_net_process", None) or "4")
self._spill_dirs = kwargs.pop("spill_dirs", None)
if self._spill_dirs:
if isinstance(self._spill_dirs, str):
from .utils import parse_spill_dirs
self._spill_dirs = options.worker.spill_directory = parse_spill_dirs(
self._spill_dirs
)
else:
options.worker.spill_directory = self._spill_dirs
else:
self._spill_dirs = options.worker.spill_directory = []
options.worker.disk_compression = (
kwargs.pop("disk_compression", None) or options.worker.disk_compression
)
options.worker.transfer_compression = (
kwargs.pop("transfer_compression", None) or options.worker.transfer_compression
)
options.worker.io_parallel_num = kwargs.pop("io_parallel_num", None) or False
options.worker.recover_dead_process = not (
kwargs.pop("disable_proc_recover", None) or False
)
self._total_mem = kwargs.pop("total_mem", None)
self._cache_mem_limit = kwargs.pop("cache_mem_limit", None)
self._soft_mem_limit = kwargs.pop("soft_mem_limit", None) or "80%"
self._hard_mem_limit = kwargs.pop("hard_mem_limit", None) or "90%"
self._ignore_avail_mem = kwargs.pop("ignore_avail_mem", None) or False
self._min_mem_size = kwargs.pop("min_mem_size", None) or 128 * 1024**2
self._plasma_dir = kwargs.pop("plasma_dir", None)
self._use_ext_plasma_dir = kwargs.pop("use_ext_plasma_dir", None) or False
self._soft_quota_limit = self._soft_mem_limit
self._calc_memory_limits()
if kwargs: # pragma: no cover
raise TypeError(
"Keyword arguments %r cannot be recognized." % ", ".join(kwargs)
)
|
def __init__(self, **kwargs):
self._plasma_store = None
self._storage_manager_ref = None
self._shared_holder_ref = None
self._task_queue_ref = None
self._mem_quota_ref = None
self._dispatch_ref = None
self._events_ref = None
self._status_ref = None
self._execution_ref = None
self._daemon_ref = None
self._receiver_manager_ref = None
self._cluster_info_ref = None
self._cpu_calc_actors = []
self._inproc_holder_actors = []
self._inproc_io_runner_actors = []
self._cuda_calc_actors = []
self._cuda_holder_actors = []
self._sender_actors = []
self._receiver_actors = []
self._spill_actors = []
self._process_helper_actors = []
self._result_sender_ref = None
self._advertise_addr = kwargs.pop("advertise_addr", None)
cuda_devices = kwargs.pop("cuda_devices", None) or os.environ.get(
"CUDA_VISIBLE_DEVICES"
)
if not cuda_devices:
self._n_cuda_process = 0
else:
cuda_devices = os.environ["CUDA_VISIBLE_DEVICES"] = ",".join(
str(d) for d in cuda_devices
)
if cuda_devices:
logger.info("Started Mars worker with CUDA cards %s", cuda_devices)
self._n_cuda_process = resource.cuda_count()
self._n_cpu_process = int(kwargs.pop("n_cpu_process", None) or resource.cpu_count())
self._n_net_process = int(kwargs.pop("n_net_process", None) or "4")
self._spill_dirs = kwargs.pop("spill_dirs", None)
if self._spill_dirs:
if isinstance(self._spill_dirs, str):
from .utils import parse_spill_dirs
self._spill_dirs = options.worker.spill_directory = parse_spill_dirs(
self._spill_dirs
)
else:
options.worker.spill_directory = self._spill_dirs
else:
self._spill_dirs = options.worker.spill_directory = []
options.worker.disk_compression = (
kwargs.pop("disk_compression", None) or options.worker.disk_compression
)
options.worker.transfer_compression = (
kwargs.pop("transfer_compression", None) or options.worker.transfer_compression
)
options.worker.lock_free_fileio = kwargs.pop("lock_free_fileio", None) or False
self._total_mem = kwargs.pop("total_mem", None)
self._cache_mem_limit = kwargs.pop("cache_mem_limit", None)
self._soft_mem_limit = kwargs.pop("soft_mem_limit", None) or "80%"
self._hard_mem_limit = kwargs.pop("hard_mem_limit", None) or "90%"
self._ignore_avail_mem = kwargs.pop("ignore_avail_mem", None) or False
self._min_mem_size = kwargs.pop("min_mem_size", None) or 128 * 1024**2
self._plasma_dir = kwargs.pop("plasma_dir", None)
self._use_ext_plasma_dir = kwargs.pop("use_ext_plasma_dir", None) or False
self._soft_quota_limit = self._soft_mem_limit
self._calc_memory_limits()
if kwargs: # pragma: no cover
raise TypeError(
"Keyword arguments %r cannot be recognized." % ", ".join(kwargs)
)
|
https://github.com/mars-project/mars/issues/1479
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 129, in __repr__
return self._data.__repr__()
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1083, in __repr__
return self._to_str(representation=True)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1053, in _to_str
self, session=self._executed_sessions[-1])
File "/Users/wenjun.swj/Code/mars/mars/dataframe/utils.py", line 773, in fetch_corner_data
return df_or_series.fetch(session=session)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 376, in fetch
return session.fetch(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 491, in fetch
result = self._sess.fetch(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/web/session.py", line 265, in fetch
result_data = dataserializer.loads(resp.content)
File "/Users/wenjun.swj/Code/mars/mars/serialize/dataserializer.py", line 259, in loads
return pickle.loads(data)
ModuleNotFoundError: No module named 'pyarrow'
|
ModuleNotFoundError
|
def start(
self, endpoint, pool, distributed=True, discoverer=None, process_start_index=0
):
# create plasma key mapper
from .storage import PlasmaKeyMapActor
pool.create_actor(PlasmaKeyMapActor, uid=PlasmaKeyMapActor.default_uid())
# create vineyard key mapper
if options.vineyard.socket: # pragma: no cover
from .storage import VineyardKeyMapActor
pool.create_actor(VineyardKeyMapActor, uid=VineyardKeyMapActor.default_uid())
# create WorkerClusterInfoActor
self._cluster_info_ref = pool.create_actor(
WorkerClusterInfoActor,
discoverer,
distributed=distributed,
uid=WorkerClusterInfoActor.default_uid(),
)
if distributed:
# create process daemon
from .daemon import WorkerDaemonActor
actor_holder = self._daemon_ref = pool.create_actor(
WorkerDaemonActor, uid=WorkerDaemonActor.default_uid()
)
# create StatusActor
if ":" not in self._advertise_addr:
self._advertise_addr += ":" + endpoint.rsplit(":", 1)[-1]
self._status_ref = pool.create_actor(
StatusActor, self._advertise_addr, uid=StatusActor.default_uid()
)
else:
# create StatusActor
self._status_ref = pool.create_actor(
StatusActor,
endpoint,
with_gpu=self._n_cuda_process > 0,
uid=StatusActor.default_uid(),
)
actor_holder = pool
if self._ignore_avail_mem:
# start a QuotaActor instead of MemQuotaActor to avoid memory size detection
# for debug purpose only, DON'T USE IN PRODUCTION
self._mem_quota_ref = pool.create_actor(
QuotaActor, self._soft_mem_limit, uid=MemQuotaActor.default_uid()
)
else:
self._mem_quota_ref = pool.create_actor(
MemQuotaActor,
self._soft_quota_limit,
self._hard_mem_limit,
uid=MemQuotaActor.default_uid(),
)
# create StorageManagerActor
self._storage_manager_ref = pool.create_actor(
StorageManagerActor, uid=StorageManagerActor.default_uid()
)
# create SharedHolderActor
self._shared_holder_ref = pool.create_actor(
SharedHolderActor, self._cache_mem_limit, uid=SharedHolderActor.default_uid()
)
# create DispatchActor
self._dispatch_ref = pool.create_actor(
DispatchActor, uid=DispatchActor.default_uid()
)
# create EventsActor
self._events_ref = pool.create_actor(EventsActor, uid=EventsActor.default_uid())
# create ReceiverNotifierActor
self._receiver_manager_ref = pool.create_actor(
ReceiverManagerActor, uid=ReceiverManagerActor.default_uid()
)
# create ExecutionActor
self._execution_ref = pool.create_actor(
ExecutionActor, uid=ExecutionActor.default_uid()
)
# create CpuCalcActor and InProcHolderActor
if not distributed:
self._n_cpu_process = pool.cluster_info.n_process - 1 - process_start_index
for cpu_id in range(self._n_cpu_process):
uid = "w:%d:mars-cpu-calc-%d-%d" % (cpu_id + 1, os.getpid(), cpu_id)
actor = actor_holder.create_actor(CpuCalcActor, uid=uid)
self._cpu_calc_actors.append(actor)
uid = "w:%d:mars-inproc-holder-%d-%d" % (cpu_id + 1, os.getpid(), cpu_id)
actor = actor_holder.create_actor(InProcHolderActor, uid=uid)
self._inproc_holder_actors.append(actor)
actor = actor_holder.create_actor(
IORunnerActor, dispatched=False, uid=IORunnerActor.gen_uid(cpu_id + 1)
)
self._inproc_io_runner_actors.append(actor)
start_pid = 1 + self._n_cpu_process
stats = resource.cuda_card_stats() if self._n_cuda_process else []
for cuda_id, stat in enumerate(stats):
for thread_no in range(options.worker.cuda_thread_num):
uid = "w:%d:mars-cuda-calc-%d-%d-%d" % (
start_pid + cuda_id,
os.getpid(),
cuda_id,
thread_no,
)
actor = actor_holder.create_actor(CudaCalcActor, uid=uid)
self._cuda_calc_actors.append(actor)
uid = "w:%d:mars-cuda-holder-%d-%d" % (
start_pid + cuda_id,
os.getpid(),
cuda_id,
)
actor = actor_holder.create_actor(
CudaHolderActor, stat.fb_mem_info.total, device_id=stat.index, uid=uid
)
self._cuda_holder_actors.append(actor)
actor = actor_holder.create_actor(
IORunnerActor,
dispatched=False,
uid=IORunnerActor.gen_uid(start_pid + cuda_id),
)
self._inproc_io_runner_actors.append(actor)
start_pid += self._n_cuda_process
if distributed:
# create SenderActor and ReceiverActor
for sender_id in range(self._n_net_process):
uid = "w:%d:mars-sender-%d-%d" % (
start_pid + sender_id,
os.getpid(),
sender_id,
)
actor = actor_holder.create_actor(SenderActor, uid=uid)
self._sender_actors.append(actor)
# Mutable requires ReceiverActor (with ClusterSession)
for receiver_id in range(2 * self._n_net_process):
uid = "w:%d:mars-receiver-%d-%d" % (
start_pid + receiver_id // 2,
os.getpid(),
receiver_id,
)
actor = actor_holder.create_actor(ReceiverWorkerActor, uid=uid)
self._receiver_actors.append(actor)
# create ProcessHelperActor
for proc_id in range(pool.cluster_info.n_process - process_start_index):
uid = "w:%d:mars-process-helper" % proc_id
actor = actor_holder.create_actor(ProcessHelperActor, uid=uid)
self._process_helper_actors.append(actor)
# create ResultSenderActor
self._result_sender_ref = pool.create_actor(
ResultSenderActor, uid=ResultSenderActor.default_uid()
)
# create SpillActor
start_pid = pool.cluster_info.n_process - 1
if options.worker.spill_directory:
for spill_id in range(len(options.worker.spill_directory)):
uid = "w:%d:mars-global-io-runner-%d-%d" % (
start_pid,
os.getpid(),
spill_id,
)
actor = actor_holder.create_actor(IORunnerActor, uid=uid)
self._spill_actors.append(actor)
# worker can be registered when everything is ready
self._status_ref.enable_status_upload(_tell=True)
|
def start(
self, endpoint, pool, distributed=True, discoverer=None, process_start_index=0
):
# create plasma key mapper
from .storage import PlasmaKeyMapActor
pool.create_actor(PlasmaKeyMapActor, uid=PlasmaKeyMapActor.default_uid())
# create vineyard key mapper
if options.vineyard.socket: # pragma: no cover
from .storage import VineyardKeyMapActor
pool.create_actor(VineyardKeyMapActor, uid=VineyardKeyMapActor.default_uid())
# create WorkerClusterInfoActor
self._cluster_info_ref = pool.create_actor(
WorkerClusterInfoActor,
discoverer,
distributed=distributed,
uid=WorkerClusterInfoActor.default_uid(),
)
if distributed:
# create process daemon
from .daemon import WorkerDaemonActor
actor_holder = self._daemon_ref = pool.create_actor(
WorkerDaemonActor, uid=WorkerDaemonActor.default_uid()
)
# create StatusActor
if ":" not in self._advertise_addr:
self._advertise_addr += ":" + endpoint.rsplit(":", 1)[-1]
self._status_ref = pool.create_actor(
StatusActor, self._advertise_addr, uid=StatusActor.default_uid()
)
else:
# create StatusActor
self._status_ref = pool.create_actor(
StatusActor,
endpoint,
with_gpu=self._n_cuda_process > 0,
uid=StatusActor.default_uid(),
)
actor_holder = pool
if self._ignore_avail_mem:
# start a QuotaActor instead of MemQuotaActor to avoid memory size detection
# for debug purpose only, DON'T USE IN PRODUCTION
self._mem_quota_ref = pool.create_actor(
QuotaActor, self._soft_mem_limit, uid=MemQuotaActor.default_uid()
)
else:
self._mem_quota_ref = pool.create_actor(
MemQuotaActor,
self._soft_quota_limit,
self._hard_mem_limit,
uid=MemQuotaActor.default_uid(),
)
# create StorageManagerActor
self._storage_manager_ref = pool.create_actor(
StorageManagerActor, uid=StorageManagerActor.default_uid()
)
# create SharedHolderActor
self._shared_holder_ref = pool.create_actor(
SharedHolderActor, self._cache_mem_limit, uid=SharedHolderActor.default_uid()
)
# create DispatchActor
self._dispatch_ref = pool.create_actor(
DispatchActor, uid=DispatchActor.default_uid()
)
# create EventsActor
self._events_ref = pool.create_actor(EventsActor, uid=EventsActor.default_uid())
# create ReceiverNotifierActor
self._receiver_manager_ref = pool.create_actor(
ReceiverManagerActor, uid=ReceiverManagerActor.default_uid()
)
# create ExecutionActor
self._execution_ref = pool.create_actor(
ExecutionActor, uid=ExecutionActor.default_uid()
)
# create CpuCalcActor and InProcHolderActor
if not distributed:
self._n_cpu_process = pool.cluster_info.n_process - 1 - process_start_index
for cpu_id in range(self._n_cpu_process):
uid = "w:%d:mars-cpu-calc-%d-%d" % (cpu_id + 1, os.getpid(), cpu_id)
actor = actor_holder.create_actor(CpuCalcActor, uid=uid)
self._cpu_calc_actors.append(actor)
uid = "w:%d:mars-inproc-holder-%d-%d" % (cpu_id + 1, os.getpid(), cpu_id)
actor = actor_holder.create_actor(InProcHolderActor, uid=uid)
self._inproc_holder_actors.append(actor)
actor = actor_holder.create_actor(
IORunnerActor,
lock_free=True,
dispatched=False,
uid=IORunnerActor.gen_uid(cpu_id + 1),
)
self._inproc_io_runner_actors.append(actor)
start_pid = 1 + self._n_cpu_process
stats = resource.cuda_card_stats() if self._n_cuda_process else []
for cuda_id, stat in enumerate(stats):
for thread_no in range(options.worker.cuda_thread_num):
uid = "w:%d:mars-cuda-calc-%d-%d-%d" % (
start_pid + cuda_id,
os.getpid(),
cuda_id,
thread_no,
)
actor = actor_holder.create_actor(CudaCalcActor, uid=uid)
self._cuda_calc_actors.append(actor)
uid = "w:%d:mars-cuda-holder-%d-%d" % (
start_pid + cuda_id,
os.getpid(),
cuda_id,
)
actor = actor_holder.create_actor(
CudaHolderActor, stat.fb_mem_info.total, device_id=stat.index, uid=uid
)
self._cuda_holder_actors.append(actor)
actor = actor_holder.create_actor(
IORunnerActor,
lock_free=True,
dispatched=False,
uid=IORunnerActor.gen_uid(start_pid + cuda_id),
)
self._inproc_io_runner_actors.append(actor)
start_pid += self._n_cuda_process
if distributed:
# create SenderActor and ReceiverActor
for sender_id in range(self._n_net_process):
uid = "w:%d:mars-sender-%d-%d" % (
start_pid + sender_id,
os.getpid(),
sender_id,
)
actor = actor_holder.create_actor(SenderActor, uid=uid)
self._sender_actors.append(actor)
# Mutable requires ReceiverActor (with ClusterSession)
for receiver_id in range(2 * self._n_net_process):
uid = "w:%d:mars-receiver-%d-%d" % (
start_pid + receiver_id // 2,
os.getpid(),
receiver_id,
)
actor = actor_holder.create_actor(ReceiverWorkerActor, uid=uid)
self._receiver_actors.append(actor)
# create ProcessHelperActor
for proc_id in range(pool.cluster_info.n_process - process_start_index):
uid = "w:%d:mars-process-helper" % proc_id
actor = actor_holder.create_actor(ProcessHelperActor, uid=uid)
self._process_helper_actors.append(actor)
# create ResultSenderActor
self._result_sender_ref = pool.create_actor(
ResultSenderActor, uid=ResultSenderActor.default_uid()
)
# create SpillActor
start_pid = pool.cluster_info.n_process - 1
if options.worker.spill_directory:
for spill_id in range(len(options.worker.spill_directory)):
uid = "w:%d:mars-global-io-runner-%d-%d" % (
start_pid,
os.getpid(),
spill_id,
)
actor = actor_holder.create_actor(IORunnerActor, uid=uid)
self._spill_actors.append(actor)
# worker can be registered when everything is ready
self._status_ref.enable_status_upload(_tell=True)
|
https://github.com/mars-project/mars/issues/1479
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 129, in __repr__
return self._data.__repr__()
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1083, in __repr__
return self._to_str(representation=True)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1053, in _to_str
self, session=self._executed_sessions[-1])
File "/Users/wenjun.swj/Code/mars/mars/dataframe/utils.py", line 773, in fetch_corner_data
return df_or_series.fetch(session=session)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 376, in fetch
return session.fetch(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 491, in fetch
result = self._sess.fetch(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/web/session.py", line 265, in fetch
result_data = dataserializer.loads(resp.content)
File "/Users/wenjun.swj/Code/mars/mars/serialize/dataserializer.py", line 259, in loads
return pickle.loads(data)
ModuleNotFoundError: No module named 'pyarrow'
|
ModuleNotFoundError
|
def __init__(self, io_parallel_num=None, dispatched=True):
super().__init__()
self._work_items = deque()
self._max_work_item_id = 0
self._cur_work_items = dict()
self._io_parallel_num = io_parallel_num or options.worker.io_parallel_num
self._lock_work_items = dict()
self._dispatched = dispatched
|
def __init__(self, lock_free=False, dispatched=True):
super().__init__()
self._work_items = deque()
self._max_work_item_id = 0
self._cur_work_items = dict()
self._lock_free = lock_free or options.worker.lock_free_fileio
self._lock_work_items = dict()
self._dispatched = dispatched
|
https://github.com/mars-project/mars/issues/1479
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 129, in __repr__
return self._data.__repr__()
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1083, in __repr__
return self._to_str(representation=True)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1053, in _to_str
self, session=self._executed_sessions[-1])
File "/Users/wenjun.swj/Code/mars/mars/dataframe/utils.py", line 773, in fetch_corner_data
return df_or_series.fetch(session=session)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 376, in fetch
return session.fetch(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 491, in fetch
result = self._sess.fetch(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/web/session.py", line 265, in fetch
result_data = dataserializer.loads(resp.content)
File "/Users/wenjun.swj/Code/mars/mars/serialize/dataserializer.py", line 259, in loads
return pickle.loads(data)
ModuleNotFoundError: No module named 'pyarrow'
|
ModuleNotFoundError
|
def load_from(self, dest_device, session_id, data_keys, src_device, callback):
logger.debug(
"Copying %r from %s into %s submitted in %s",
data_keys,
src_device,
dest_device,
self.uid,
)
self._work_items.append(
(dest_device, session_id, data_keys, src_device, False, callback)
)
if len(self._cur_work_items) < self._io_parallel_num:
self._submit_next()
|
def load_from(self, dest_device, session_id, data_keys, src_device, callback):
logger.debug(
"Copying %r from %s into %s submitted in %s",
data_keys,
src_device,
dest_device,
self.uid,
)
self._work_items.append(
(dest_device, session_id, data_keys, src_device, False, callback)
)
if self._lock_free or not self._cur_work_items:
self._submit_next()
|
https://github.com/mars-project/mars/issues/1479
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 129, in __repr__
return self._data.__repr__()
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1083, in __repr__
return self._to_str(representation=True)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1053, in _to_str
self, session=self._executed_sessions[-1])
File "/Users/wenjun.swj/Code/mars/mars/dataframe/utils.py", line 773, in fetch_corner_data
return df_or_series.fetch(session=session)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 376, in fetch
return session.fetch(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 491, in fetch
result = self._sess.fetch(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/web/session.py", line 265, in fetch
result_data = dataserializer.loads(resp.content)
File "/Users/wenjun.swj/Code/mars/mars/serialize/dataserializer.py", line 259, in loads
return pickle.loads(data)
ModuleNotFoundError: No module named 'pyarrow'
|
ModuleNotFoundError
|
def lock(self, session_id, data_keys, callback):
logger.debug("Requesting lock for %r on %s", data_keys, self.uid)
self._work_items.append((None, session_id, data_keys, None, True, callback))
if len(self._cur_work_items) < self._io_parallel_num:
self._submit_next()
|
def lock(self, session_id, data_keys, callback):
logger.debug("Requesting lock for %r on %s", data_keys, self.uid)
self._work_items.append((None, session_id, data_keys, None, True, callback))
if self._lock_free or not self._cur_work_items:
self._submit_next()
|
https://github.com/mars-project/mars/issues/1479
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 129, in __repr__
return self._data.__repr__()
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1083, in __repr__
return self._to_str(representation=True)
File "/Users/wenjun.swj/Code/mars/mars/dataframe/core.py", line 1053, in _to_str
self, session=self._executed_sessions[-1])
File "/Users/wenjun.swj/Code/mars/mars/dataframe/utils.py", line 773, in fetch_corner_data
return df_or_series.fetch(session=session)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 376, in fetch
return session.fetch(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 491, in fetch
result = self._sess.fetch(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/web/session.py", line 265, in fetch
result_data = dataserializer.loads(resp.content)
File "/Users/wenjun.swj/Code/mars/mars/serialize/dataserializer.py", line 259, in loads
return pickle.loads(data)
ModuleNotFoundError: No module named 'pyarrow'
|
ModuleNotFoundError
|
def tile(cls, op: "DataFrameDrop"):
inp = op.inputs[0]
out = op.outputs[0]
if len(op.inputs) > 1:
index_chunk = (
op.index.rechunk({0: (op.index.shape[0],)})._inplace_tile().chunks[0]
)
else:
index_chunk = op.index
col_to_args = OrderedDict()
chunks = []
for c in inp.chunks:
params = c.params.copy()
if isinstance(inp, DATAFRAME_TYPE):
new_dtypes, new_col_id = col_to_args.get(c.index[1], (None, None))
if new_dtypes is None:
new_col_id = len(col_to_args)
new_dtypes = op._filter_dtypes(c.dtypes, ignore_errors=True)
if len(new_dtypes) == 0:
continue
col_to_args[c.index[1]] = (new_dtypes, new_col_id)
params.update(
dict(
dtypes=new_dtypes,
index=(c.index[0], new_col_id),
index_value=c.index_value,
)
)
if op.index is not None:
params.update(
dict(
shape=(np.nan, len(new_dtypes)),
index_value=parse_index(None, (c.key, c.index_value.key)),
)
)
else:
params["shape"] = (c.shape[0], len(new_dtypes))
elif op.index is not None:
params.update(
dict(
shape=(np.nan,),
index_value=parse_index(None, (c.key, c.index_value.key)),
)
)
chunk_inputs = [c]
if isinstance(index_chunk, Chunk):
chunk_inputs.append(index_chunk)
new_op = op.copy().reset_key()
new_op._index = index_chunk
chunks.append(new_op.new_chunk(chunk_inputs, **params))
new_op = op.copy().reset_key()
params = out.params.copy()
if op.index is not None:
nsplits_list = [(np.nan,) * inp.chunk_shape[0]]
else:
nsplits_list = [inp.nsplits[0]]
if isinstance(inp, DATAFRAME_TYPE):
nsplits_list.append(tuple(len(dt) for dt, _ in col_to_args.values()))
params.update(dict(chunks=chunks, nsplits=tuple(nsplits_list)))
return new_op.new_tileables(op.inputs, **params)
|
def tile(cls, op: "DataFrameDrop"):
inp = op.inputs[0]
out = op.outputs[0]
if len(op.inputs) > 1:
index_chunk = (
op.index.rechunk({0: (op.index.shape[0],)})._inplace_tile().chunks[0]
)
else:
index_chunk = op.index
col_to_args = OrderedDict()
chunks = []
for c in inp.chunks:
params = c.params.copy()
if isinstance(inp, DATAFRAME_TYPE):
new_dtypes, new_col_id = col_to_args.get(c.index[1], (None, None))
if new_dtypes is None:
new_col_id = len(col_to_args)
new_dtypes = op._filter_dtypes(c.dtypes, ignore_errors=True)
if len(new_dtypes) == 0:
continue
col_to_args[c.index[1]] = (new_dtypes, new_col_id)
params.update(
dict(
dtypes=new_dtypes,
index=(c.index[0], new_col_id),
index_value=parse_index(None, (c.key, c.index_value.key)),
)
)
if op.index is not None:
params.update(
dict(
shape=(np.nan, len(new_dtypes)),
index_value=parse_index(None, (c.key, c.index_value.key)),
)
)
else:
params["shape"] = (c.shape[0], len(new_dtypes))
elif op.index is not None:
params.update(
dict(
shape=(np.nan,),
index_value=parse_index(None, (c.key, c.index_value.key)),
)
)
chunk_inputs = [c]
if isinstance(index_chunk, Chunk):
chunk_inputs.append(index_chunk)
new_op = op.copy().reset_key()
new_op._index = index_chunk
chunks.append(new_op.new_chunk(chunk_inputs, **params))
new_op = op.copy().reset_key()
params = out.params.copy()
if op.index is not None:
nsplits_list = [(np.nan,) * inp.chunk_shape[0]]
else:
nsplits_list = [inp.nsplits[0]]
if isinstance(inp, DATAFRAME_TYPE):
nsplits_list.append(tuple(len(dt) for dt, _ in col_to_args.values()))
params.update(dict(chunks=chunks, nsplits=tuple(nsplits_list)))
return new_op.new_tileables(op.inputs, **params)
|
https://github.com/mars-project/mars/issues/1463
|
Traceback (most recent call last):
File "/Users/qinxuye/Downloads/test_mars3.py", line 13, in <module>
print(c.execute())
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 579, in execute
self._data.execute(session, **kw)
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 367, in execute
session.run(self, **kw)
File "/Users/qinxuye/Workspace/mars/mars/session.py", line 461, in run
result = self._sess.run(*tileables, **kw)
File "/Users/qinxuye/Workspace/mars/mars/session.py", line 106, in run
res = self._executor.execute_tileables(tileables, **kw)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 408, in _wrapped
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 502, in inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/executor.py", line 860, in execute_tileables
tileables, tileable_graph=tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 408, in _wrapped
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 502, in inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 350, in build
tileables, tileable_graph=tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 408, in _wrapped
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 502, in inner
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 263, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 302, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 243, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 338, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 201, in _tile
tds[0]._inplace_tile()
File "/Users/qinxuye/Workspace/mars/mars/core.py", line 163, in _inplace_tile
return handler.inplace_tile(self)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 136, in inplace_tile
dispatched = self.dispatch(to_tile.op)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 408, in _wrapped
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/arithmetic/core.py", line 255, in tile
return cls._tile_both_series(op)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/arithmetic/core.py", line 106, in _tile_both_series
nsplits, out_shape, left_chunks, right_chunks = align_series_series(left, right)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/align.py", line 731, in align_series_series
left_index_chunks, right_index_chunks)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/align.py", line 490, in _calc_axis_splits
right_splits = left_splits = [[c] for c in left_chunk_index_min_max]
TypeError: 'NoneType' object is not iterable
|
TypeError
|
def _execute_and_fetch(self, session=None, **kw):
if session is None and len(self._executed_sessions) > 0:
session = self._executed_sessions[-1]
try:
# fetch first, to reduce the potential cost of submitting a graph
return self.fetch(session=session)
except ValueError:
# not execute before
return self.execute(session=session, **kw).fetch(session=session)
|
def _execute_and_fetch(self, session=None, **kw):
try:
# fetch first, to reduce the potential cost of submitting a graph
return self.fetch(session=session)
except ValueError:
# not execute before
return self.execute(session=session, **kw).fetch(session=session)
|
https://github.com/mars-project/mars/issues/1448
|
D:\Anaconda3\envs\py37\python.exe E:/mycode/read_data.py
2020-08-03 15:54:52,383 INFO sqlalchemy.engine.base.Engine SHOW VARIABLES LIKE 'sql_mode'
2020-08-03 15:54:52,383 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,387 INFO sqlalchemy.engine.base.Engine SHOW VARIABLES LIKE 'lower_case_table_names'
2020-08-03 15:54:52,387 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,389 INFO sqlalchemy.engine.base.Engine SELECT DATABASE()
2020-08-03 15:54:52,389 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,390 INFO sqlalchemy.engine.base.Engine show collation where `Charset` = 'utf8mb4' and `Collation` = 'utf8mb4_bin'
2020-08-03 15:54:52,390 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,391 INFO sqlalchemy.engine.base.Engine SELECT CAST('test plain returns' AS CHAR(60)) AS anon_1
2020-08-03 15:54:52,391 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,392 INFO sqlalchemy.engine.base.Engine SELECT CAST('test unicode returns' AS CHAR(60)) AS anon_1
2020-08-03 15:54:52,392 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,393 INFO sqlalchemy.engine.base.Engine SELECT CAST('test collated returns' AS CHAR CHARACTER SET utf8mb4) COLLATE utf8mb4_bin AS anon_1
2020-08-03 15:54:52,393 INFO sqlalchemy.engine.base.Engine {}
D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py:170: Warning: (1366, "Incorrect string value: '\\xD6\\xD0\\xB9\\xFA\\xB1\\xEA...' for column 'VARIABLE_VALUE' at row 485")
result = self._query(query)
2020-08-03 15:54:52,404 INFO sqlalchemy.engine.base.Engine SHOW CREATE TABLE `SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1`
2020-08-03 15:54:52,404 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,405 INFO sqlalchemy.engine.base.Engine ROLLBACK
Traceback (most recent call last):
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1278, in _execute_context
cursor, statement, parameters, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 593, in do_execute
cursor.execute(statement, parameters)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 170, in execute
result = self._query(query)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 328, in _query
conn.query(q)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 517, in query
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 732, in _read_query_result
result.read()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 1075, in read
first_packet = self.connection._read_packet()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 684, in _read_packet
packet.check_error()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\protocol.py", line 220, in check_error
err.raise_mysql_exception(self._data)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\err.py", line 109, in raise_mysql_exception
raise errorclass(errno, errval)
pymysql.err.InternalError: (1059, "Identifier name 'SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1' is too long")
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "E:/mycode/read_data.py", line 7,
df1 = md.read_sql('SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1', con=engine)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 577, in read_sql
low_limit=low_limit, high_limit=high_limit)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 479, in _read_sql
return op(test_rows, chunk_size)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 222, in __call__
selectable = self._get_selectable(con)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 175, in _get_selectable
autoload_with=engine_or_conn, schema=self._schema)
File "<string>", line 2, in __new__
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\deprecations.py", line 139, in warned
return fn(*args, **kwargs)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 559, in __new__
metadata._remove_table(name, schema)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\langhelpers.py", line 69, in __exit__
exc_value, with_traceback=exc_tb,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\compat.py", line 178, in raise_
raise exception
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 554, in __new__
table._init(name, metadata, *args, **kw)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 648, in _init
resolve_fks=resolve_fks,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 672, in _autoload
_extend_on=_extend_on,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1654, in run_callable
return callable_(self, *args, **kwargs)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 470, in reflecttable
table, include_columns, exclude_columns, resolve_fks, **opts
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 649, in reflecttable
table_name, schema, **table.dialect_kwargs
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 314, in get_table_options
self.bind, table_name, schema, info_cache=self.info_cache, **kw
File "<string>", line 2, in get_table_options
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 52, in cache
ret = fn(self, con, *args, **kw)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2624, in get_table_options
connection, table_name, schema, **kw
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2870, in _parsed_state_or_create
info_cache=kw.get("info_cache", None),
File "<string>", line 2, in _setup_parser
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 52, in cache
ret = fn(self, con, *args, **kw)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2898, in _setup_parser
connection, None, charset, full_name=full_name
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2998, in _show_create_table
).execute(st)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1006, in execute
return self._execute_text(object_, multiparams, params)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1181, in _execute_text
parameters,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1318, in _execute_context
e, statement, parameters, cursor, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1512, in _handle_dbapi_exception
sqlalchemy_exception, with_traceback=exc_info[2], from_=e
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\compat.py", line 178, in raise_
raise exception
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1278, in _execute_context
cursor, statement, parameters, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 593, in do_execute
cursor.execute(statement, parameters)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 170, in execute
result = self._query(query)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 328, in _query
conn.query(q)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 517, in query
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 732, in _read_query_result
result.read()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 1075, in read
first_packet = self.connection._read_packet()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 684, in _read_packet
packet.check_error()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\protocol.py", line 220, in check_error
err.raise_mysql_exception(self._data)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\err.py", line 109, in raise_mysql_exception
raise errorclass(errno, errval)
sqlalchemy.exc.InternalError: (pymysql.err.InternalError) (1059, "Identifier name 'SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1' is too long")
[SQL: SHOW CREATE TABLE `SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1`]
(Background on this error at: http://sqlalche.me/e/13/2j85)
Process finished with exit code 1
|
pymysql.err.InternalError
|
def _process_pos(pos, length, is_start):
if pos is None:
return 0 if is_start else length
return pos + length if pos < 0 else pos
|
def _process_pos(pos, length):
if pos is None:
return 0
return pos + length if pos < 0 else pos
|
https://github.com/mars-project/mars/issues/1448
|
D:\Anaconda3\envs\py37\python.exe E:/mycode/read_data.py
2020-08-03 15:54:52,383 INFO sqlalchemy.engine.base.Engine SHOW VARIABLES LIKE 'sql_mode'
2020-08-03 15:54:52,383 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,387 INFO sqlalchemy.engine.base.Engine SHOW VARIABLES LIKE 'lower_case_table_names'
2020-08-03 15:54:52,387 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,389 INFO sqlalchemy.engine.base.Engine SELECT DATABASE()
2020-08-03 15:54:52,389 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,390 INFO sqlalchemy.engine.base.Engine show collation where `Charset` = 'utf8mb4' and `Collation` = 'utf8mb4_bin'
2020-08-03 15:54:52,390 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,391 INFO sqlalchemy.engine.base.Engine SELECT CAST('test plain returns' AS CHAR(60)) AS anon_1
2020-08-03 15:54:52,391 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,392 INFO sqlalchemy.engine.base.Engine SELECT CAST('test unicode returns' AS CHAR(60)) AS anon_1
2020-08-03 15:54:52,392 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,393 INFO sqlalchemy.engine.base.Engine SELECT CAST('test collated returns' AS CHAR CHARACTER SET utf8mb4) COLLATE utf8mb4_bin AS anon_1
2020-08-03 15:54:52,393 INFO sqlalchemy.engine.base.Engine {}
D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py:170: Warning: (1366, "Incorrect string value: '\\xD6\\xD0\\xB9\\xFA\\xB1\\xEA...' for column 'VARIABLE_VALUE' at row 485")
result = self._query(query)
2020-08-03 15:54:52,404 INFO sqlalchemy.engine.base.Engine SHOW CREATE TABLE `SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1`
2020-08-03 15:54:52,404 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,405 INFO sqlalchemy.engine.base.Engine ROLLBACK
Traceback (most recent call last):
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1278, in _execute_context
cursor, statement, parameters, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 593, in do_execute
cursor.execute(statement, parameters)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 170, in execute
result = self._query(query)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 328, in _query
conn.query(q)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 517, in query
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 732, in _read_query_result
result.read()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 1075, in read
first_packet = self.connection._read_packet()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 684, in _read_packet
packet.check_error()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\protocol.py", line 220, in check_error
err.raise_mysql_exception(self._data)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\err.py", line 109, in raise_mysql_exception
raise errorclass(errno, errval)
pymysql.err.InternalError: (1059, "Identifier name 'SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1' is too long")
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "E:/mycode/read_data.py", line 7,
df1 = md.read_sql('SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1', con=engine)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 577, in read_sql
low_limit=low_limit, high_limit=high_limit)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 479, in _read_sql
return op(test_rows, chunk_size)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 222, in __call__
selectable = self._get_selectable(con)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 175, in _get_selectable
autoload_with=engine_or_conn, schema=self._schema)
File "<string>", line 2, in __new__
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\deprecations.py", line 139, in warned
return fn(*args, **kwargs)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 559, in __new__
metadata._remove_table(name, schema)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\langhelpers.py", line 69, in __exit__
exc_value, with_traceback=exc_tb,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\compat.py", line 178, in raise_
raise exception
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 554, in __new__
table._init(name, metadata, *args, **kw)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 648, in _init
resolve_fks=resolve_fks,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 672, in _autoload
_extend_on=_extend_on,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1654, in run_callable
return callable_(self, *args, **kwargs)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 470, in reflecttable
table, include_columns, exclude_columns, resolve_fks, **opts
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 649, in reflecttable
table_name, schema, **table.dialect_kwargs
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 314, in get_table_options
self.bind, table_name, schema, info_cache=self.info_cache, **kw
File "<string>", line 2, in get_table_options
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 52, in cache
ret = fn(self, con, *args, **kw)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2624, in get_table_options
connection, table_name, schema, **kw
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2870, in _parsed_state_or_create
info_cache=kw.get("info_cache", None),
File "<string>", line 2, in _setup_parser
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 52, in cache
ret = fn(self, con, *args, **kw)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2898, in _setup_parser
connection, None, charset, full_name=full_name
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2998, in _show_create_table
).execute(st)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1006, in execute
return self._execute_text(object_, multiparams, params)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1181, in _execute_text
parameters,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1318, in _execute_context
e, statement, parameters, cursor, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1512, in _handle_dbapi_exception
sqlalchemy_exception, with_traceback=exc_info[2], from_=e
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\compat.py", line 178, in raise_
raise exception
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1278, in _execute_context
cursor, statement, parameters, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 593, in do_execute
cursor.execute(statement, parameters)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 170, in execute
result = self._query(query)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 328, in _query
conn.query(q)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 517, in query
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 732, in _read_query_result
result.read()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 1075, in read
first_packet = self.connection._read_packet()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 684, in _read_packet
packet.check_error()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\protocol.py", line 220, in check_error
err.raise_mysql_exception(self._data)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\err.py", line 109, in raise_mysql_exception
raise errorclass(errno, errval)
sqlalchemy.exc.InternalError: (pymysql.err.InternalError) (1059, "Identifier name 'SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1' is too long")
[SQL: SHOW CREATE TABLE `SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1`]
(Background on this error at: http://sqlalche.me/e/13/2j85)
Process finished with exit code 1
|
pymysql.err.InternalError
|
def __getitem__(self, item):
has_take = hasattr(self._arrow_array, "take")
if not self._force_use_pandas and has_take:
if pd.api.types.is_scalar(item):
item = item + len(self) if item < 0 else item
return self._arrow_array.take([item]).to_pandas()[0]
elif self._can_process_slice_via_arrow(item):
length = len(self)
start, stop = item.start, item.stop
start = self._process_pos(start, length, True)
stop = self._process_pos(stop, length, False)
return ArrowStringArray(
self._arrow_array.slice(offset=start, length=stop - start)
)
elif hasattr(item, "dtype") and np.issubdtype(item.dtype, np.bool_):
return ArrowStringArray(
self._arrow_array.filter(pa.array(item, from_pandas=True))
)
elif hasattr(item, "dtype"):
length = len(self)
item = np.where(item < 0, item + length, item)
return ArrowStringArray(self._arrow_array.take(item))
array = np.asarray(self._arrow_array.to_pandas())
return ArrowStringArray(array[item])
|
def __getitem__(self, item):
has_take = hasattr(self._arrow_array, "take")
if not self._force_use_pandas and has_take:
if pd.api.types.is_scalar(item):
item = item + len(self) if item < 0 else item
return self._arrow_array.take([item]).to_pandas()[0]
elif self._can_process_slice_via_arrow(item):
length = len(self)
start, stop = item.start, item.stop
start = self._process_pos(start, length)
stop = self._process_pos(stop, length)
return ArrowStringArray(
self._arrow_array.slice(offset=start, length=stop - start)
)
elif hasattr(item, "dtype") and np.issubdtype(item.dtype, np.bool_):
return ArrowStringArray(
self._arrow_array.filter(pa.array(item, from_pandas=True))
)
elif hasattr(item, "dtype"):
length = len(self)
item = np.where(item < 0, item + length, item)
return ArrowStringArray(self._arrow_array.take(item))
array = np.asarray(self._arrow_array.to_pandas())
return ArrowStringArray(array[item])
|
https://github.com/mars-project/mars/issues/1448
|
D:\Anaconda3\envs\py37\python.exe E:/mycode/read_data.py
2020-08-03 15:54:52,383 INFO sqlalchemy.engine.base.Engine SHOW VARIABLES LIKE 'sql_mode'
2020-08-03 15:54:52,383 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,387 INFO sqlalchemy.engine.base.Engine SHOW VARIABLES LIKE 'lower_case_table_names'
2020-08-03 15:54:52,387 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,389 INFO sqlalchemy.engine.base.Engine SELECT DATABASE()
2020-08-03 15:54:52,389 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,390 INFO sqlalchemy.engine.base.Engine show collation where `Charset` = 'utf8mb4' and `Collation` = 'utf8mb4_bin'
2020-08-03 15:54:52,390 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,391 INFO sqlalchemy.engine.base.Engine SELECT CAST('test plain returns' AS CHAR(60)) AS anon_1
2020-08-03 15:54:52,391 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,392 INFO sqlalchemy.engine.base.Engine SELECT CAST('test unicode returns' AS CHAR(60)) AS anon_1
2020-08-03 15:54:52,392 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,393 INFO sqlalchemy.engine.base.Engine SELECT CAST('test collated returns' AS CHAR CHARACTER SET utf8mb4) COLLATE utf8mb4_bin AS anon_1
2020-08-03 15:54:52,393 INFO sqlalchemy.engine.base.Engine {}
D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py:170: Warning: (1366, "Incorrect string value: '\\xD6\\xD0\\xB9\\xFA\\xB1\\xEA...' for column 'VARIABLE_VALUE' at row 485")
result = self._query(query)
2020-08-03 15:54:52,404 INFO sqlalchemy.engine.base.Engine SHOW CREATE TABLE `SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1`
2020-08-03 15:54:52,404 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,405 INFO sqlalchemy.engine.base.Engine ROLLBACK
Traceback (most recent call last):
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1278, in _execute_context
cursor, statement, parameters, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 593, in do_execute
cursor.execute(statement, parameters)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 170, in execute
result = self._query(query)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 328, in _query
conn.query(q)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 517, in query
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 732, in _read_query_result
result.read()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 1075, in read
first_packet = self.connection._read_packet()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 684, in _read_packet
packet.check_error()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\protocol.py", line 220, in check_error
err.raise_mysql_exception(self._data)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\err.py", line 109, in raise_mysql_exception
raise errorclass(errno, errval)
pymysql.err.InternalError: (1059, "Identifier name 'SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1' is too long")
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "E:/mycode/read_data.py", line 7,
df1 = md.read_sql('SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1', con=engine)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 577, in read_sql
low_limit=low_limit, high_limit=high_limit)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 479, in _read_sql
return op(test_rows, chunk_size)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 222, in __call__
selectable = self._get_selectable(con)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 175, in _get_selectable
autoload_with=engine_or_conn, schema=self._schema)
File "<string>", line 2, in __new__
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\deprecations.py", line 139, in warned
return fn(*args, **kwargs)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 559, in __new__
metadata._remove_table(name, schema)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\langhelpers.py", line 69, in __exit__
exc_value, with_traceback=exc_tb,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\compat.py", line 178, in raise_
raise exception
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 554, in __new__
table._init(name, metadata, *args, **kw)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 648, in _init
resolve_fks=resolve_fks,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 672, in _autoload
_extend_on=_extend_on,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1654, in run_callable
return callable_(self, *args, **kwargs)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 470, in reflecttable
table, include_columns, exclude_columns, resolve_fks, **opts
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 649, in reflecttable
table_name, schema, **table.dialect_kwargs
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 314, in get_table_options
self.bind, table_name, schema, info_cache=self.info_cache, **kw
File "<string>", line 2, in get_table_options
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 52, in cache
ret = fn(self, con, *args, **kw)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2624, in get_table_options
connection, table_name, schema, **kw
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2870, in _parsed_state_or_create
info_cache=kw.get("info_cache", None),
File "<string>", line 2, in _setup_parser
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 52, in cache
ret = fn(self, con, *args, **kw)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2898, in _setup_parser
connection, None, charset, full_name=full_name
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2998, in _show_create_table
).execute(st)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1006, in execute
return self._execute_text(object_, multiparams, params)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1181, in _execute_text
parameters,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1318, in _execute_context
e, statement, parameters, cursor, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1512, in _handle_dbapi_exception
sqlalchemy_exception, with_traceback=exc_info[2], from_=e
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\compat.py", line 178, in raise_
raise exception
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1278, in _execute_context
cursor, statement, parameters, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 593, in do_execute
cursor.execute(statement, parameters)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 170, in execute
result = self._query(query)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 328, in _query
conn.query(q)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 517, in query
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 732, in _read_query_result
result.read()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 1075, in read
first_packet = self.connection._read_packet()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 684, in _read_packet
packet.check_error()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\protocol.py", line 220, in check_error
err.raise_mysql_exception(self._data)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\err.py", line 109, in raise_mysql_exception
raise errorclass(errno, errval)
sqlalchemy.exc.InternalError: (pymysql.err.InternalError) (1059, "Identifier name 'SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1' is too long")
[SQL: SHOW CREATE TABLE `SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1`]
(Background on this error at: http://sqlalche.me/e/13/2j85)
Process finished with exit code 1
|
pymysql.err.InternalError
|
def _concat_same_type(
cls, to_concat: Sequence["ArrowStringArray"]
) -> "ArrowStringArray":
chunks = list(
itertools.chain.from_iterable(x._arrow_array.chunks for x in to_concat)
)
if len(chunks) == 0:
chunks = [pa.array([], type=pa.string())]
return cls(pa.chunked_array(chunks))
|
def _concat_same_type(
cls, to_concat: Sequence["ArrowStringArray"]
) -> "ArrowStringArray":
chunks = list(
itertools.chain.from_iterable(x._arrow_array.chunks for x in to_concat)
)
return cls(pa.chunked_array(chunks))
|
https://github.com/mars-project/mars/issues/1448
|
D:\Anaconda3\envs\py37\python.exe E:/mycode/read_data.py
2020-08-03 15:54:52,383 INFO sqlalchemy.engine.base.Engine SHOW VARIABLES LIKE 'sql_mode'
2020-08-03 15:54:52,383 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,387 INFO sqlalchemy.engine.base.Engine SHOW VARIABLES LIKE 'lower_case_table_names'
2020-08-03 15:54:52,387 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,389 INFO sqlalchemy.engine.base.Engine SELECT DATABASE()
2020-08-03 15:54:52,389 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,390 INFO sqlalchemy.engine.base.Engine show collation where `Charset` = 'utf8mb4' and `Collation` = 'utf8mb4_bin'
2020-08-03 15:54:52,390 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,391 INFO sqlalchemy.engine.base.Engine SELECT CAST('test plain returns' AS CHAR(60)) AS anon_1
2020-08-03 15:54:52,391 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,392 INFO sqlalchemy.engine.base.Engine SELECT CAST('test unicode returns' AS CHAR(60)) AS anon_1
2020-08-03 15:54:52,392 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,393 INFO sqlalchemy.engine.base.Engine SELECT CAST('test collated returns' AS CHAR CHARACTER SET utf8mb4) COLLATE utf8mb4_bin AS anon_1
2020-08-03 15:54:52,393 INFO sqlalchemy.engine.base.Engine {}
D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py:170: Warning: (1366, "Incorrect string value: '\\xD6\\xD0\\xB9\\xFA\\xB1\\xEA...' for column 'VARIABLE_VALUE' at row 485")
result = self._query(query)
2020-08-03 15:54:52,404 INFO sqlalchemy.engine.base.Engine SHOW CREATE TABLE `SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1`
2020-08-03 15:54:52,404 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,405 INFO sqlalchemy.engine.base.Engine ROLLBACK
Traceback (most recent call last):
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1278, in _execute_context
cursor, statement, parameters, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 593, in do_execute
cursor.execute(statement, parameters)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 170, in execute
result = self._query(query)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 328, in _query
conn.query(q)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 517, in query
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 732, in _read_query_result
result.read()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 1075, in read
first_packet = self.connection._read_packet()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 684, in _read_packet
packet.check_error()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\protocol.py", line 220, in check_error
err.raise_mysql_exception(self._data)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\err.py", line 109, in raise_mysql_exception
raise errorclass(errno, errval)
pymysql.err.InternalError: (1059, "Identifier name 'SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1' is too long")
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "E:/mycode/read_data.py", line 7,
df1 = md.read_sql('SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1', con=engine)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 577, in read_sql
low_limit=low_limit, high_limit=high_limit)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 479, in _read_sql
return op(test_rows, chunk_size)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 222, in __call__
selectable = self._get_selectable(con)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 175, in _get_selectable
autoload_with=engine_or_conn, schema=self._schema)
File "<string>", line 2, in __new__
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\deprecations.py", line 139, in warned
return fn(*args, **kwargs)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 559, in __new__
metadata._remove_table(name, schema)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\langhelpers.py", line 69, in __exit__
exc_value, with_traceback=exc_tb,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\compat.py", line 178, in raise_
raise exception
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 554, in __new__
table._init(name, metadata, *args, **kw)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 648, in _init
resolve_fks=resolve_fks,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 672, in _autoload
_extend_on=_extend_on,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1654, in run_callable
return callable_(self, *args, **kwargs)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 470, in reflecttable
table, include_columns, exclude_columns, resolve_fks, **opts
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 649, in reflecttable
table_name, schema, **table.dialect_kwargs
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 314, in get_table_options
self.bind, table_name, schema, info_cache=self.info_cache, **kw
File "<string>", line 2, in get_table_options
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 52, in cache
ret = fn(self, con, *args, **kw)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2624, in get_table_options
connection, table_name, schema, **kw
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2870, in _parsed_state_or_create
info_cache=kw.get("info_cache", None),
File "<string>", line 2, in _setup_parser
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 52, in cache
ret = fn(self, con, *args, **kw)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2898, in _setup_parser
connection, None, charset, full_name=full_name
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2998, in _show_create_table
).execute(st)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1006, in execute
return self._execute_text(object_, multiparams, params)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1181, in _execute_text
parameters,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1318, in _execute_context
e, statement, parameters, cursor, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1512, in _handle_dbapi_exception
sqlalchemy_exception, with_traceback=exc_info[2], from_=e
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\compat.py", line 178, in raise_
raise exception
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1278, in _execute_context
cursor, statement, parameters, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 593, in do_execute
cursor.execute(statement, parameters)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 170, in execute
result = self._query(query)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 328, in _query
conn.query(q)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 517, in query
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 732, in _read_query_result
result.read()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 1075, in read
first_packet = self.connection._read_packet()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 684, in _read_packet
packet.check_error()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\protocol.py", line 220, in check_error
err.raise_mysql_exception(self._data)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\err.py", line 109, in raise_mysql_exception
raise errorclass(errno, errval)
sqlalchemy.exc.InternalError: (pymysql.err.InternalError) (1059, "Identifier name 'SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1' is too long")
[SQL: SHOW CREATE TABLE `SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1`]
(Background on this error at: http://sqlalche.me/e/13/2j85)
Process finished with exit code 1
|
pymysql.err.InternalError
|
def tile(cls, op):
check_chunks_unknown_shape(op.inputs, TilesError)
out = op.outputs[0]
new_chunk_size = op.chunk_size
if isinstance(out, DATAFRAME_TYPE):
itemsize = max(getattr(dt, "itemsize", 8) for dt in out.dtypes)
else:
itemsize = out.dtype.itemsize
steps = plan_rechunks(
op.inputs[0],
new_chunk_size,
itemsize,
threshold=op.threshold,
chunk_size_limit=op.chunk_size_limit,
)
for c in steps:
out = compute_rechunk(out.inputs[0], c)
return [out]
|
def tile(cls, op):
check_chunks_unknown_shape(op.inputs, TilesError)
out = op.outputs[0]
new_chunk_size = op.chunk_size
if isinstance(out, DATAFRAME_TYPE):
itemsize = max(dt.itemsize for dt in out.dtypes)
else:
itemsize = out.dtype.itemsize
steps = plan_rechunks(
op.inputs[0],
new_chunk_size,
itemsize,
threshold=op.threshold,
chunk_size_limit=op.chunk_size_limit,
)
for c in steps:
out = compute_rechunk(out.inputs[0], c)
return [out]
|
https://github.com/mars-project/mars/issues/1448
|
D:\Anaconda3\envs\py37\python.exe E:/mycode/read_data.py
2020-08-03 15:54:52,383 INFO sqlalchemy.engine.base.Engine SHOW VARIABLES LIKE 'sql_mode'
2020-08-03 15:54:52,383 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,387 INFO sqlalchemy.engine.base.Engine SHOW VARIABLES LIKE 'lower_case_table_names'
2020-08-03 15:54:52,387 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,389 INFO sqlalchemy.engine.base.Engine SELECT DATABASE()
2020-08-03 15:54:52,389 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,390 INFO sqlalchemy.engine.base.Engine show collation where `Charset` = 'utf8mb4' and `Collation` = 'utf8mb4_bin'
2020-08-03 15:54:52,390 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,391 INFO sqlalchemy.engine.base.Engine SELECT CAST('test plain returns' AS CHAR(60)) AS anon_1
2020-08-03 15:54:52,391 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,392 INFO sqlalchemy.engine.base.Engine SELECT CAST('test unicode returns' AS CHAR(60)) AS anon_1
2020-08-03 15:54:52,392 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,393 INFO sqlalchemy.engine.base.Engine SELECT CAST('test collated returns' AS CHAR CHARACTER SET utf8mb4) COLLATE utf8mb4_bin AS anon_1
2020-08-03 15:54:52,393 INFO sqlalchemy.engine.base.Engine {}
D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py:170: Warning: (1366, "Incorrect string value: '\\xD6\\xD0\\xB9\\xFA\\xB1\\xEA...' for column 'VARIABLE_VALUE' at row 485")
result = self._query(query)
2020-08-03 15:54:52,404 INFO sqlalchemy.engine.base.Engine SHOW CREATE TABLE `SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1`
2020-08-03 15:54:52,404 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,405 INFO sqlalchemy.engine.base.Engine ROLLBACK
Traceback (most recent call last):
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1278, in _execute_context
cursor, statement, parameters, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 593, in do_execute
cursor.execute(statement, parameters)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 170, in execute
result = self._query(query)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 328, in _query
conn.query(q)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 517, in query
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 732, in _read_query_result
result.read()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 1075, in read
first_packet = self.connection._read_packet()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 684, in _read_packet
packet.check_error()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\protocol.py", line 220, in check_error
err.raise_mysql_exception(self._data)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\err.py", line 109, in raise_mysql_exception
raise errorclass(errno, errval)
pymysql.err.InternalError: (1059, "Identifier name 'SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1' is too long")
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "E:/mycode/read_data.py", line 7,
df1 = md.read_sql('SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1', con=engine)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 577, in read_sql
low_limit=low_limit, high_limit=high_limit)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 479, in _read_sql
return op(test_rows, chunk_size)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 222, in __call__
selectable = self._get_selectable(con)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 175, in _get_selectable
autoload_with=engine_or_conn, schema=self._schema)
File "<string>", line 2, in __new__
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\deprecations.py", line 139, in warned
return fn(*args, **kwargs)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 559, in __new__
metadata._remove_table(name, schema)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\langhelpers.py", line 69, in __exit__
exc_value, with_traceback=exc_tb,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\compat.py", line 178, in raise_
raise exception
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 554, in __new__
table._init(name, metadata, *args, **kw)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 648, in _init
resolve_fks=resolve_fks,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 672, in _autoload
_extend_on=_extend_on,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1654, in run_callable
return callable_(self, *args, **kwargs)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 470, in reflecttable
table, include_columns, exclude_columns, resolve_fks, **opts
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 649, in reflecttable
table_name, schema, **table.dialect_kwargs
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 314, in get_table_options
self.bind, table_name, schema, info_cache=self.info_cache, **kw
File "<string>", line 2, in get_table_options
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 52, in cache
ret = fn(self, con, *args, **kw)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2624, in get_table_options
connection, table_name, schema, **kw
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2870, in _parsed_state_or_create
info_cache=kw.get("info_cache", None),
File "<string>", line 2, in _setup_parser
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 52, in cache
ret = fn(self, con, *args, **kw)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2898, in _setup_parser
connection, None, charset, full_name=full_name
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2998, in _show_create_table
).execute(st)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1006, in execute
return self._execute_text(object_, multiparams, params)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1181, in _execute_text
parameters,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1318, in _execute_context
e, statement, parameters, cursor, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1512, in _handle_dbapi_exception
sqlalchemy_exception, with_traceback=exc_info[2], from_=e
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\compat.py", line 178, in raise_
raise exception
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1278, in _execute_context
cursor, statement, parameters, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 593, in do_execute
cursor.execute(statement, parameters)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 170, in execute
result = self._query(query)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 328, in _query
conn.query(q)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 517, in query
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 732, in _read_query_result
result.read()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 1075, in read
first_packet = self.connection._read_packet()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 684, in _read_packet
packet.check_error()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\protocol.py", line 220, in check_error
err.raise_mysql_exception(self._data)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\err.py", line 109, in raise_mysql_exception
raise errorclass(errno, errval)
sqlalchemy.exc.InternalError: (pymysql.err.InternalError) (1059, "Identifier name 'SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1' is too long")
[SQL: SHOW CREATE TABLE `SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1`]
(Background on this error at: http://sqlalche.me/e/13/2j85)
Process finished with exit code 1
|
pymysql.err.InternalError
|
def rechunk(a, chunk_size, threshold=None, chunk_size_limit=None):
if isinstance(a, DATAFRAME_TYPE):
itemsize = max(getattr(dt, "itemsize", 8) for dt in a.dtypes)
else:
itemsize = a.dtype.itemsize
chunk_size = get_nsplits(a, chunk_size, itemsize)
if chunk_size == a.nsplits:
return a
op = DataFrameRechunk(chunk_size, threshold, chunk_size_limit)
return op(a)
|
def rechunk(a, chunk_size, threshold=None, chunk_size_limit=None):
if isinstance(a, DATAFRAME_TYPE):
itemsize = max(dt.itemsize for dt in a.dtypes)
else:
itemsize = a.dtype.itemsize
chunk_size = get_nsplits(a, chunk_size, itemsize)
if chunk_size == a.nsplits:
return a
op = DataFrameRechunk(chunk_size, threshold, chunk_size_limit)
return op(a)
|
https://github.com/mars-project/mars/issues/1448
|
D:\Anaconda3\envs\py37\python.exe E:/mycode/read_data.py
2020-08-03 15:54:52,383 INFO sqlalchemy.engine.base.Engine SHOW VARIABLES LIKE 'sql_mode'
2020-08-03 15:54:52,383 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,387 INFO sqlalchemy.engine.base.Engine SHOW VARIABLES LIKE 'lower_case_table_names'
2020-08-03 15:54:52,387 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,389 INFO sqlalchemy.engine.base.Engine SELECT DATABASE()
2020-08-03 15:54:52,389 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,390 INFO sqlalchemy.engine.base.Engine show collation where `Charset` = 'utf8mb4' and `Collation` = 'utf8mb4_bin'
2020-08-03 15:54:52,390 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,391 INFO sqlalchemy.engine.base.Engine SELECT CAST('test plain returns' AS CHAR(60)) AS anon_1
2020-08-03 15:54:52,391 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,392 INFO sqlalchemy.engine.base.Engine SELECT CAST('test unicode returns' AS CHAR(60)) AS anon_1
2020-08-03 15:54:52,392 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,393 INFO sqlalchemy.engine.base.Engine SELECT CAST('test collated returns' AS CHAR CHARACTER SET utf8mb4) COLLATE utf8mb4_bin AS anon_1
2020-08-03 15:54:52,393 INFO sqlalchemy.engine.base.Engine {}
D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py:170: Warning: (1366, "Incorrect string value: '\\xD6\\xD0\\xB9\\xFA\\xB1\\xEA...' for column 'VARIABLE_VALUE' at row 485")
result = self._query(query)
2020-08-03 15:54:52,404 INFO sqlalchemy.engine.base.Engine SHOW CREATE TABLE `SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1`
2020-08-03 15:54:52,404 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,405 INFO sqlalchemy.engine.base.Engine ROLLBACK
Traceback (most recent call last):
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1278, in _execute_context
cursor, statement, parameters, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 593, in do_execute
cursor.execute(statement, parameters)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 170, in execute
result = self._query(query)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 328, in _query
conn.query(q)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 517, in query
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 732, in _read_query_result
result.read()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 1075, in read
first_packet = self.connection._read_packet()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 684, in _read_packet
packet.check_error()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\protocol.py", line 220, in check_error
err.raise_mysql_exception(self._data)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\err.py", line 109, in raise_mysql_exception
raise errorclass(errno, errval)
pymysql.err.InternalError: (1059, "Identifier name 'SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1' is too long")
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "E:/mycode/read_data.py", line 7,
df1 = md.read_sql('SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1', con=engine)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 577, in read_sql
low_limit=low_limit, high_limit=high_limit)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 479, in _read_sql
return op(test_rows, chunk_size)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 222, in __call__
selectable = self._get_selectable(con)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 175, in _get_selectable
autoload_with=engine_or_conn, schema=self._schema)
File "<string>", line 2, in __new__
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\deprecations.py", line 139, in warned
return fn(*args, **kwargs)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 559, in __new__
metadata._remove_table(name, schema)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\langhelpers.py", line 69, in __exit__
exc_value, with_traceback=exc_tb,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\compat.py", line 178, in raise_
raise exception
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 554, in __new__
table._init(name, metadata, *args, **kw)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 648, in _init
resolve_fks=resolve_fks,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 672, in _autoload
_extend_on=_extend_on,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1654, in run_callable
return callable_(self, *args, **kwargs)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 470, in reflecttable
table, include_columns, exclude_columns, resolve_fks, **opts
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 649, in reflecttable
table_name, schema, **table.dialect_kwargs
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 314, in get_table_options
self.bind, table_name, schema, info_cache=self.info_cache, **kw
File "<string>", line 2, in get_table_options
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 52, in cache
ret = fn(self, con, *args, **kw)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2624, in get_table_options
connection, table_name, schema, **kw
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2870, in _parsed_state_or_create
info_cache=kw.get("info_cache", None),
File "<string>", line 2, in _setup_parser
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 52, in cache
ret = fn(self, con, *args, **kw)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2898, in _setup_parser
connection, None, charset, full_name=full_name
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2998, in _show_create_table
).execute(st)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1006, in execute
return self._execute_text(object_, multiparams, params)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1181, in _execute_text
parameters,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1318, in _execute_context
e, statement, parameters, cursor, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1512, in _handle_dbapi_exception
sqlalchemy_exception, with_traceback=exc_info[2], from_=e
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\compat.py", line 178, in raise_
raise exception
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1278, in _execute_context
cursor, statement, parameters, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 593, in do_execute
cursor.execute(statement, parameters)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 170, in execute
result = self._query(query)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 328, in _query
conn.query(q)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 517, in query
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 732, in _read_query_result
result.read()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 1075, in read
first_packet = self.connection._read_packet()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 684, in _read_packet
packet.check_error()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\protocol.py", line 220, in check_error
err.raise_mysql_exception(self._data)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\err.py", line 109, in raise_mysql_exception
raise errorclass(errno, errval)
sqlalchemy.exc.InternalError: (pymysql.err.InternalError) (1059, "Identifier name 'SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1' is too long")
[SQL: SHOW CREATE TABLE `SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1`]
(Background on this error at: http://sqlalche.me/e/13/2j85)
Process finished with exit code 1
|
pymysql.err.InternalError
|
def _get_selectable(self, engine_or_conn, columns=None):
import sqlalchemy as sa
from sqlalchemy import sql
from sqlalchemy.exc import SQLAlchemyError
# process table_name
if self._selectable is not None:
selectable = self._selectable
else:
if isinstance(self._table_or_sql, sa.Table):
selectable = self._table_or_sql
self._table_or_sql = selectable.name
else:
m = sa.MetaData()
try:
selectable = sa.Table(
self._table_or_sql,
m,
autoload=True,
autoload_with=engine_or_conn,
schema=self._schema,
)
except SQLAlchemyError:
temp_name_1 = "t1_" + binascii.b2a_hex(uuid.uuid4().bytes).decode()
temp_name_2 = "t2_" + binascii.b2a_hex(uuid.uuid4().bytes).decode()
if columns:
selectable = (
sql.text(self._table_or_sql)
.columns(*[sql.column(c) for c in columns])
.alias(temp_name_2)
)
else:
selectable = sql.select(
"*",
from_obj=sql.text(
"(%s) AS %s" % (self._table_or_sql, temp_name_1)
),
).alias(temp_name_2)
self._selectable = selectable
return selectable
|
def _get_selectable(self, engine_or_conn, columns=None):
import sqlalchemy as sa
from sqlalchemy import sql
from sqlalchemy.exc import NoSuchTableError
# process table_name
if self._selectable is not None:
selectable = self._selectable
else:
if isinstance(self._table_or_sql, sa.Table):
selectable = self._table_or_sql
self._table_or_sql = selectable.name
else:
m = sa.MetaData()
try:
selectable = sa.Table(
self._table_or_sql,
m,
autoload=True,
autoload_with=engine_or_conn,
schema=self._schema,
)
except NoSuchTableError:
temp_name_1 = "t1_" + binascii.b2a_hex(uuid.uuid4().bytes).decode()
temp_name_2 = "t2_" + binascii.b2a_hex(uuid.uuid4().bytes).decode()
if columns:
selectable = (
sql.text(self._table_or_sql)
.columns(*[sql.column(c) for c in columns])
.alias(temp_name_2)
)
else:
selectable = sql.select(
"*",
from_obj=sql.text(
"(%s) AS %s" % (self._table_or_sql, temp_name_1)
),
).alias(temp_name_2)
self._selectable = selectable
return selectable
|
https://github.com/mars-project/mars/issues/1448
|
D:\Anaconda3\envs\py37\python.exe E:/mycode/read_data.py
2020-08-03 15:54:52,383 INFO sqlalchemy.engine.base.Engine SHOW VARIABLES LIKE 'sql_mode'
2020-08-03 15:54:52,383 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,387 INFO sqlalchemy.engine.base.Engine SHOW VARIABLES LIKE 'lower_case_table_names'
2020-08-03 15:54:52,387 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,389 INFO sqlalchemy.engine.base.Engine SELECT DATABASE()
2020-08-03 15:54:52,389 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,390 INFO sqlalchemy.engine.base.Engine show collation where `Charset` = 'utf8mb4' and `Collation` = 'utf8mb4_bin'
2020-08-03 15:54:52,390 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,391 INFO sqlalchemy.engine.base.Engine SELECT CAST('test plain returns' AS CHAR(60)) AS anon_1
2020-08-03 15:54:52,391 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,392 INFO sqlalchemy.engine.base.Engine SELECT CAST('test unicode returns' AS CHAR(60)) AS anon_1
2020-08-03 15:54:52,392 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,393 INFO sqlalchemy.engine.base.Engine SELECT CAST('test collated returns' AS CHAR CHARACTER SET utf8mb4) COLLATE utf8mb4_bin AS anon_1
2020-08-03 15:54:52,393 INFO sqlalchemy.engine.base.Engine {}
D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py:170: Warning: (1366, "Incorrect string value: '\\xD6\\xD0\\xB9\\xFA\\xB1\\xEA...' for column 'VARIABLE_VALUE' at row 485")
result = self._query(query)
2020-08-03 15:54:52,404 INFO sqlalchemy.engine.base.Engine SHOW CREATE TABLE `SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1`
2020-08-03 15:54:52,404 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,405 INFO sqlalchemy.engine.base.Engine ROLLBACK
Traceback (most recent call last):
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1278, in _execute_context
cursor, statement, parameters, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 593, in do_execute
cursor.execute(statement, parameters)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 170, in execute
result = self._query(query)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 328, in _query
conn.query(q)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 517, in query
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 732, in _read_query_result
result.read()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 1075, in read
first_packet = self.connection._read_packet()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 684, in _read_packet
packet.check_error()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\protocol.py", line 220, in check_error
err.raise_mysql_exception(self._data)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\err.py", line 109, in raise_mysql_exception
raise errorclass(errno, errval)
pymysql.err.InternalError: (1059, "Identifier name 'SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1' is too long")
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "E:/mycode/read_data.py", line 7,
df1 = md.read_sql('SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1', con=engine)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 577, in read_sql
low_limit=low_limit, high_limit=high_limit)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 479, in _read_sql
return op(test_rows, chunk_size)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 222, in __call__
selectable = self._get_selectable(con)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 175, in _get_selectable
autoload_with=engine_or_conn, schema=self._schema)
File "<string>", line 2, in __new__
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\deprecations.py", line 139, in warned
return fn(*args, **kwargs)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 559, in __new__
metadata._remove_table(name, schema)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\langhelpers.py", line 69, in __exit__
exc_value, with_traceback=exc_tb,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\compat.py", line 178, in raise_
raise exception
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 554, in __new__
table._init(name, metadata, *args, **kw)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 648, in _init
resolve_fks=resolve_fks,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 672, in _autoload
_extend_on=_extend_on,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1654, in run_callable
return callable_(self, *args, **kwargs)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 470, in reflecttable
table, include_columns, exclude_columns, resolve_fks, **opts
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 649, in reflecttable
table_name, schema, **table.dialect_kwargs
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 314, in get_table_options
self.bind, table_name, schema, info_cache=self.info_cache, **kw
File "<string>", line 2, in get_table_options
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 52, in cache
ret = fn(self, con, *args, **kw)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2624, in get_table_options
connection, table_name, schema, **kw
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2870, in _parsed_state_or_create
info_cache=kw.get("info_cache", None),
File "<string>", line 2, in _setup_parser
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 52, in cache
ret = fn(self, con, *args, **kw)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2898, in _setup_parser
connection, None, charset, full_name=full_name
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2998, in _show_create_table
).execute(st)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1006, in execute
return self._execute_text(object_, multiparams, params)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1181, in _execute_text
parameters,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1318, in _execute_context
e, statement, parameters, cursor, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1512, in _handle_dbapi_exception
sqlalchemy_exception, with_traceback=exc_info[2], from_=e
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\compat.py", line 178, in raise_
raise exception
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1278, in _execute_context
cursor, statement, parameters, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 593, in do_execute
cursor.execute(statement, parameters)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 170, in execute
result = self._query(query)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 328, in _query
conn.query(q)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 517, in query
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 732, in _read_query_result
result.read()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 1075, in read
first_packet = self.connection._read_packet()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 684, in _read_packet
packet.check_error()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\protocol.py", line 220, in check_error
err.raise_mysql_exception(self._data)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\err.py", line 109, in raise_mysql_exception
raise errorclass(errno, errval)
sqlalchemy.exc.InternalError: (pymysql.err.InternalError) (1059, "Identifier name 'SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1' is too long")
[SQL: SHOW CREATE TABLE `SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1`]
(Background on this error at: http://sqlalche.me/e/13/2j85)
Process finished with exit code 1
|
pymysql.err.InternalError
|
def arrow_table_to_pandas_dataframe(arrow_table, use_arrow_dtype=True, **kw):
if not use_arrow_dtype:
# if not use arrow string, just return
return arrow_table.to_pandas(**kw)
from .arrays import ArrowStringArray
table: pa.Table = arrow_table
schema: pa.Schema = arrow_table.schema
string_field_names = list()
string_arrays = list()
string_indexes = list()
other_field_names = list()
other_arrays = list()
for i, arrow_type in enumerate(schema.types):
if arrow_type == pa.string():
string_field_names.append(schema.names[i])
string_indexes.append(i)
string_arrays.append(table.columns[i])
else:
other_field_names.append(schema.names[i])
other_arrays.append(table.columns[i])
df: pd.DataFrame = pa.Table.from_arrays(
other_arrays, names=other_field_names
).to_pandas(**kw)
for string_index, string_name, string_array in zip(
string_indexes, string_field_names, string_arrays
):
df.insert(string_index, string_name, pd.Series(ArrowStringArray(string_array)))
return df
|
def arrow_table_to_pandas_dataframe(arrow_table, use_arrow_string=True, **kw):
if not use_arrow_string:
# if not use arrow string, just return
return arrow_table.to_pandas(**kw)
from .arrays import ArrowStringArray
table: pa.Table = arrow_table
schema: pa.Schema = arrow_table.schema
string_field_names = list()
string_arrays = list()
string_indexes = list()
other_field_names = list()
other_arrays = list()
for i, arrow_type in enumerate(schema.types):
if arrow_type == pa.string():
string_field_names.append(schema.names[i])
string_indexes.append(i)
string_arrays.append(table.columns[i])
else:
other_field_names.append(schema.names[i])
other_arrays.append(table.columns[i])
df: pd.DataFrame = pa.Table.from_arrays(
other_arrays, names=other_field_names
).to_pandas(**kw)
for string_index, string_name, string_array in zip(
string_indexes, string_field_names, string_arrays
):
df.insert(string_index, string_name, pd.Series(ArrowStringArray(string_array)))
return df
|
https://github.com/mars-project/mars/issues/1448
|
D:\Anaconda3\envs\py37\python.exe E:/mycode/read_data.py
2020-08-03 15:54:52,383 INFO sqlalchemy.engine.base.Engine SHOW VARIABLES LIKE 'sql_mode'
2020-08-03 15:54:52,383 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,387 INFO sqlalchemy.engine.base.Engine SHOW VARIABLES LIKE 'lower_case_table_names'
2020-08-03 15:54:52,387 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,389 INFO sqlalchemy.engine.base.Engine SELECT DATABASE()
2020-08-03 15:54:52,389 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,390 INFO sqlalchemy.engine.base.Engine show collation where `Charset` = 'utf8mb4' and `Collation` = 'utf8mb4_bin'
2020-08-03 15:54:52,390 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,391 INFO sqlalchemy.engine.base.Engine SELECT CAST('test plain returns' AS CHAR(60)) AS anon_1
2020-08-03 15:54:52,391 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,392 INFO sqlalchemy.engine.base.Engine SELECT CAST('test unicode returns' AS CHAR(60)) AS anon_1
2020-08-03 15:54:52,392 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,393 INFO sqlalchemy.engine.base.Engine SELECT CAST('test collated returns' AS CHAR CHARACTER SET utf8mb4) COLLATE utf8mb4_bin AS anon_1
2020-08-03 15:54:52,393 INFO sqlalchemy.engine.base.Engine {}
D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py:170: Warning: (1366, "Incorrect string value: '\\xD6\\xD0\\xB9\\xFA\\xB1\\xEA...' for column 'VARIABLE_VALUE' at row 485")
result = self._query(query)
2020-08-03 15:54:52,404 INFO sqlalchemy.engine.base.Engine SHOW CREATE TABLE `SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1`
2020-08-03 15:54:52,404 INFO sqlalchemy.engine.base.Engine {}
2020-08-03 15:54:52,405 INFO sqlalchemy.engine.base.Engine ROLLBACK
Traceback (most recent call last):
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1278, in _execute_context
cursor, statement, parameters, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 593, in do_execute
cursor.execute(statement, parameters)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 170, in execute
result = self._query(query)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 328, in _query
conn.query(q)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 517, in query
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 732, in _read_query_result
result.read()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 1075, in read
first_packet = self.connection._read_packet()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 684, in _read_packet
packet.check_error()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\protocol.py", line 220, in check_error
err.raise_mysql_exception(self._data)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\err.py", line 109, in raise_mysql_exception
raise errorclass(errno, errval)
pymysql.err.InternalError: (1059, "Identifier name 'SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1' is too long")
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "E:/mycode/read_data.py", line 7,
df1 = md.read_sql('SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1', con=engine)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 577, in read_sql
low_limit=low_limit, high_limit=high_limit)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 479, in _read_sql
return op(test_rows, chunk_size)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 222, in __call__
selectable = self._get_selectable(con)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 175, in _get_selectable
autoload_with=engine_or_conn, schema=self._schema)
File "<string>", line 2, in __new__
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\deprecations.py", line 139, in warned
return fn(*args, **kwargs)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 559, in __new__
metadata._remove_table(name, schema)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\langhelpers.py", line 69, in __exit__
exc_value, with_traceback=exc_tb,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\compat.py", line 178, in raise_
raise exception
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 554, in __new__
table._init(name, metadata, *args, **kw)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 648, in _init
resolve_fks=resolve_fks,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\schema.py", line 672, in _autoload
_extend_on=_extend_on,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1654, in run_callable
return callable_(self, *args, **kwargs)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 470, in reflecttable
table, include_columns, exclude_columns, resolve_fks, **opts
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 649, in reflecttable
table_name, schema, **table.dialect_kwargs
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 314, in get_table_options
self.bind, table_name, schema, info_cache=self.info_cache, **kw
File "<string>", line 2, in get_table_options
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 52, in cache
ret = fn(self, con, *args, **kw)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2624, in get_table_options
connection, table_name, schema, **kw
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2870, in _parsed_state_or_create
info_cache=kw.get("info_cache", None),
File "<string>", line 2, in _setup_parser
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\reflection.py", line 52, in cache
ret = fn(self, con, *args, **kw)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2898, in _setup_parser
connection, None, charset, full_name=full_name
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\dialects\mysql\base.py", line 2998, in _show_create_table
).execute(st)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1006, in execute
return self._execute_text(object_, multiparams, params)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1181, in _execute_text
parameters,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1318, in _execute_context
e, statement, parameters, cursor, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1512, in _handle_dbapi_exception
sqlalchemy_exception, with_traceback=exc_info[2], from_=e
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\compat.py", line 178, in raise_
raise exception
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1278, in _execute_context
cursor, statement, parameters, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 593, in do_execute
cursor.execute(statement, parameters)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 170, in execute
result = self._query(query)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\cursors.py", line 328, in _query
conn.query(q)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 517, in query
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 732, in _read_query_result
result.read()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 1075, in read
first_packet = self.connection._read_packet()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\connections.py", line 684, in _read_packet
packet.check_error()
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\protocol.py", line 220, in check_error
err.raise_mysql_exception(self._data)
File "D:\Anaconda3\envs\py37\lib\site-packages\pymysql\err.py", line 109, in raise_mysql_exception
raise errorclass(errno, errval)
sqlalchemy.exc.InternalError: (pymysql.err.InternalError) (1059, "Identifier name 'SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1' is too long")
[SQL: SHOW CREATE TABLE `SELECT S_INFO_UNIQUECODE, REPORT_PERIOD, MYFIRST_INDICATOR FROM databasetable1`]
(Background on this error at: http://sqlalche.me/e/13/2j85)
Process finished with exit code 1
|
pymysql.err.InternalError
|
def to_pandas(self):
data = getattr(self, "_data", None)
if data is None:
sortorder = getattr(self, "_sortorder", None)
return pd.MultiIndex.from_arrays(
[[] for _ in range(len(self._names))],
sortorder=sortorder,
names=self._names,
)
return pd.MultiIndex.from_tuples(
[tuple(d) for d in data], sortorder=self._sortorder, names=self._names
)
|
def to_pandas(self):
data = getattr(self, "_data", None)
if data is None:
return pd.MultiIndex.from_arrays(
[[] for _ in range(len(self._names))],
sortorder=self._sortorder,
names=self._names,
)
return pd.MultiIndex.from_tuples(
[tuple(d) for d in data], sortorder=self._sortorder, names=self._names
)
|
https://github.com/mars-project/mars/issues/1433
|
In [3]: import pandas as pd
In [4]: data = pd.DataFrame(np.arange(20).reshape((4, 5)) + 1, columns=['a', 'b', 'c', 'd', 'e'])
In [6]: df = md.DataFrame(data)
In [7]: df.groupby(['a','b']).size().execute()
Unexpected exception occurred in enter_build_mode.<locals>.inner.
Traceback (most recent call last):
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 633, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 350, in build
tileables, tileable_graph=tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 263, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 302, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 243, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 338, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 615, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/context.py", line 69, in h
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 481, in tile
return cls._tile_with_tree(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 412, in _tile_with_tree
index = out_df.index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 283, in to_pandas
return self._index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 197, in to_pandas
sortorder=self._sortorder, names=self._names)
AttributeError: _sortorder
|
AttributeError
|
def _tile_offset(cls, op: "DataFrameReadSQL"):
df = op.outputs[0]
if op.row_memory_usage is not None:
# Data selected
chunk_size = df.extra_params.raw_chunk_size or options.chunk_size
if chunk_size is None:
chunk_size = (
int(options.chunk_store_limit / op.row_memory_usage),
df.shape[1],
)
row_chunk_sizes = normalize_chunk_sizes(df.shape, chunk_size)[0]
else:
# No data selected
row_chunk_sizes = (0,)
offsets = np.cumsum((0,) + row_chunk_sizes).tolist()
out_chunks = []
for i, row_size in enumerate(row_chunk_sizes):
chunk_op = op.copy().reset_key()
chunk_op._row_memory_usage = None # no need for chunk
offset = chunk_op._offset = offsets[i]
if df.index_value.has_value():
# range index
index_value = parse_index(
df.index_value.to_pandas()[offset : offsets[i + 1]]
)
else:
index_value = parse_index(
df.index_value.to_pandas(),
op.table_or_sql or str(op.selectable),
op.con,
i,
row_size,
)
out_chunk = chunk_op.new_chunk(
None,
shape=(row_size, df.shape[1]),
columns_value=df.columns_value,
index_value=index_value,
dtypes=df.dtypes,
index=(i, 0),
)
out_chunks.append(out_chunk)
nsplits = (row_chunk_sizes, (df.shape[1],))
new_op = op.copy()
return new_op.new_dataframes(None, chunks=out_chunks, nsplits=nsplits, **df.params)
|
def _tile_offset(cls, op: "DataFrameReadSQL"):
df = op.outputs[0]
if op.row_memory_usage is not None:
# Data selected
chunk_size = df.extra_params.raw_chunk_size or options.chunk_size
if chunk_size is None:
chunk_size = (
int(options.chunk_store_limit / op.row_memory_usage),
df.shape[1],
)
row_chunk_sizes = normalize_chunk_sizes(df.shape, chunk_size)[0]
else:
# No data selected
row_chunk_sizes = (0,)
offsets = np.cumsum((0,) + row_chunk_sizes)
out_chunks = []
for i, row_size in enumerate(row_chunk_sizes):
chunk_op = op.copy().reset_key()
chunk_op._row_memory_usage = None # no need for chunk
offset = chunk_op._offset = offsets[i]
if df.index_value.has_value():
# range index
index_value = parse_index(
df.index_value.to_pandas()[offset : offsets[i + 1]]
)
else:
index_value = parse_index(
df.index_value.to_pandas(),
op.table_or_sql or str(op.selectable),
op.con,
i,
row_size,
)
out_chunk = chunk_op.new_chunk(
None,
shape=(row_size, df.shape[1]),
columns_value=df.columns_value,
index_value=index_value,
dtypes=df.dtypes,
index=(i, 0),
)
out_chunks.append(out_chunk)
nsplits = (row_chunk_sizes, (df.shape[1],))
new_op = op.copy()
return new_op.new_dataframes(None, chunks=out_chunks, nsplits=nsplits, **df.params)
|
https://github.com/mars-project/mars/issues/1433
|
In [3]: import pandas as pd
In [4]: data = pd.DataFrame(np.arange(20).reshape((4, 5)) + 1, columns=['a', 'b', 'c', 'd', 'e'])
In [6]: df = md.DataFrame(data)
In [7]: df.groupby(['a','b']).size().execute()
Unexpected exception occurred in enter_build_mode.<locals>.inner.
Traceback (most recent call last):
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 633, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 350, in build
tileables, tileable_graph=tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 263, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 302, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 243, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 338, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 615, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/context.py", line 69, in h
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 481, in tile
return cls._tile_with_tree(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 412, in _tile_with_tree
index = out_df.index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 283, in to_pandas
return self._index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 197, in to_pandas
sortorder=self._sortorder, names=self._names)
AttributeError: _sortorder
|
AttributeError
|
def _calc_bool_index_param(
cls, input_index_value: IndexValue, pd_index: pd.Index, inp, index, axis: int
) -> Dict:
param = dict()
if input_index_value.has_value():
if isinstance(index, np.ndarray):
filtered_index = pd_index[index]
param["shape"] = len(filtered_index)
param["index_value"] = parse_index(filtered_index, store_data=axis == 1)
if axis == 1:
param["dtypes"] = inp.dtypes[index]
else:
# tensor, cannot be indexer on axis 1
assert axis == 0
param["shape"] = np.nan
param["index_value"] = parse_index(
pd.Index([], dtype=pd_index.dtype), inp, index, store_data=False
)
else:
assert axis == 0
if isinstance(index, np.ndarray):
param["shape"] = int(index.sum())
else:
param["shape"] = np.nan
param["index_value"] = parse_index(pd_index, inp, index, store_data=False)
return param
|
def _calc_bool_index_param(
cls, input_index_value: IndexValue, pd_index: pd.Index, inp, index, axis: int
) -> Dict:
param = dict()
if input_index_value.has_value():
if isinstance(index, np.ndarray):
filtered_index = pd_index[index]
param["shape"] = len(filtered_index)
param["index_value"] = parse_index(filtered_index, store_data=axis == 1)
if axis == 1:
param["dtypes"] = inp.dtypes[index]
else:
# tensor, cannot be indexer on axis 1
assert axis == 0
param["shape"] = np.nan
param["index_value"] = parse_index(
pd.Index([], dtype=pd_index.dtype), inp, index, store_data=False
)
else:
assert axis == 0
if isinstance(index, np.ndarray):
param["shape"] = index.sum()
else:
param["shape"] = np.nan
param["index_value"] = parse_index(pd_index, inp, index, store_data=False)
return param
|
https://github.com/mars-project/mars/issues/1433
|
In [3]: import pandas as pd
In [4]: data = pd.DataFrame(np.arange(20).reshape((4, 5)) + 1, columns=['a', 'b', 'c', 'd', 'e'])
In [6]: df = md.DataFrame(data)
In [7]: df.groupby(['a','b']).size().execute()
Unexpected exception occurred in enter_build_mode.<locals>.inner.
Traceback (most recent call last):
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 633, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 350, in build
tileables, tileable_graph=tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 263, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 302, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 243, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 338, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 615, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/context.py", line 69, in h
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 481, in tile
return cls._tile_with_tree(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 412, in _tile_with_tree
index = out_df.index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 283, in to_pandas
return self._index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 197, in to_pandas
sortorder=self._sortorder, names=self._names)
AttributeError: _sortorder
|
AttributeError
|
def shuffle(*arrays, **options):
arrays = [convert_to_tensor_or_dataframe(ar) for ar in arrays]
axes = options.pop("axes", (0,))
if not isinstance(axes, Iterable):
axes = (axes,)
elif not isinstance(axes, tuple):
axes = tuple(axes)
random_state = check_random_state(options.pop("random_state", None)).to_numpy()
if options:
raise TypeError(
"shuffle() got an unexpected keyword argument {0}".format(
next(iter(options))
)
)
max_ndim = max(ar.ndim for ar in arrays)
axes = tuple(np.unique([validate_axis(max_ndim, ax) for ax in axes]).tolist())
seeds = gen_random_seeds(len(axes), random_state)
# verify shape
for ax in axes:
shapes = {ar.shape[ax] for ar in arrays if ax < ar.ndim}
if len(shapes) > 1:
raise ValueError("arrays do not have same shape on axis {0}".format(ax))
op = LearnShuffle(axes=axes, seeds=seeds, output_types=get_output_types(*arrays))
shuffled_arrays = op(arrays)
if len(arrays) == 1:
return shuffled_arrays[0]
else:
return ExecutableTuple(shuffled_arrays)
|
def shuffle(*arrays, **options):
arrays = [convert_to_tensor_or_dataframe(ar) for ar in arrays]
axes = options.pop("axes", (0,))
if not isinstance(axes, Iterable):
axes = (axes,)
elif not isinstance(axes, tuple):
axes = tuple(axes)
random_state = check_random_state(options.pop("random_state", None)).to_numpy()
if options:
raise TypeError(
"shuffle() got an unexpected keyword argument {0}".format(
next(iter(options))
)
)
max_ndim = max(ar.ndim for ar in arrays)
axes = tuple(np.unique([validate_axis(max_ndim, ax) for ax in axes]))
seeds = gen_random_seeds(len(axes), random_state)
# verify shape
for ax in axes:
shapes = {ar.shape[ax] for ar in arrays if ax < ar.ndim}
if len(shapes) > 1:
raise ValueError("arrays do not have same shape on axis {0}".format(ax))
op = LearnShuffle(
axes=axes, seeds=tuple(seeds), output_types=get_output_types(*arrays)
)
shuffled_arrays = op(arrays)
if len(arrays) == 1:
return shuffled_arrays[0]
else:
return ExecutableTuple(shuffled_arrays)
|
https://github.com/mars-project/mars/issues/1433
|
In [3]: import pandas as pd
In [4]: data = pd.DataFrame(np.arange(20).reshape((4, 5)) + 1, columns=['a', 'b', 'c', 'd', 'e'])
In [6]: df = md.DataFrame(data)
In [7]: df.groupby(['a','b']).size().execute()
Unexpected exception occurred in enter_build_mode.<locals>.inner.
Traceback (most recent call last):
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 633, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 350, in build
tileables, tileable_graph=tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 263, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 302, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 243, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 338, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 615, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/context.py", line 69, in h
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 481, in tile
return cls._tile_with_tree(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 412, in _tile_with_tree
index = out_df.index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 283, in to_pandas
return self._index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 197, in to_pandas
sortorder=self._sortorder, names=self._names)
AttributeError: _sortorder
|
AttributeError
|
def __call__(self, a, repeats):
axis = self._axis
a = astensor(a)
if axis is None:
a = ravel(a)
ax = axis or 0
if not isinstance(repeats, Integral):
if not isinstance(repeats, Tensor):
repeats = np.asarray(repeats)
if repeats.size == 1:
repeats = int(repeats[0])
size = repeats * a.shape[axis or 0]
elif a.shape[ax] == 1:
size = repeats = int(repeats.sum())
else:
size = int(repeats.sum())
else:
size = np.nan
if not isinstance(repeats, Integral):
if repeats.ndim != 1:
raise ValueError("repeats should be 1-d tensor")
broadcast_shape(repeats.shape, a.shape[ax : ax + 1])
else:
size = a.shape[axis or 0] * repeats
shape = a.shape[:ax] + (size,) + a.shape[ax + 1 :]
self._dtype = a.dtype
self._sparse = a.issparse()
inputs = [a]
if isinstance(repeats, Tensor):
inputs.append(repeats)
else:
self._repeats = repeats
return self.new_tensor(inputs, shape, order=TensorOrder.C_ORDER)
|
def __call__(self, a, repeats):
axis = self._axis
a = astensor(a)
if axis is None:
a = ravel(a)
ax = axis or 0
if not isinstance(repeats, Integral):
if not isinstance(repeats, Tensor):
repeats = np.asarray(repeats)
if repeats.size == 1:
repeats = int(repeats[0])
size = repeats * a.shape[axis or 0]
elif a.shape[ax] == 1:
size = repeats = int(repeats.sum())
else:
size = repeats.sum()
else:
size = np.nan
if not isinstance(repeats, Integral):
if repeats.ndim != 1:
raise ValueError("repeats should be 1-d tensor")
broadcast_shape(repeats.shape, a.shape[ax : ax + 1])
else:
size = a.shape[axis or 0] * repeats
shape = a.shape[:ax] + (size,) + a.shape[ax + 1 :]
self._dtype = a.dtype
self._sparse = a.issparse()
inputs = [a]
if isinstance(repeats, Tensor):
inputs.append(repeats)
else:
self._repeats = repeats
return self.new_tensor(inputs, shape, order=TensorOrder.C_ORDER)
|
https://github.com/mars-project/mars/issues/1433
|
In [3]: import pandas as pd
In [4]: data = pd.DataFrame(np.arange(20).reshape((4, 5)) + 1, columns=['a', 'b', 'c', 'd', 'e'])
In [6]: df = md.DataFrame(data)
In [7]: df.groupby(['a','b']).size().execute()
Unexpected exception occurred in enter_build_mode.<locals>.inner.
Traceback (most recent call last):
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 633, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 350, in build
tileables, tileable_graph=tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 263, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 302, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 243, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 338, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 615, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/context.py", line 69, in h
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 481, in tile
return cls._tile_with_tree(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 412, in _tile_with_tree
index = out_df.index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 283, in to_pandas
return self._index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 197, in to_pandas
sortorder=self._sortorder, names=self._names)
AttributeError: _sortorder
|
AttributeError
|
def tile(cls, op):
a = op.input
repeats = op.repeats
axis = op.axis
ax = axis or 0
out = op.outputs[0]
check_chunks_unknown_shape(op.inputs, TilesError)
if isinstance(repeats, TENSOR_TYPE):
a, repeats = unify_chunks(a, (repeats, (ax,)))
nsplit = a.nsplits[axis or 0]
if isinstance(repeats, Integral):
new_nsplit = []
for split in nsplit:
s = max(split // repeats, 1)
c = split // s
new_nsplit.extend([s] * c)
if split % s != 0:
new_nsplit.append(split % s)
a = a.rechunk({ax: new_nsplit})._inplace_tile()
out_chunks = []
ax_cum_count = np.cumsum((0,) + a.nsplits[ax])
is_repeats_ndarray = isinstance(repeats, np.ndarray)
for out_idx in itertools.product(*[range(len(s)) for s in a.nsplits]):
in_chunk = a.cix[out_idx]
ax_idx = out_idx[ax]
if is_repeats_ndarray:
start = ax_cum_count[ax_idx]
stop = ax_cum_count[ax_idx + 1]
rp = repeats[start:stop]
size = int(rp.sum())
elif not isinstance(repeats, Integral):
rp = repeats.cix[ax_idx,]
size = np.nan
else:
rp = repeats
size = in_chunk.shape[ax] * rp
chunk_inputs = [in_chunk]
if isinstance(rp, TENSOR_CHUNK_TYPE):
chunk_inputs.append(rp)
chunk_shape = in_chunk.shape[:ax] + (size,) + in_chunk.shape[ax + 1 :]
chunk_op = op.copy().reset_key()
if len(chunk_inputs) < 2:
# repeats is not chunk
chunk_op._repeats = rp
out_chunk = chunk_op.new_chunk(
chunk_inputs, shape=chunk_shape, index=out_idx, order=out.order
)
out_chunks.append(out_chunk)
nsplits = [
tuple(
c.shape[i]
for c in out_chunks
if all(idx == 0 for j, idx in enumerate(c.index) if j != i)
)
for i in range(len(out_chunks[0].shape))
]
new_op = op.copy()
return new_op.new_tensors(
op.inputs, out.shape, order=out.order, chunks=out_chunks, nsplits=nsplits
)
|
def tile(cls, op):
a = op.input
repeats = op.repeats
axis = op.axis
ax = axis or 0
out = op.outputs[0]
check_chunks_unknown_shape(op.inputs, TilesError)
if isinstance(repeats, TENSOR_TYPE):
a, repeats = unify_chunks(a, (repeats, (ax,)))
nsplit = a.nsplits[axis or 0]
if isinstance(repeats, Integral):
new_nsplit = []
for split in nsplit:
s = max(split // repeats, 1)
c = split // s
new_nsplit.extend([s] * c)
if split % s != 0:
new_nsplit.append(split % s)
a = a.rechunk({ax: new_nsplit})._inplace_tile()
out_chunks = []
ax_cum_count = np.cumsum((0,) + a.nsplits[ax])
is_repeats_ndarray = isinstance(repeats, np.ndarray)
for out_idx in itertools.product(*[range(len(s)) for s in a.nsplits]):
in_chunk = a.cix[out_idx]
ax_idx = out_idx[ax]
if is_repeats_ndarray:
start = ax_cum_count[ax_idx]
stop = ax_cum_count[ax_idx + 1]
rp = repeats[start:stop]
size = rp.sum()
elif not isinstance(repeats, Integral):
rp = repeats.cix[ax_idx,]
size = np.nan
else:
rp = repeats
size = in_chunk.shape[ax] * rp
chunk_inputs = [in_chunk]
if isinstance(rp, TENSOR_CHUNK_TYPE):
chunk_inputs.append(rp)
chunk_shape = in_chunk.shape[:ax] + (size,) + in_chunk.shape[ax + 1 :]
chunk_op = op.copy().reset_key()
if len(chunk_inputs) < 2:
# repeats is not chunk
chunk_op._repeats = rp
out_chunk = chunk_op.new_chunk(
chunk_inputs, shape=chunk_shape, index=out_idx, order=out.order
)
out_chunks.append(out_chunk)
nsplits = [
tuple(
c.shape[i]
for c in out_chunks
if all(idx == 0 for j, idx in enumerate(c.index) if j != i)
)
for i in range(len(out_chunks[0].shape))
]
new_op = op.copy()
return new_op.new_tensors(
op.inputs, out.shape, order=out.order, chunks=out_chunks, nsplits=nsplits
)
|
https://github.com/mars-project/mars/issues/1433
|
In [3]: import pandas as pd
In [4]: data = pd.DataFrame(np.arange(20).reshape((4, 5)) + 1, columns=['a', 'b', 'c', 'd', 'e'])
In [6]: df = md.DataFrame(data)
In [7]: df.groupby(['a','b']).size().execute()
Unexpected exception occurred in enter_build_mode.<locals>.inner.
Traceback (most recent call last):
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 633, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 350, in build
tileables, tileable_graph=tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 263, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 302, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 243, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 338, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 615, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/context.py", line 69, in h
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 481, in tile
return cls._tile_with_tree(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 412, in _tile_with_tree
index = out_df.index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 283, in to_pandas
return self._index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 197, in to_pandas
sortorder=self._sortorder, names=self._names)
AttributeError: _sortorder
|
AttributeError
|
def _tile_via_shuffle(cls, op):
# rechunk the axes except the axis to do unique into 1 chunk
inp = op.inputs[0]
if inp.ndim > 1:
new_chunk_size = dict()
for axis in range(inp.ndim):
if axis == op.axis:
continue
if np.isnan(inp.shape[axis]):
raise TilesError(
"input tensor has unknown shape on axis {}".format(axis)
)
new_chunk_size[axis] = inp.shape[axis]
check_chunks_unknown_shape([inp], TilesError)
inp = inp.rechunk(new_chunk_size)._inplace_tile()
aggregate_size = op.aggregate_size
if aggregate_size is None:
aggregate_size = max(inp.chunk_shape[op.axis] // options.combine_size, 1)
unique_on_chunk_sizes = inp.nsplits[op.axis]
start_poses = np.cumsum((0,) + unique_on_chunk_sizes).tolist()[:-1]
map_chunks = []
for c in inp.chunks:
map_op = TensorUnique(
stage=OperandStage.map,
return_index=op.return_index,
return_inverse=op.return_inverse,
return_counts=op.return_counts,
axis=op.axis,
aggregate_size=aggregate_size,
start_pos=start_poses[c.index[op.axis]],
dtype=inp.dtype,
)
shape = list(c.shape)
shape[op.axis] = np.nan
map_chunks.append(map_op.new_chunk([c], shape=tuple(shape), index=c.index))
shuffle_chunk = TensorShuffleProxy(
dtype=inp.dtype, _tensor_keys=[inp.op.key]
).new_chunk(map_chunks, shape=())
reduce_chunks = [list() for _ in range(len(op.outputs))]
for i in range(aggregate_size):
reduce_op = TensorUnique(
stage=OperandStage.reduce,
return_index=op.return_index,
return_inverse=op.return_inverse,
return_counts=op.return_counts,
axis=op.axis,
aggregate_id=i,
shuffle_key=str(i),
)
kws = cls._gen_kws(op, inp, chunk=True, chunk_index=i)
chunks = reduce_op.new_chunks(
[shuffle_chunk], kws=kws, order=op.outputs[0].order
)
for j, c in enumerate(chunks):
reduce_chunks[j].append(c)
if op.return_inverse:
inverse_pos = 2 if op.return_index else 1
map_inverse_chunks = reduce_chunks[inverse_pos]
inverse_shuffle_chunk = TensorShuffleProxy(
dtype=map_inverse_chunks[0].dtype
).new_chunk(map_inverse_chunks, shape=())
inverse_chunks = []
for j, cs in enumerate(unique_on_chunk_sizes):
chunk_op = TensorUniqueInverseReduce(
dtype=map_inverse_chunks[0].dtype, shuffle_key=str(j)
)
inverse_chunk = chunk_op.new_chunk(
[inverse_shuffle_chunk], shape=(cs,), index=(j,)
)
inverse_chunks.append(inverse_chunk)
reduce_chunks[inverse_pos] = inverse_chunks
kws = [out.params for out in op.outputs]
for kw, chunks in zip(kws, reduce_chunks):
kw["chunks"] = chunks
unique_nsplits = list(inp.nsplits)
unique_nsplits[op.axis] = (np.nan,) * len(reduce_chunks[0])
kws[0]["nsplits"] = tuple(unique_nsplits)
i = 1
if op.return_index:
kws[i]["nsplits"] = ((np.nan,) * len(reduce_chunks[i]),)
i += 1
if op.return_inverse:
kws[i]["nsplits"] = (inp.nsplits[op.axis],)
i += 1
if op.return_counts:
kws[i]["nsplits"] = ((np.nan,) * len(reduce_chunks[i]),)
new_op = op.copy()
return new_op.new_tensors(op.inputs, kws=kws)
|
def _tile_via_shuffle(cls, op):
# rechunk the axes except the axis to do unique into 1 chunk
inp = op.inputs[0]
if inp.ndim > 1:
new_chunk_size = dict()
for axis in range(inp.ndim):
if axis == op.axis:
continue
if np.isnan(inp.shape[axis]):
raise TilesError(
"input tensor has unknown shape on axis {}".format(axis)
)
new_chunk_size[axis] = inp.shape[axis]
check_chunks_unknown_shape([inp], TilesError)
inp = inp.rechunk(new_chunk_size)._inplace_tile()
aggregate_size = op.aggregate_size
if aggregate_size is None:
aggregate_size = max(inp.chunk_shape[op.axis] // options.combine_size, 1)
unique_on_chunk_sizes = inp.nsplits[op.axis]
start_poses = np.cumsum((0,) + unique_on_chunk_sizes)[:-1]
map_chunks = []
for c in inp.chunks:
map_op = TensorUnique(
stage=OperandStage.map,
return_index=op.return_index,
return_inverse=op.return_inverse,
return_counts=op.return_counts,
axis=op.axis,
aggregate_size=aggregate_size,
start_pos=start_poses[c.index[op.axis]],
dtype=inp.dtype,
)
shape = list(c.shape)
shape[op.axis] = np.nan
map_chunks.append(map_op.new_chunk([c], shape=tuple(shape), index=c.index))
shuffle_chunk = TensorShuffleProxy(
dtype=inp.dtype, _tensor_keys=[inp.op.key]
).new_chunk(map_chunks, shape=())
reduce_chunks = [list() for _ in range(len(op.outputs))]
for i in range(aggregate_size):
reduce_op = TensorUnique(
stage=OperandStage.reduce,
return_index=op.return_index,
return_inverse=op.return_inverse,
return_counts=op.return_counts,
axis=op.axis,
aggregate_id=i,
shuffle_key=str(i),
)
kws = cls._gen_kws(op, inp, chunk=True, chunk_index=i)
chunks = reduce_op.new_chunks(
[shuffle_chunk], kws=kws, order=op.outputs[0].order
)
for j, c in enumerate(chunks):
reduce_chunks[j].append(c)
if op.return_inverse:
inverse_pos = 2 if op.return_index else 1
map_inverse_chunks = reduce_chunks[inverse_pos]
inverse_shuffle_chunk = TensorShuffleProxy(
dtype=map_inverse_chunks[0].dtype
).new_chunk(map_inverse_chunks, shape=())
inverse_chunks = []
for j, cs in enumerate(unique_on_chunk_sizes):
chunk_op = TensorUniqueInverseReduce(
dtype=map_inverse_chunks[0].dtype, shuffle_key=str(j)
)
inverse_chunk = chunk_op.new_chunk(
[inverse_shuffle_chunk], shape=(cs,), index=(j,)
)
inverse_chunks.append(inverse_chunk)
reduce_chunks[inverse_pos] = inverse_chunks
kws = [out.params for out in op.outputs]
for kw, chunks in zip(kws, reduce_chunks):
kw["chunks"] = chunks
unique_nsplits = list(inp.nsplits)
unique_nsplits[op.axis] = (np.nan,) * len(reduce_chunks[0])
kws[0]["nsplits"] = tuple(unique_nsplits)
i = 1
if op.return_index:
kws[i]["nsplits"] = ((np.nan,) * len(reduce_chunks[i]),)
i += 1
if op.return_inverse:
kws[i]["nsplits"] = (inp.nsplits[op.axis],)
i += 1
if op.return_counts:
kws[i]["nsplits"] = ((np.nan,) * len(reduce_chunks[i]),)
new_op = op.copy()
return new_op.new_tensors(op.inputs, kws=kws)
|
https://github.com/mars-project/mars/issues/1433
|
In [3]: import pandas as pd
In [4]: data = pd.DataFrame(np.arange(20).reshape((4, 5)) + 1, columns=['a', 'b', 'c', 'd', 'e'])
In [6]: df = md.DataFrame(data)
In [7]: df.groupby(['a','b']).size().execute()
Unexpected exception occurred in enter_build_mode.<locals>.inner.
Traceback (most recent call last):
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 633, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 350, in build
tileables, tileable_graph=tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 263, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 302, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 243, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 338, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 615, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/context.py", line 69, in h
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 481, in tile
return cls._tile_with_tree(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 412, in _tile_with_tree
index = out_df.index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 283, in to_pandas
return self._index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 197, in to_pandas
sortorder=self._sortorder, names=self._names)
AttributeError: _sortorder
|
AttributeError
|
def tile(cls, op):
if op.inputs:
check_chunks_unknown_shape(op.inputs, TilesError)
tensor = op.outputs[0]
# op can be TensorDiag or TensorEye
k = op.k
nsplits = op._get_nsplits(op)
fx = lambda x, y: x - y + k
cum_size = [np.cumsum(s).tolist() for s in nsplits]
out_chunks = []
for out_idx in itertools.product(*[range(len(s)) for s in nsplits]):
i, j = out_idx
ld_pos = cum_size[0][i] - 1, cum_size[1][j] - nsplits[1][j]
ru_pos = cum_size[0][i] - nsplits[0][i], cum_size[1][j] - 1
ld_fx = fx(*ld_pos)
ru_fx = fx(*ru_pos)
chunk_shape = (nsplits[0][i], nsplits[1][j])
if (ld_fx > 0 and ru_fx > 0) or (ld_fx < 0 and ru_fx < 0):
# does not cross, fill with zeros
chunk_op = TensorZeros(dtype=op.dtype, gpu=op.gpu, sparse=op.sparse)
chunk = chunk_op.new_chunk(None, shape=chunk_shape, index=out_idx)
else:
lu_pos = ru_pos[0], ld_pos[1]
chunk_k = fx(*lu_pos)
chunk = op._get_chunk(op, chunk_k, chunk_shape, out_idx)
out_chunks.append(chunk)
new_op = op.copy()
return new_op.new_tensors(
op.inputs, tensor.shape, chunks=out_chunks, nsplits=nsplits
)
|
def tile(cls, op):
if op.inputs:
check_chunks_unknown_shape(op.inputs, TilesError)
tensor = op.outputs[0]
# op can be TensorDiag or TensorEye
k = op.k
nsplits = op._get_nsplits(op)
fx = lambda x, y: x - y + k
cum_size = [np.cumsum(s) for s in nsplits]
out_chunks = []
for out_idx in itertools.product(*[range(len(s)) for s in nsplits]):
i, j = out_idx
ld_pos = cum_size[0][i] - 1, cum_size[1][j] - nsplits[1][j]
ru_pos = cum_size[0][i] - nsplits[0][i], cum_size[1][j] - 1
ld_fx = fx(*ld_pos)
ru_fx = fx(*ru_pos)
chunk_shape = (nsplits[0][i], nsplits[1][j])
if (ld_fx > 0 and ru_fx > 0) or (ld_fx < 0 and ru_fx < 0):
# does not cross, fill with zeros
chunk_op = TensorZeros(dtype=op.dtype, gpu=op.gpu, sparse=op.sparse)
chunk = chunk_op.new_chunk(None, shape=chunk_shape, index=out_idx)
else:
lu_pos = ru_pos[0], ld_pos[1]
chunk_k = fx(*lu_pos)
chunk = op._get_chunk(op, chunk_k, chunk_shape, out_idx)
out_chunks.append(chunk)
new_op = op.copy()
return new_op.new_tensors(
op.inputs, tensor.shape, chunks=out_chunks, nsplits=nsplits
)
|
https://github.com/mars-project/mars/issues/1433
|
In [3]: import pandas as pd
In [4]: data = pd.DataFrame(np.arange(20).reshape((4, 5)) + 1, columns=['a', 'b', 'c', 'd', 'e'])
In [6]: df = md.DataFrame(data)
In [7]: df.groupby(['a','b']).size().execute()
Unexpected exception occurred in enter_build_mode.<locals>.inner.
Traceback (most recent call last):
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 633, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 350, in build
tileables, tileable_graph=tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 263, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 302, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 243, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 338, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 615, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/context.py", line 69, in h
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 481, in tile
return cls._tile_with_tree(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 412, in _tile_with_tree
index = out_df.index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 283, in to_pandas
return self._index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 197, in to_pandas
sortorder=self._sortorder, names=self._names)
AttributeError: _sortorder
|
AttributeError
|
def tile(cls, op):
tensor = op.outputs[0]
v = op.input
k = op.k
idx = itertools.count(0)
if v.ndim == 2:
check_chunks_unknown_shape(op.inputs, TilesError)
chunks = []
nsplit = []
fx = lambda x, y: x - y + k
in_nsplits = v.nsplits
cum_size = [np.cumsum(s).tolist() for s in in_nsplits]
for c in v.chunks:
i, j = c.index
ld_pos = cum_size[0][i] - 1, cum_size[1][j] - in_nsplits[1][j]
ru_pos = cum_size[0][i] - in_nsplits[0][i], cum_size[1][j] - 1
ld_fx = fx(*ld_pos)
ru_fx = fx(*ru_pos)
if (ld_fx > 0 and ru_fx > 0) or (ld_fx < 0 and ru_fx < 0):
continue
lu_pos = ru_pos[0], ld_pos[1]
chunk_k = fx(*lu_pos)
chunk_shape = _get_diag_shape(c.shape, chunk_k)
chunk_idx = (next(idx),)
chunk_op = op.to_chunk_op(chunk_k)
chunk = chunk_op.new_chunk(
[c], shape=chunk_shape, index=chunk_idx, order=tensor.order
)
nsplit.append(chunk_shape[0])
chunks.append(chunk)
new_op = op.copy()
return new_op.new_tensors(
op.inputs,
op.outputs[0].shape,
order=tensor.order,
chunks=chunks,
nsplits=(tuple(nsplit),),
)
else:
return super().tile(op)
|
def tile(cls, op):
tensor = op.outputs[0]
v = op.input
k = op.k
idx = itertools.count(0)
if v.ndim == 2:
check_chunks_unknown_shape(op.inputs, TilesError)
chunks = []
nsplit = []
fx = lambda x, y: x - y + k
in_nsplits = v.nsplits
cum_size = [np.cumsum(s) for s in in_nsplits]
for c in v.chunks:
i, j = c.index
ld_pos = cum_size[0][i] - 1, cum_size[1][j] - in_nsplits[1][j]
ru_pos = cum_size[0][i] - in_nsplits[0][i], cum_size[1][j] - 1
ld_fx = fx(*ld_pos)
ru_fx = fx(*ru_pos)
if (ld_fx > 0 and ru_fx > 0) or (ld_fx < 0 and ru_fx < 0):
continue
lu_pos = ru_pos[0], ld_pos[1]
chunk_k = fx(*lu_pos)
chunk_shape = _get_diag_shape(c.shape, chunk_k)
chunk_idx = (next(idx),)
chunk_op = op.to_chunk_op(chunk_k)
chunk = chunk_op.new_chunk(
[c], shape=chunk_shape, index=chunk_idx, order=tensor.order
)
nsplit.append(chunk_shape[0])
chunks.append(chunk)
new_op = op.copy()
return new_op.new_tensors(
op.inputs,
op.outputs[0].shape,
order=tensor.order,
chunks=chunks,
nsplits=(tuple(nsplit),),
)
else:
return super().tile(op)
|
https://github.com/mars-project/mars/issues/1433
|
In [3]: import pandas as pd
In [4]: data = pd.DataFrame(np.arange(20).reshape((4, 5)) + 1, columns=['a', 'b', 'c', 'd', 'e'])
In [6]: df = md.DataFrame(data)
In [7]: df.groupby(['a','b']).size().execute()
Unexpected exception occurred in enter_build_mode.<locals>.inner.
Traceback (most recent call last):
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 633, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 350, in build
tileables, tileable_graph=tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 263, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 302, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 243, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 338, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 615, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/context.py", line 69, in h
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 481, in tile
return cls._tile_with_tree(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 412, in _tile_with_tree
index = out_df.index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 283, in to_pandas
return self._index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 197, in to_pandas
sortorder=self._sortorder, names=self._names)
AttributeError: _sortorder
|
AttributeError
|
def fromtiledb(uri, ctx=None, key=None, timestamp=None, gpu=False):
import tiledb
raw_ctx = ctx
if raw_ctx is None:
ctx = tiledb.Ctx()
# get metadata from tiledb
try:
tiledb_arr = tiledb.DenseArray(uri=uri, ctx=ctx, key=key, timestamp=timestamp)
sparse = False
except ValueError:
# if the array is not dense, ValueError will be raised by tiledb
tiledb_arr = tiledb.SparseArray(uri=uri, ctx=ctx, key=key, timestamp=timestamp)
sparse = True
if tiledb_arr.nattr > 1:
raise NotImplementedError(
"Does not supported TileDB array schema with more than 1 attr"
)
tiledb_dim_starts = tuple(
tiledb_arr.domain.dim(j).domain[0].item() for j in range(tiledb_arr.ndim)
)
if any(isinstance(s, float) for s in tiledb_dim_starts):
raise ValueError(
"Does not support TileDB array schema whose dimensions has float domain"
)
dtype = tiledb_arr.attr(0).dtype
tiledb_config = None if raw_ctx is None else ctx.config().dict()
tensor_order = (
TensorOrder.C_ORDER
if tiledb_arr.schema.cell_order == "row-major"
else TensorOrder.F_ORDER
)
op = TensorTileDBDataSource(
tiledb_config=tiledb_config,
tiledb_uri=uri,
tiledb_key=key,
tiledb_timstamp=timestamp,
tiledb_dim_starts=tiledb_dim_starts,
gpu=gpu,
sparse=sparse,
dtype=dtype,
)
chunk_size = tuple(
int(tiledb_arr.domain.dim(i).tile) for i in range(tiledb_arr.domain.ndim)
)
return op(tiledb_arr.shape, chunk_size=chunk_size, order=tensor_order)
|
def fromtiledb(uri, ctx=None, key=None, timestamp=None, gpu=False):
import tiledb
raw_ctx = ctx
if raw_ctx is None:
ctx = tiledb.Ctx()
# get metadata from tiledb
try:
tiledb_arr = tiledb.DenseArray(uri=uri, ctx=ctx, key=key, timestamp=timestamp)
sparse = False
except ValueError:
# if the array is not dense, ValueError will be raised by tiledb
tiledb_arr = tiledb.SparseArray(uri=uri, ctx=ctx, key=key, timestamp=timestamp)
sparse = True
if tiledb_arr.nattr > 1:
raise NotImplementedError(
"Does not supported TileDB array schema with more than 1 attr"
)
tiledb_dim_starts = tuple(
tiledb_arr.domain.dim(j).domain[0] for j in range(tiledb_arr.ndim)
)
if any(isinstance(s, float) for s in tiledb_dim_starts):
raise ValueError(
"Does not support TileDB array schema whose dimensions has float domain"
)
dtype = tiledb_arr.attr(0).dtype
tiledb_config = None if raw_ctx is None else ctx.config().dict()
tensor_order = (
TensorOrder.C_ORDER
if tiledb_arr.schema.cell_order == "row-major"
else TensorOrder.F_ORDER
)
op = TensorTileDBDataSource(
tiledb_config=tiledb_config,
tiledb_uri=uri,
tiledb_key=key,
tiledb_timstamp=timestamp,
tiledb_dim_starts=tiledb_dim_starts,
gpu=gpu,
sparse=sparse,
dtype=dtype,
)
chunk_size = tuple(
int(tiledb_arr.domain.dim(i).tile) for i in range(tiledb_arr.domain.ndim)
)
return op(tiledb_arr.shape, chunk_size=chunk_size, order=tensor_order)
|
https://github.com/mars-project/mars/issues/1433
|
In [3]: import pandas as pd
In [4]: data = pd.DataFrame(np.arange(20).reshape((4, 5)) + 1, columns=['a', 'b', 'c', 'd', 'e'])
In [6]: df = md.DataFrame(data)
In [7]: df.groupby(['a','b']).size().execute()
Unexpected exception occurred in enter_build_mode.<locals>.inner.
Traceback (most recent call last):
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 633, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 350, in build
tileables, tileable_graph=tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 263, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 302, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 243, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 338, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 615, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/context.py", line 69, in h
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 481, in tile
return cls._tile_with_tree(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 412, in _tile_with_tree
index = out_df.index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 283, in to_pandas
return self._index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 197, in to_pandas
sortorder=self._sortorder, names=self._names)
AttributeError: _sortorder
|
AttributeError
|
def tile(cls, op):
check_chunks_unknown_shape(op.inputs, TilesError)
tensor = op.outputs[0]
m = op.input
k = op.k
is_triu = type(op) == TensorTriu
fx = lambda x, y: x - y + k
nsplits = m.nsplits
cum_size = [np.cumsum(s).tolist() for s in nsplits]
out_chunks = []
for out_idx in itertools.product(*[range(len(s)) for s in nsplits]):
i, j = out_idx[-2:]
ld_pos = cum_size[-2][i] - 1, cum_size[-1][j] - nsplits[-1][j]
ru_pos = cum_size[-2][i] - nsplits[-2][i], cum_size[-1][j] - 1
ld_fx = fx(*ld_pos)
ru_fx = fx(*ru_pos)
chunk_shape = tuple(nsplits[i][idx] for i, idx in enumerate(out_idx))
if (is_triu and ld_fx > 0 and ru_fx > 0) or (
not is_triu and ld_fx < 0 and ru_fx < 0
):
# does not cross, fill with zeros
chunk_op = TensorZeros(dtype=op.dtype, gpu=op.gpu, sparse=op.sparse)
out_chunk = chunk_op.new_chunk(
None, shape=chunk_shape, index=out_idx, order=tensor.order
)
else:
lu_pos = ru_pos[0], ld_pos[1]
chunk_k = fx(*lu_pos)
input_chunk = m.cix[out_idx]
chunk_op = op.to_chunk_op(chunk_k)
out_chunk = chunk_op.new_chunk(
[input_chunk], shape=chunk_shape, index=out_idx, order=tensor.order
)
out_chunks.append(out_chunk)
new_op = op.copy()
return new_op.new_tensors(
op.inputs, tensor.shape, chunks=out_chunks, nsplits=m.nsplits
)
|
def tile(cls, op):
check_chunks_unknown_shape(op.inputs, TilesError)
tensor = op.outputs[0]
m = op.input
k = op.k
is_triu = type(op) == TensorTriu
fx = lambda x, y: x - y + k
nsplits = m.nsplits
cum_size = [np.cumsum(s) for s in nsplits]
out_chunks = []
for out_idx in itertools.product(*[range(len(s)) for s in nsplits]):
i, j = out_idx[-2:]
ld_pos = cum_size[-2][i] - 1, cum_size[-1][j] - nsplits[-1][j]
ru_pos = cum_size[-2][i] - nsplits[-2][i], cum_size[-1][j] - 1
ld_fx = fx(*ld_pos)
ru_fx = fx(*ru_pos)
chunk_shape = tuple(nsplits[i][idx] for i, idx in enumerate(out_idx))
if (is_triu and ld_fx > 0 and ru_fx > 0) or (
not is_triu and ld_fx < 0 and ru_fx < 0
):
# does not cross, fill with zeros
chunk_op = TensorZeros(dtype=op.dtype, gpu=op.gpu, sparse=op.sparse)
out_chunk = chunk_op.new_chunk(
None, shape=chunk_shape, index=out_idx, order=tensor.order
)
else:
lu_pos = ru_pos[0], ld_pos[1]
chunk_k = fx(*lu_pos)
input_chunk = m.cix[out_idx]
chunk_op = op.to_chunk_op(chunk_k)
out_chunk = chunk_op.new_chunk(
[input_chunk], shape=chunk_shape, index=out_idx, order=tensor.order
)
out_chunks.append(out_chunk)
new_op = op.copy()
return new_op.new_tensors(
op.inputs, tensor.shape, chunks=out_chunks, nsplits=m.nsplits
)
|
https://github.com/mars-project/mars/issues/1433
|
In [3]: import pandas as pd
In [4]: data = pd.DataFrame(np.arange(20).reshape((4, 5)) + 1, columns=['a', 'b', 'c', 'd', 'e'])
In [6]: df = md.DataFrame(data)
In [7]: df.groupby(['a','b']).size().execute()
Unexpected exception occurred in enter_build_mode.<locals>.inner.
Traceback (most recent call last):
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 633, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 350, in build
tileables, tileable_graph=tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 263, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 302, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 243, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 338, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 615, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/context.py", line 69, in h
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 481, in tile
return cls._tile_with_tree(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 412, in _tile_with_tree
index = out_df.index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 283, in to_pandas
return self._index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 197, in to_pandas
sortorder=self._sortorder, names=self._names)
AttributeError: _sortorder
|
AttributeError
|
def calc_shape(tensor_shape, index):
shape = []
in_axis = 0
out_axis = 0
fancy_index = None
fancy_index_shapes = []
for ind in index:
if (
isinstance(ind, TENSOR_TYPE + TENSOR_CHUNK_TYPE + (np.ndarray,))
and ind.dtype == np.bool_
):
# bool
shape.append(np.nan if not isinstance(ind, np.ndarray) else int(ind.sum()))
for i, t_size, size in zip(
itertools.count(0),
ind.shape,
tensor_shape[in_axis : ind.ndim + in_axis],
):
if not np.isnan(t_size) and not np.isnan(size) and t_size != size:
raise IndexError(
"boolean index did not match indexed array along dimension {0}; "
"dimension is {1} but corresponding boolean dimension is {2}".format(
in_axis + i, size, t_size
)
)
in_axis += ind.ndim
out_axis += 1
elif isinstance(ind, TENSOR_TYPE + TENSOR_CHUNK_TYPE + (np.ndarray,)):
first_fancy_index = False
if fancy_index is None:
first_fancy_index = True
fancy_index = out_axis
if isinstance(ind, np.ndarray) and np.any(ind >= tensor_shape[in_axis]):
out_of_range_index = next(
i for i in ind.flat if i >= tensor_shape[in_axis]
)
raise IndexError(
"IndexError: index {0} is out of bounds with size {1}".format(
out_of_range_index, tensor_shape[in_axis]
)
)
fancy_index_shapes.append(ind.shape)
in_axis += 1
if first_fancy_index:
out_axis += ind.ndim
elif isinstance(ind, slice):
if np.isnan(tensor_shape[in_axis]):
shape.append(np.nan)
else:
shape.append(calc_sliced_size(tensor_shape[in_axis], ind))
in_axis += 1
out_axis += 1
elif isinstance(ind, Integral):
size = tensor_shape[in_axis]
if not np.isnan(size) and ind >= size:
raise IndexError(
"index {0} is out of bounds for axis {1} with size {2}".format(
ind, in_axis, size
)
)
in_axis += 1
else:
assert ind is None
shape.append(1)
if fancy_index is not None:
try:
if any(np.isnan(np.prod(s)) for s in fancy_index_shapes):
fancy_index_shape = (np.nan,) * len(fancy_index_shapes[0])
else:
fancy_index_shape = broadcast_shape(*fancy_index_shapes)
shape = shape[:fancy_index] + list(fancy_index_shape) + shape[fancy_index:]
except ValueError:
raise IndexError(
"shape mismatch: indexing arrays could not be broadcast together "
"with shapes {0}".format(" ".join(str(s) for s in fancy_index_shapes))
)
return shape
|
def calc_shape(tensor_shape, index):
shape = []
in_axis = 0
out_axis = 0
fancy_index = None
fancy_index_shapes = []
for ind in index:
if (
isinstance(ind, TENSOR_TYPE + TENSOR_CHUNK_TYPE + (np.ndarray,))
and ind.dtype == np.bool_
):
# bool
shape.append(np.nan if not isinstance(ind, np.ndarray) else ind.sum())
for i, t_size, size in zip(
itertools.count(0),
ind.shape,
tensor_shape[in_axis : ind.ndim + in_axis],
):
if not np.isnan(t_size) and not np.isnan(size) and t_size != size:
raise IndexError(
"boolean index did not match indexed array along dimension {0}; "
"dimension is {1} but corresponding boolean dimension is {2}".format(
in_axis + i, size, t_size
)
)
in_axis += ind.ndim
out_axis += 1
elif isinstance(ind, TENSOR_TYPE + TENSOR_CHUNK_TYPE + (np.ndarray,)):
first_fancy_index = False
if fancy_index is None:
first_fancy_index = True
fancy_index = out_axis
if isinstance(ind, np.ndarray) and np.any(ind >= tensor_shape[in_axis]):
out_of_range_index = next(
i for i in ind.flat if i >= tensor_shape[in_axis]
)
raise IndexError(
"IndexError: index {0} is out of bounds with size {1}".format(
out_of_range_index, tensor_shape[in_axis]
)
)
fancy_index_shapes.append(ind.shape)
in_axis += 1
if first_fancy_index:
out_axis += ind.ndim
elif isinstance(ind, slice):
if np.isnan(tensor_shape[in_axis]):
shape.append(np.nan)
else:
shape.append(calc_sliced_size(tensor_shape[in_axis], ind))
in_axis += 1
out_axis += 1
elif isinstance(ind, Integral):
size = tensor_shape[in_axis]
if not np.isnan(size) and ind >= size:
raise IndexError(
"index {0} is out of bounds for axis {1} with size {2}".format(
ind, in_axis, size
)
)
in_axis += 1
else:
assert ind is None
shape.append(1)
if fancy_index is not None:
try:
if any(np.isnan(np.prod(s)) for s in fancy_index_shapes):
fancy_index_shape = (np.nan,) * len(fancy_index_shapes[0])
else:
fancy_index_shape = broadcast_shape(*fancy_index_shapes)
shape = shape[:fancy_index] + list(fancy_index_shape) + shape[fancy_index:]
except ValueError:
raise IndexError(
"shape mismatch: indexing arrays could not be broadcast together "
"with shapes {0}".format(" ".join(str(s) for s in fancy_index_shapes))
)
return shape
|
https://github.com/mars-project/mars/issues/1433
|
In [3]: import pandas as pd
In [4]: data = pd.DataFrame(np.arange(20).reshape((4, 5)) + 1, columns=['a', 'b', 'c', 'd', 'e'])
In [6]: df = md.DataFrame(data)
In [7]: df.groupby(['a','b']).size().execute()
Unexpected exception occurred in enter_build_mode.<locals>.inner.
Traceback (most recent call last):
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 633, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 350, in build
tileables, tileable_graph=tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 263, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 302, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 243, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 338, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 615, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/context.py", line 69, in h
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 481, in tile
return cls._tile_with_tree(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 412, in _tile_with_tree
index = out_df.index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 283, in to_pandas
return self._index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 197, in to_pandas
sortorder=self._sortorder, names=self._names)
AttributeError: _sortorder
|
AttributeError
|
def process(self, index_info: IndexInfo, context: IndexHandlerContext) -> None:
tileable = context.tileable
input_axis = index_info.input_axis
is_first_bool_index = self._is_first_bool_index(context, index_info)
axes = list(range(input_axis, input_axis + index_info.raw_index.ndim))
cum_sizes = []
for axis in axes:
cum_sizes.append(np.cumsum((0,) + tileable.nsplits[axis]))
other_index_to_iter = dict()
for chunk_index, chunk_index_info in context.chunk_index_to_info.items():
slcs = []
for j, axis in enumerate(axes):
axis_index = chunk_index[axis]
slcs.append(slice(cum_sizes[j][axis_index], cum_sizes[j][axis_index + 1]))
other_index = chunk_index[: axes[0]] + chunk_index[axes[-1] + 1 :]
if other_index not in other_index_to_iter:
other_index_to_iter[other_index] = itertools.count()
index = index_info.raw_index[tuple(slcs)]
output_axis_index = next(other_index_to_iter[other_index])
# if more than 1 bool index, getitem will rewrite them into fancy
# but for now, setitem will keep them, thus we cannot record
# index or shape for this one
output_axis_index = None if not is_first_bool_index else output_axis_index
output_size = None if not is_first_bool_index else int(index.sum())
self.set_chunk_index_info(
context,
index_info,
chunk_index,
chunk_index_info,
output_axis_index,
index,
output_size,
)
|
def process(self, index_info: IndexInfo, context: IndexHandlerContext) -> None:
tileable = context.tileable
input_axis = index_info.input_axis
is_first_bool_index = self._is_first_bool_index(context, index_info)
axes = list(range(input_axis, input_axis + index_info.raw_index.ndim))
cum_sizes = []
for axis in axes:
cum_sizes.append(np.cumsum((0,) + tileable.nsplits[axis]))
other_index_to_iter = dict()
for chunk_index, chunk_index_info in context.chunk_index_to_info.items():
slcs = []
for j, axis in enumerate(axes):
axis_index = chunk_index[axis]
slcs.append(slice(cum_sizes[j][axis_index], cum_sizes[j][axis_index + 1]))
other_index = chunk_index[: axes[0]] + chunk_index[axes[-1] + 1 :]
if other_index not in other_index_to_iter:
other_index_to_iter[other_index] = itertools.count()
index = index_info.raw_index[tuple(slcs)]
output_axis_index = next(other_index_to_iter[other_index])
# if more than 1 bool index, getitem will rewrite them into fancy
# but for now, setitem will keep them, thus we cannot record
# index or shape for this one
output_axis_index = None if not is_first_bool_index else output_axis_index
output_size = None if not is_first_bool_index else index.sum()
self.set_chunk_index_info(
context,
index_info,
chunk_index,
chunk_index_info,
output_axis_index,
index,
output_size,
)
|
https://github.com/mars-project/mars/issues/1433
|
In [3]: import pandas as pd
In [4]: data = pd.DataFrame(np.arange(20).reshape((4, 5)) + 1, columns=['a', 'b', 'c', 'd', 'e'])
In [6]: df = md.DataFrame(data)
In [7]: df.groupby(['a','b']).size().execute()
Unexpected exception occurred in enter_build_mode.<locals>.inner.
Traceback (most recent call last):
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 633, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 350, in build
tileables, tileable_graph=tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 263, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 302, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 243, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 338, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 615, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/context.py", line 69, in h
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 481, in tile
return cls._tile_with_tree(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 412, in _tile_with_tree
index = out_df.index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 283, in to_pandas
return self._index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 197, in to_pandas
sortorder=self._sortorder, names=self._names)
AttributeError: _sortorder
|
AttributeError
|
def tile(cls, op):
from ..merge.concatenate import TensorConcatenate
from ..indexing.slice import TensorSlice
from .dot import TensorDot
from .qr import TensorQR
from .svd import TensorSVD
calc_svd = getattr(op, "_is_svd", lambda: None)() or False
a = op.input
tinyq, tinyr = np.linalg.qr(np.ones((1, 1), dtype=a.dtype))
q_dtype, r_dtype = tinyq.dtype, tinyr.dtype
if a.chunk_shape[1] != 1:
check_chunks_unknown_shape([a], TilesError)
new_chunk_size = decide_chunk_sizes(a.shape, {1: a.shape[1]}, a.dtype.itemsize)
a = a.rechunk(new_chunk_size)._inplace_tile()
# stage 1, map phase
stage1_q_chunks, stage1_r_chunks = stage1_chunks = [[], []] # Q and R chunks
for c in a.chunks:
x, y = c.shape
q_shape, r_shape = (c.shape, (y, y)) if x > y else ((x, x), c.shape)
qr_op = TensorQR()
qr_chunks = qr_op.new_chunks(
[c],
index=c.index,
kws=[
{"side": "q", "dtype": q_dtype, "shape": q_shape},
{"side": "r", "dtype": r_dtype, "shape": r_shape},
],
)
stage1_chunks[0].append(qr_chunks[0])
stage1_chunks[1].append(qr_chunks[1])
# stage 2, reduce phase
# concatenate all r chunks into one
shape = (sum(c.shape[0] for c in stage1_r_chunks), stage1_r_chunks[0].shape[1])
concat_op = TensorConcatenate(axis=0, dtype=stage1_r_chunks[0].dtype)
concat_r_chunk = concat_op.new_chunk(
stage1_r_chunks, shape=shape, index=(0, 0), order=TensorOrder.C_ORDER
)
qr_op = TensorQR()
qr_chunks = qr_op.new_chunks(
[concat_r_chunk],
index=concat_r_chunk.index,
kws=[
{
"side": "q",
"dtype": q_dtype,
"order": TensorOrder.C_ORDER,
"shape": (concat_r_chunk.shape[0], min(concat_r_chunk.shape)),
},
{
"side": "r",
"dtype": r_dtype,
"order": TensorOrder.C_ORDER,
"shape": (min(concat_r_chunk.shape), concat_r_chunk.shape[1]),
},
],
)
stage2_q_chunk, stage2_r_chunk = qr_chunks
# stage 3, map phase
# split stage2_q_chunk into the same size as stage1_q_chunks
q_splits = np.cumsum([c.shape[1] for c in stage1_q_chunks]).tolist()
q_slices = [
slice(q_splits[i]) if i == 0 else slice(q_splits[i - 1], q_splits[i])
for i in range(len(q_splits))
]
stage2_q_chunks = []
for c, s in zip(stage1_q_chunks, q_slices):
slice_op = TensorSlice(slices=[s], dtype=c.dtype)
slice_length = s.stop - (s.start or 0)
stage2_q_chunks.append(
slice_op.new_chunk(
[stage2_q_chunk],
index=c.index,
order=TensorOrder.C_ORDER,
shape=(slice_length, stage2_q_chunk.shape[1]),
)
)
stage3_q_chunks = []
for c1, c2 in zip(stage1_q_chunks, stage2_q_chunks):
dot_op = TensorDot(dtype=q_dtype)
shape = (c1.shape[0], c2.shape[1])
stage3_q_chunks.append(
dot_op.new_chunk(
[c1, c2], shape=shape, index=c1.index, order=TensorOrder.C_ORDER
)
)
if not calc_svd:
q, r = op.outputs
new_op = op.copy()
q_nsplits = (
tuple(c.shape[0] for c in stage3_q_chunks),
(stage3_q_chunks[0].shape[1],),
)
r_nsplits = ((stage2_r_chunk.shape[0],), (stage2_r_chunk.shape[1],))
kws = [
# Q
{
"chunks": stage3_q_chunks,
"nsplits": q_nsplits,
"dtype": q.dtype,
"shape": q.shape,
},
# R, calculate from stage2
{
"chunks": [stage2_r_chunk],
"nsplits": r_nsplits,
"dtype": r.dtype,
"shape": r.shape,
},
]
return new_op.new_tensors(op.inputs, kws=kws)
else:
U, s, V = op.outputs
U_dtype, s_dtype, V_dtype = U.dtype, s.dtype, V.dtype
U_shape, s_shape, V_shape = U.shape, s.shape, V.shape
svd_op = TensorSVD()
u_shape, s_shape, v_shape = calc_svd_shapes(stage2_r_chunk)
stage2_usv_chunks = svd_op.new_chunks(
[stage2_r_chunk],
kws=[
{
"side": "U",
"dtype": U_dtype,
"index": stage2_r_chunk.index,
"shape": u_shape,
"order": TensorOrder.C_ORDER,
},
{
"side": "s",
"dtype": s_dtype,
"index": stage2_r_chunk.index[1:],
"shape": s_shape,
"order": TensorOrder.C_ORDER,
},
{
"side": "V",
"dtype": V_dtype,
"index": stage2_r_chunk.index,
"shape": v_shape,
"order": TensorOrder.C_ORDER,
},
],
)
stage2_u_chunk, stage2_s_chunk, stage2_v_chunk = stage2_usv_chunks
# stage 4, U = Q @ u
stage4_u_chunks = []
if U is not None: # U is not garbage collected
for c1 in stage3_q_chunks:
dot_op = TensorDot(dtype=U_dtype)
shape = (c1.shape[0], stage2_u_chunk.shape[1])
stage4_u_chunks.append(
dot_op.new_chunk(
[c1, stage2_u_chunk],
shape=shape,
index=c1.index,
order=TensorOrder.C_ORDER,
)
)
new_op = op.copy()
u_nsplits = (
tuple(c.shape[0] for c in stage4_u_chunks),
(stage4_u_chunks[0].shape[1],),
)
s_nsplits = ((stage2_s_chunk.shape[0],),)
v_nsplits = ((stage2_v_chunk.shape[0],), (stage2_v_chunk.shape[1],))
kws = [
{
"chunks": stage4_u_chunks,
"nsplits": u_nsplits,
"dtype": U_dtype,
"shape": U_shape,
"order": U.order,
}, # U
{
"chunks": [stage2_s_chunk],
"nsplits": s_nsplits,
"dtype": s_dtype,
"shape": s_shape,
"order": s.order,
}, # s
{
"chunks": [stage2_v_chunk],
"nsplits": v_nsplits,
"dtype": V_dtype,
"shape": V_shape,
"order": V.order,
}, # V
]
return new_op.new_tensors(op.inputs, kws=kws)
|
def tile(cls, op):
from ..merge.concatenate import TensorConcatenate
from ..indexing.slice import TensorSlice
from .dot import TensorDot
from .qr import TensorQR
from .svd import TensorSVD
calc_svd = getattr(op, "_is_svd", lambda: None)() or False
a = op.input
tinyq, tinyr = np.linalg.qr(np.ones((1, 1), dtype=a.dtype))
q_dtype, r_dtype = tinyq.dtype, tinyr.dtype
if a.chunk_shape[1] != 1:
check_chunks_unknown_shape([a], TilesError)
new_chunk_size = decide_chunk_sizes(a.shape, {1: a.shape[1]}, a.dtype.itemsize)
a = a.rechunk(new_chunk_size)._inplace_tile()
# stage 1, map phase
stage1_q_chunks, stage1_r_chunks = stage1_chunks = [[], []] # Q and R chunks
for c in a.chunks:
x, y = c.shape
q_shape, r_shape = (c.shape, (y, y)) if x > y else ((x, x), c.shape)
qr_op = TensorQR()
qr_chunks = qr_op.new_chunks(
[c],
index=c.index,
kws=[
{"side": "q", "dtype": q_dtype, "shape": q_shape},
{"side": "r", "dtype": r_dtype, "shape": r_shape},
],
)
stage1_chunks[0].append(qr_chunks[0])
stage1_chunks[1].append(qr_chunks[1])
# stage 2, reduce phase
# concatenate all r chunks into one
shape = (sum(c.shape[0] for c in stage1_r_chunks), stage1_r_chunks[0].shape[1])
concat_op = TensorConcatenate(axis=0, dtype=stage1_r_chunks[0].dtype)
concat_r_chunk = concat_op.new_chunk(
stage1_r_chunks, shape=shape, index=(0, 0), order=TensorOrder.C_ORDER
)
qr_op = TensorQR()
qr_chunks = qr_op.new_chunks(
[concat_r_chunk],
index=concat_r_chunk.index,
kws=[
{
"side": "q",
"dtype": q_dtype,
"order": TensorOrder.C_ORDER,
"shape": (concat_r_chunk.shape[0], min(concat_r_chunk.shape)),
},
{
"side": "r",
"dtype": r_dtype,
"order": TensorOrder.C_ORDER,
"shape": (min(concat_r_chunk.shape), concat_r_chunk.shape[1]),
},
],
)
stage2_q_chunk, stage2_r_chunk = qr_chunks
# stage 3, map phase
# split stage2_q_chunk into the same size as stage1_q_chunks
q_splits = np.cumsum([c.shape[1] for c in stage1_q_chunks])
q_slices = [
slice(q_splits[i]) if i == 0 else slice(q_splits[i - 1], q_splits[i])
for i in range(len(q_splits))
]
stage2_q_chunks = []
for c, s in zip(stage1_q_chunks, q_slices):
slice_op = TensorSlice(slices=[s], dtype=c.dtype)
slice_length = s.stop - (s.start or 0)
stage2_q_chunks.append(
slice_op.new_chunk(
[stage2_q_chunk],
index=c.index,
order=TensorOrder.C_ORDER,
shape=(slice_length, stage2_q_chunk.shape[1]),
)
)
stage3_q_chunks = []
for c1, c2 in zip(stage1_q_chunks, stage2_q_chunks):
dot_op = TensorDot(dtype=q_dtype)
shape = (c1.shape[0], c2.shape[1])
stage3_q_chunks.append(
dot_op.new_chunk(
[c1, c2], shape=shape, index=c1.index, order=TensorOrder.C_ORDER
)
)
if not calc_svd:
q, r = op.outputs
new_op = op.copy()
q_nsplits = (
tuple(c.shape[0] for c in stage3_q_chunks),
(stage3_q_chunks[0].shape[1],),
)
r_nsplits = ((stage2_r_chunk.shape[0],), (stage2_r_chunk.shape[1],))
kws = [
# Q
{
"chunks": stage3_q_chunks,
"nsplits": q_nsplits,
"dtype": q.dtype,
"shape": q.shape,
},
# R, calculate from stage2
{
"chunks": [stage2_r_chunk],
"nsplits": r_nsplits,
"dtype": r.dtype,
"shape": r.shape,
},
]
return new_op.new_tensors(op.inputs, kws=kws)
else:
U, s, V = op.outputs
U_dtype, s_dtype, V_dtype = U.dtype, s.dtype, V.dtype
U_shape, s_shape, V_shape = U.shape, s.shape, V.shape
svd_op = TensorSVD()
u_shape, s_shape, v_shape = calc_svd_shapes(stage2_r_chunk)
stage2_usv_chunks = svd_op.new_chunks(
[stage2_r_chunk],
kws=[
{
"side": "U",
"dtype": U_dtype,
"index": stage2_r_chunk.index,
"shape": u_shape,
"order": TensorOrder.C_ORDER,
},
{
"side": "s",
"dtype": s_dtype,
"index": stage2_r_chunk.index[1:],
"shape": s_shape,
"order": TensorOrder.C_ORDER,
},
{
"side": "V",
"dtype": V_dtype,
"index": stage2_r_chunk.index,
"shape": v_shape,
"order": TensorOrder.C_ORDER,
},
],
)
stage2_u_chunk, stage2_s_chunk, stage2_v_chunk = stage2_usv_chunks
# stage 4, U = Q @ u
stage4_u_chunks = []
if U is not None: # U is not garbage collected
for c1 in stage3_q_chunks:
dot_op = TensorDot(dtype=U_dtype)
shape = (c1.shape[0], stage2_u_chunk.shape[1])
stage4_u_chunks.append(
dot_op.new_chunk(
[c1, stage2_u_chunk],
shape=shape,
index=c1.index,
order=TensorOrder.C_ORDER,
)
)
new_op = op.copy()
u_nsplits = (
tuple(c.shape[0] for c in stage4_u_chunks),
(stage4_u_chunks[0].shape[1],),
)
s_nsplits = ((stage2_s_chunk.shape[0],),)
v_nsplits = ((stage2_v_chunk.shape[0],), (stage2_v_chunk.shape[1],))
kws = [
{
"chunks": stage4_u_chunks,
"nsplits": u_nsplits,
"dtype": U_dtype,
"shape": U_shape,
"order": U.order,
}, # U
{
"chunks": [stage2_s_chunk],
"nsplits": s_nsplits,
"dtype": s_dtype,
"shape": s_shape,
"order": s.order,
}, # s
{
"chunks": [stage2_v_chunk],
"nsplits": v_nsplits,
"dtype": V_dtype,
"shape": V_shape,
"order": V.order,
}, # V
]
return new_op.new_tensors(op.inputs, kws=kws)
|
https://github.com/mars-project/mars/issues/1433
|
In [3]: import pandas as pd
In [4]: data = pd.DataFrame(np.arange(20).reshape((4, 5)) + 1, columns=['a', 'b', 'c', 'd', 'e'])
In [6]: df = md.DataFrame(data)
In [7]: df.groupby(['a','b']).size().execute()
Unexpected exception occurred in enter_build_mode.<locals>.inner.
Traceback (most recent call last):
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 633, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 350, in build
tileables, tileable_graph=tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 263, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 302, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 243, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 338, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 615, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/context.py", line 69, in h
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 481, in tile
return cls._tile_with_tree(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 412, in _tile_with_tree
index = out_df.index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 283, in to_pandas
return self._index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 197, in to_pandas
sortorder=self._sortorder, names=self._names)
AttributeError: _sortorder
|
AttributeError
|
def _handle_size(cls, size):
if size is None:
return size
try:
return tuple(int(s) for s in size)
except TypeError:
return (size,)
|
def _handle_size(cls, size):
if size is None:
return size
try:
return tuple(size)
except TypeError:
return (size,)
|
https://github.com/mars-project/mars/issues/1433
|
In [3]: import pandas as pd
In [4]: data = pd.DataFrame(np.arange(20).reshape((4, 5)) + 1, columns=['a', 'b', 'c', 'd', 'e'])
In [6]: df = md.DataFrame(data)
In [7]: df.groupby(['a','b']).size().execute()
Unexpected exception occurred in enter_build_mode.<locals>.inner.
Traceback (most recent call last):
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 633, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 350, in build
tileables, tileable_graph=tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 263, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 302, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 243, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 338, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 615, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/context.py", line 69, in h
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 481, in tile
return cls._tile_with_tree(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 412, in _tile_with_tree
index = out_df.index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 283, in to_pandas
return self._index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 197, in to_pandas
sortorder=self._sortorder, names=self._names)
AttributeError: _sortorder
|
AttributeError
|
def tile(cls, op):
tensor = op.outputs[0]
chunk_size = tensor.extra_params.raw_chunk_size or options.chunk_size
nsplits = decide_chunk_sizes(tensor.shape, chunk_size, tensor.dtype.itemsize)
fields = getattr(op, "_input_fields_", [])
to_one_chunk_fields = set(getattr(op, "_into_one_chunk_fields_", list()))
new_inputs = []
changed = False
for field in fields:
t = getattr(op, field)
if not isinstance(t, TENSOR_TYPE):
continue
if field not in to_one_chunk_fields:
t_nsplits = nsplits
else:
t_nsplits = t.shape # into 1 chunk
rechunked = t.rechunk(t_nsplits)
if rechunked is not t:
rechunked._inplace_tile()
changed = True
new_inputs.append(rechunked)
else:
new_inputs.append(t)
if changed:
op.inputs = new_inputs
idxes = list(itertools.product(*[range(len(s)) for s in nsplits]))
seeds = gen_random_seeds(len(idxes), op.state)
out_chunks = []
for seed, idx, shape in zip(seeds, idxes, itertools.product(*nsplits)):
inputs = []
for inp in op.inputs:
if len(inp.chunks) == 1:
inputs.append(inp.chunks[0])
else:
inputs.append(inp.cix[idx])
try:
s = len(tuple(op.size))
size = shape[:s]
except TypeError:
if op.size is None:
size = None
else:
size = shape[:1]
except AttributeError:
size = shape
chunk_op = op.copy().reset_key()
chunk_op._seed = int(seed)
chunk_op._state = None
chunk_op._size = size
out_chunk = chunk_op.new_chunk(
inputs, shape=shape, index=idx, order=tensor.order
)
out_chunks.append(out_chunk)
new_op = op.copy()
return new_op.new_tensors(
op.inputs,
tensor.shape,
order=tensor.order,
chunks=out_chunks,
nsplits=nsplits,
**tensor.extra_params,
)
|
def tile(cls, op):
tensor = op.outputs[0]
chunk_size = tensor.extra_params.raw_chunk_size or options.chunk_size
nsplits = decide_chunk_sizes(tensor.shape, chunk_size, tensor.dtype.itemsize)
fields = getattr(op, "_input_fields_", [])
to_one_chunk_fields = set(getattr(op, "_into_one_chunk_fields_", list()))
new_inputs = []
changed = False
for field in fields:
t = getattr(op, field)
if not isinstance(t, TENSOR_TYPE):
continue
if field not in to_one_chunk_fields:
t_nsplits = nsplits
else:
t_nsplits = t.shape # into 1 chunk
rechunked = t.rechunk(t_nsplits)
if rechunked is not t:
rechunked._inplace_tile()
changed = True
new_inputs.append(rechunked)
else:
new_inputs.append(t)
if changed:
op.inputs = new_inputs
idxes = list(itertools.product(*[range(len(s)) for s in nsplits]))
seeds = gen_random_seeds(len(idxes), op.state)
out_chunks = []
for seed, idx, shape in zip(seeds, idxes, itertools.product(*nsplits)):
inputs = []
for inp in op.inputs:
if len(inp.chunks) == 1:
inputs.append(inp.chunks[0])
else:
inputs.append(inp.cix[idx])
try:
s = len(tuple(op.size))
size = shape[:s]
except TypeError:
if op.size is None:
size = None
else:
size = shape[:1]
except AttributeError:
size = shape
chunk_op = op.copy().reset_key()
chunk_op._seed = seed
chunk_op._state = None
chunk_op._size = size
out_chunk = chunk_op.new_chunk(
inputs, shape=shape, index=idx, order=tensor.order
)
out_chunks.append(out_chunk)
new_op = op.copy()
return new_op.new_tensors(
op.inputs,
tensor.shape,
order=tensor.order,
chunks=out_chunks,
nsplits=nsplits,
**tensor.extra_params,
)
|
https://github.com/mars-project/mars/issues/1433
|
In [3]: import pandas as pd
In [4]: data = pd.DataFrame(np.arange(20).reshape((4, 5)) + 1, columns=['a', 'b', 'c', 'd', 'e'])
In [6]: df = md.DataFrame(data)
In [7]: df.groupby(['a','b']).size().execute()
Unexpected exception occurred in enter_build_mode.<locals>.inner.
Traceback (most recent call last):
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 633, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 350, in build
tileables, tileable_graph=tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 263, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 302, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 243, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 338, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 615, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/context.py", line 69, in h
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 481, in tile
return cls._tile_with_tree(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 412, in _tile_with_tree
index = out_df.index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 283, in to_pandas
return self._index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 197, in to_pandas
sortorder=self._sortorder, names=self._names)
AttributeError: _sortorder
|
AttributeError
|
def _gen_reshape_rechunk_nsplits(old_shape, new_shape, nsplits):
old_idx = len(old_shape) - 1
new_idx = len(new_shape) - 1
rechunk_nsplists = [None for _ in old_shape]
reshape_nsplists = [None for _ in new_shape]
while old_idx >= 0 or new_idx >= 0:
old_dim_size = old_shape[old_idx]
new_dim_size = new_shape[new_idx]
if old_dim_size == new_dim_size:
# nothing need to do
rechunk_nsplists[old_idx] = nsplits[old_idx]
reshape_nsplists[new_idx] = nsplits[old_idx]
old_idx -= 1
new_idx -= 1
continue
if old_dim_size == 1:
rechunk_nsplists[old_idx] = (1,)
old_idx -= 1
elif new_dim_size == 1:
reshape_nsplists[new_idx] = (1,)
new_idx -= 1
elif old_dim_size < new_dim_size:
left_old_idx = old_idx - 1
while (
left_old_idx >= 0
and np.prod(old_shape[left_old_idx : old_idx + 1]) < new_dim_size
):
left_old_idx -= 1
if np.prod(old_shape[left_old_idx : old_idx + 1]) != new_dim_size:
raise ValueError("shapes not compatible")
for i in range(left_old_idx + 1, old_idx + 1):
# rechunk the higher dimension into 1 chunk
# e.g. ((2, 2, 2), [(3, 3), (4, 4))] -> [6, 8]
rechunk_nsplists[i] = (old_shape[i],)
chunk_reduce = np.prod(
[len(c) for c in nsplits[left_old_idx + 1 : old_idx + 1]]
).item()
# cause the higher dimension has been concatenated,
# the lowest dimension should be expanded to reduce size
rechunk_nsplists[left_old_idx] = TensorReshape._expand_nsplit_by_reduce(
nsplits[left_old_idx], chunk_reduce
)
size_reduce = np.prod(old_shape[left_old_idx + 1 : old_idx + 1]).item()
reshape_nsplists[new_idx] = tuple(
size_reduce * c for c in rechunk_nsplists[left_old_idx]
)
old_idx = left_old_idx - 1
new_idx -= 1
else:
assert old_dim_size > new_dim_size
lef_new_idx = new_idx - 1
while (
lef_new_idx >= 0
and np.prod(new_shape[lef_new_idx : new_idx + 1]) < old_dim_size
):
lef_new_idx -= 1
if np.prod(new_shape[lef_new_idx : new_idx + 1]) != old_dim_size:
raise ValueError("shapes not compatible")
chunk_expand = np.prod(new_shape[lef_new_idx + 1 : new_idx + 1]).item()
rechunk_nsplists[old_idx] = TensorReshape._reduce_nsplit_by_expand(
nsplits[old_idx], chunk_expand
)
for i in range(lef_new_idx + 1, new_idx + 1):
reshape_nsplists[i] = (new_shape[i],)
reshape_nsplists[lef_new_idx] = tuple(
c // chunk_expand for c in rechunk_nsplists[old_idx]
)
old_idx -= 1
new_idx = lef_new_idx - 1
assert np.prod([len(s) for s in rechunk_nsplists]) == np.prod(
[len(s) for s in reshape_nsplists]
)
return rechunk_nsplists, reshape_nsplists
|
def _gen_reshape_rechunk_nsplits(old_shape, new_shape, nsplits):
old_idx = len(old_shape) - 1
new_idx = len(new_shape) - 1
rechunk_nsplists = [None for _ in old_shape]
reshape_nsplists = [None for _ in new_shape]
while old_idx >= 0 or new_idx >= 0:
old_dim_size = old_shape[old_idx]
new_dim_size = new_shape[new_idx]
if old_dim_size == new_dim_size:
# nothing need to do
rechunk_nsplists[old_idx] = nsplits[old_idx]
reshape_nsplists[new_idx] = nsplits[old_idx]
old_idx -= 1
new_idx -= 1
continue
if old_dim_size == 1:
rechunk_nsplists[old_idx] = (1,)
old_idx -= 1
elif new_dim_size == 1:
reshape_nsplists[new_idx] = (1,)
new_idx -= 1
elif old_dim_size < new_dim_size:
left_old_idx = old_idx - 1
while (
left_old_idx >= 0
and np.prod(old_shape[left_old_idx : old_idx + 1]) < new_dim_size
):
left_old_idx -= 1
if np.prod(old_shape[left_old_idx : old_idx + 1]) != new_dim_size:
raise ValueError("shapes not compatible")
for i in range(left_old_idx + 1, old_idx + 1):
# rechunk the higher dimension into 1 chunk
# e.g. ((2, 2, 2), [(3, 3), (4, 4))] -> [6, 8]
rechunk_nsplists[i] = (old_shape[i],)
chunk_reduce = np.prod(
[len(c) for c in nsplits[left_old_idx + 1 : old_idx + 1]]
)
# cause the higher dimension has been concatenated,
# the lowest dimension should be expanded to reduce size
rechunk_nsplists[left_old_idx] = TensorReshape._expand_nsplit_by_reduce(
nsplits[left_old_idx], chunk_reduce
)
size_reduce = np.prod(old_shape[left_old_idx + 1 : old_idx + 1])
reshape_nsplists[new_idx] = tuple(
size_reduce * c for c in rechunk_nsplists[left_old_idx]
)
old_idx = left_old_idx - 1
new_idx -= 1
else:
assert old_dim_size > new_dim_size
lef_new_idx = new_idx - 1
while (
lef_new_idx >= 0
and np.prod(new_shape[lef_new_idx : new_idx + 1]) < old_dim_size
):
lef_new_idx -= 1
if np.prod(new_shape[lef_new_idx : new_idx + 1]) != old_dim_size:
raise ValueError("shapes not compatible")
chunk_expand = np.prod(new_shape[lef_new_idx + 1 : new_idx + 1])
rechunk_nsplists[old_idx] = TensorReshape._reduce_nsplit_by_expand(
nsplits[old_idx], chunk_expand
)
for i in range(lef_new_idx + 1, new_idx + 1):
reshape_nsplists[i] = (new_shape[i],)
reshape_nsplists[lef_new_idx] = tuple(
c // chunk_expand for c in rechunk_nsplists[old_idx]
)
old_idx -= 1
new_idx = lef_new_idx - 1
assert np.prod([len(s) for s in rechunk_nsplists]) == np.prod(
[len(s) for s in reshape_nsplists]
)
return rechunk_nsplists, reshape_nsplists
|
https://github.com/mars-project/mars/issues/1433
|
In [3]: import pandas as pd
In [4]: data = pd.DataFrame(np.arange(20).reshape((4, 5)) + 1, columns=['a', 'b', 'c', 'd', 'e'])
In [6]: df = md.DataFrame(data)
In [7]: df.groupby(['a','b']).size().execute()
Unexpected exception occurred in enter_build_mode.<locals>.inner.
Traceback (most recent call last):
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 633, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 350, in build
tileables, tileable_graph=tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 263, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 302, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 243, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 338, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 615, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/context.py", line 69, in h
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 481, in tile
return cls._tile_with_tree(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 412, in _tile_with_tree
index = out_df.index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 283, in to_pandas
return self._index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 197, in to_pandas
sortorder=self._sortorder, names=self._names)
AttributeError: _sortorder
|
AttributeError
|
def _tile_chunks(cls, op, in_tensor, w, v, vi):
out_tensor = op.outputs[0]
extra_inputs = []
for val in [w, v, vi]:
if val is not None:
extra_inputs.append(val.chunks[0])
n = in_tensor.shape[0]
aggregate_size = op.aggregate_size
if aggregate_size is None:
aggregate_size = (
np.ceil(
out_tensor.size * out_tensor.dtype.itemsize / options.chunk_store_limit
)
.astype(int)
.item()
)
out_sizes = [out_tensor.size // aggregate_size for _ in range(aggregate_size)]
for i in range(out_tensor.size % aggregate_size):
out_sizes[i] += 1
chunk_size = in_tensor.chunk_shape[0]
map_chunks = []
axis_0_cum_size = np.cumsum(in_tensor.nsplits[0]).tolist()
for i in range(chunk_size):
for j in range(i, chunk_size):
kw = {
"stage": OperandStage.map,
"a": in_tensor.cix[i, 0],
"a_offset": axis_0_cum_size[i - 1] if i > 0 else 0,
"out_sizes": tuple(out_sizes),
"n": n,
"metric": op.metric,
"p": op.p,
"w": w.chunks[0] if w is not None else None,
"v": v.chunks[0] if v is not None else None,
"vi": vi.chunks[0] if vi is not None else None,
"dtype": out_tensor.dtype,
}
if i != j:
kw["b"] = in_tensor.cix[j, 0]
kw["b_offset"] = axis_0_cum_size[j - 1] if j > 0 else 0
map_op = TensorPdist(**kw)
map_chunk_inputs = [kw["a"]]
if "b" in kw:
map_chunk_inputs.append(kw["b"])
if kw["w"] is not None:
map_chunk_inputs.append(kw["w"])
if kw["v"] is not None:
map_chunk_inputs.append(kw["v"])
if kw["vi"] is not None:
map_chunk_inputs.append(kw["vi"])
# calc chunk shape
if i == j:
a_axis_0_size = kw["a"].shape[0]
chunk_shape = (a_axis_0_size * (a_axis_0_size - 1) // 2,)
else:
chunk_shape = (kw["a"].shape[0] * kw["b"].shape[0],)
map_chunk = map_op.new_chunk(
map_chunk_inputs,
shape=chunk_shape,
order=out_tensor.order,
index=(i * chunk_size + j,),
)
map_chunks.append(map_chunk)
proxy_chunk = TensorShuffleProxy(dtype=out_tensor.dtype).new_chunk(
map_chunks, shape=()
)
reduce_chunks = []
for p in range(aggregate_size):
reduce_chunk_op = TensorPdist(
stage=OperandStage.reduce, shuffle_key=str(p), dtype=out_tensor.dtype
)
reduce_chunk = reduce_chunk_op.new_chunk(
[proxy_chunk], shape=(out_sizes[p],), order=out_tensor.order, index=(p,)
)
reduce_chunks.append(reduce_chunk)
new_op = op.copy()
return new_op.new_tensors(
op.inputs,
shape=out_tensor.shape,
order=out_tensor.order,
nsplits=(tuple(out_sizes),),
chunks=reduce_chunks,
)
|
def _tile_chunks(cls, op, in_tensor, w, v, vi):
out_tensor = op.outputs[0]
extra_inputs = []
for val in [w, v, vi]:
if val is not None:
extra_inputs.append(val.chunks[0])
n = in_tensor.shape[0]
aggregate_size = op.aggregate_size
if aggregate_size is None:
aggregate_size = (
np.ceil(
out_tensor.size * out_tensor.dtype.itemsize / options.chunk_store_limit
)
.astype(int)
.item()
)
out_sizes = [out_tensor.size // aggregate_size for _ in range(aggregate_size)]
for i in range(out_tensor.size % aggregate_size):
out_sizes[i] += 1
chunk_size = in_tensor.chunk_shape[0]
map_chunks = []
axis_0_cum_size = np.cumsum(in_tensor.nsplits[0])
for i in range(chunk_size):
for j in range(i, chunk_size):
kw = {
"stage": OperandStage.map,
"a": in_tensor.cix[i, 0],
"a_offset": axis_0_cum_size[i - 1] if i > 0 else 0,
"out_sizes": tuple(out_sizes),
"n": n,
"metric": op.metric,
"p": op.p,
"w": w.chunks[0] if w is not None else None,
"v": v.chunks[0] if v is not None else None,
"vi": vi.chunks[0] if vi is not None else None,
"dtype": out_tensor.dtype,
}
if i != j:
kw["b"] = in_tensor.cix[j, 0]
kw["b_offset"] = axis_0_cum_size[j - 1] if j > 0 else 0
map_op = TensorPdist(**kw)
map_chunk_inputs = [kw["a"]]
if "b" in kw:
map_chunk_inputs.append(kw["b"])
if kw["w"] is not None:
map_chunk_inputs.append(kw["w"])
if kw["v"] is not None:
map_chunk_inputs.append(kw["v"])
if kw["vi"] is not None:
map_chunk_inputs.append(kw["vi"])
# calc chunk shape
if i == j:
a_axis_0_size = kw["a"].shape[0]
chunk_shape = (a_axis_0_size * (a_axis_0_size - 1) // 2,)
else:
chunk_shape = (kw["a"].shape[0] * kw["b"].shape[0],)
map_chunk = map_op.new_chunk(
map_chunk_inputs,
shape=chunk_shape,
order=out_tensor.order,
index=(i * chunk_size + j,),
)
map_chunks.append(map_chunk)
proxy_chunk = TensorShuffleProxy(dtype=out_tensor.dtype).new_chunk(
map_chunks, shape=()
)
reduce_chunks = []
for p in range(aggregate_size):
reduce_chunk_op = TensorPdist(
stage=OperandStage.reduce, shuffle_key=str(p), dtype=out_tensor.dtype
)
reduce_chunk = reduce_chunk_op.new_chunk(
[proxy_chunk], shape=(out_sizes[p],), order=out_tensor.order, index=(p,)
)
reduce_chunks.append(reduce_chunk)
new_op = op.copy()
return new_op.new_tensors(
op.inputs,
shape=out_tensor.shape,
order=out_tensor.order,
nsplits=(tuple(out_sizes),),
chunks=reduce_chunks,
)
|
https://github.com/mars-project/mars/issues/1433
|
In [3]: import pandas as pd
In [4]: data = pd.DataFrame(np.arange(20).reshape((4, 5)) + 1, columns=['a', 'b', 'c', 'd', 'e'])
In [6]: df = md.DataFrame(data)
In [7]: df.groupby(['a','b']).size().execute()
Unexpected exception occurred in enter_build_mode.<locals>.inner.
Traceback (most recent call last):
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 633, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 350, in build
tileables, tileable_graph=tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 263, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 302, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 243, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 338, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 615, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/context.py", line 69, in h
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 481, in tile
return cls._tile_with_tree(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 412, in _tile_with_tree
index = out_df.index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 283, in to_pandas
return self._index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 197, in to_pandas
sortorder=self._sortorder, names=self._names)
AttributeError: _sortorder
|
AttributeError
|
def gen_random_seeds(n, random_state):
assert isinstance(random_state, np.random.RandomState)
return tuple(np.frombuffer(random_state.bytes(n * 4), dtype=np.uint32).tolist())
|
def gen_random_seeds(n, random_state):
assert isinstance(random_state, np.random.RandomState)
return np.frombuffer(random_state.bytes(n * 4), dtype=np.uint32)
|
https://github.com/mars-project/mars/issues/1433
|
In [3]: import pandas as pd
In [4]: data = pd.DataFrame(np.arange(20).reshape((4, 5)) + 1, columns=['a', 'b', 'c', 'd', 'e'])
In [6]: df = md.DataFrame(data)
In [7]: df.groupby(['a','b']).size().execute()
Unexpected exception occurred in enter_build_mode.<locals>.inner.
Traceback (most recent call last):
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 633, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 350, in build
tileables, tileable_graph=tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 263, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 302, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 243, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 338, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 615, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/context.py", line 69, in h
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 481, in tile
return cls._tile_with_tree(op)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/groupby/aggregation.py", line 412, in _tile_with_tree
index = out_df.index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 283, in to_pandas
return self._index_value.to_pandas()
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/dataframe/core.py", line 197, in to_pandas
sortorder=self._sortorder, names=self._names)
AttributeError: _sortorder
|
AttributeError
|
def _infer_df_func_returns(self, in_dtypes, dtypes):
if self.output_types[0] == OutputType.dataframe:
empty_df = build_empty_df(in_dtypes, index=pd.RangeIndex(2))
try:
with np.errstate(all="ignore"):
if self.call_agg:
infer_df = empty_df.agg(
self._func, axis=self._axis, *self.args, **self.kwds
)
else:
infer_df = empty_df.transform(
self._func, axis=self._axis, *self.args, **self.kwds
)
except: # noqa: E722
infer_df = None
else:
empty_df = build_empty_series(
in_dtypes[1], index=pd.RangeIndex(2), name=in_dtypes[0]
)
try:
with np.errstate(all="ignore"):
if self.call_agg:
infer_df = empty_df.agg(self._func, args=self.args, **self.kwds)
else:
infer_df = empty_df.transform(
self._func,
convert_dtype=self.convert_dtype,
args=self.args,
**self.kwds,
)
except: # noqa: E722
infer_df = None
if infer_df is None and dtypes is None:
raise TypeError("Failed to infer dtype, please specify dtypes as arguments.")
if infer_df is None:
is_df = self.output_types[0] == OutputType.dataframe
else:
is_df = isinstance(infer_df, pd.DataFrame)
if is_df:
new_dtypes = dtypes or infer_df.dtypes
self.output_types = [OutputType.dataframe]
else:
new_dtypes = dtypes or (infer_df.name, infer_df.dtype)
self.output_types = [OutputType.series]
return new_dtypes
|
def _infer_df_func_returns(self, in_dtypes, dtypes):
if self.output_types[0] == OutputType.dataframe:
empty_df = build_empty_df(in_dtypes, index=pd.RangeIndex(2))
with np.errstate(all="ignore"):
if self.call_agg:
infer_df = empty_df.agg(
self._func, axis=self._axis, *self.args, **self.kwds
)
else:
infer_df = empty_df.transform(
self._func, axis=self._axis, *self.args, **self.kwds
)
else:
empty_df = build_empty_series(
in_dtypes[1], index=pd.RangeIndex(2), name=in_dtypes[0]
)
with np.errstate(all="ignore"):
if self.call_agg:
infer_df = empty_df.agg(self._func, args=self.args, **self.kwds)
else:
infer_df = empty_df.transform(
self._func,
convert_dtype=self.convert_dtype,
args=self.args,
**self.kwds,
)
if isinstance(infer_df, pd.DataFrame):
new_dtypes = dtypes or infer_df.dtypes
self.output_types = [OutputType.dataframe]
else:
new_dtypes = dtypes or (infer_df.name, infer_df.dtype)
self.output_types = [OutputType.series]
return new_dtypes
|
https://github.com/mars-project/mars/issues/1423
|
In [1]: import pandas as pd
...: import mars.dataframe as md
...: mdf = md.Series(pd.Series(list('abc')))
...: mdf.transform(lambda x: x + 's').execute()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in aggregate(self, func, axis, *args, **kwargs)
3704 try:
-> 3705 result = self.apply(func, *args, **kwargs)
3706 except (ValueError, AttributeError, TypeError):
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in apply(self, func, convert_dtype, args, **kwds)
3847 values = self.astype(object).values
-> 3848 mapped = lib.map_infer(values, f, convert=convert_dtype)
3849
pandas/_libs/lib.pyx in pandas._libs.lib.map_infer()
<ipython-input-1-13040ef89e14> in <lambda>(x)
3 mdf = md.Series(pd.Series(list('abc')))
----> 4 mdf.transform(lambda x: x + 's').execute()
TypeError: unsupported operand type(s) for +: 'float' and 'str'
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-1-13040ef89e14> in <module>
2 import mars.dataframe as md
3 mdf = md.Series(pd.Series(list('abc')))
----> 4 mdf.transform(lambda x: x + 's').execute()
~/Documents/mars_dev/mars/mars/dataframe/base/transform.py in series_transform(series, func, convert_dtype, axis, dtype, *args, **kwargs)
246 output_types=[OutputType.series], call_agg=kwargs.pop('_call_agg', False))
247 dtypes = (series.name, dtype) if dtype is not None else None
--> 248 return op(series, dtypes=dtypes)
~/Documents/mars_dev/mars/mars/dataframe/base/transform.py in __call__(self, df, dtypes, index)
203 dtypes = self._infer_df_func_returns(df.dtypes, dtypes)
204 else:
--> 205 dtypes = self._infer_df_func_returns((df.name, df.dtype), dtypes)
206
207 for arg, desc in zip((self.output_types, dtypes), ('output_types', 'dtypes')):
~/Documents/mars_dev/mars/mars/dataframe/base/transform.py in _infer_df_func_returns(self, in_dtypes, dtypes)
185 else:
186 infer_df = empty_df.transform(self._func, convert_dtype=self.convert_dtype,
--> 187 args=self.args, **self.kwds)
188
189 if isinstance(infer_df, pd.DataFrame):
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in transform(self, func, axis, *args, **kwargs)
3715 # Validate the axis parameter
3716 self._get_axis_number(axis)
-> 3717 return super().transform(func, *args, **kwargs)
3718
3719 def apply(self, func, convert_dtype=True, args=(), **kwds):
~/miniconda3/lib/python3.7/site-packages/pandas/core/generic.py in transform(self, func, *args, **kwargs)
10423 @Appender(_shared_docs["transform"] % dict(axis="", **_shared_doc_kwargs))
10424 def transform(self, func, *args, **kwargs):
10425 result = self.agg(func, *args, **kwargs)
10426 if is_scalar(result) or len(result) != len(self):
10427 raise ValueError("transforms cannot produce aggregated results")
~/miniconda3/lib/python3.7/site-packages/pandas/core/series.py in aggregate(self, func, axis, *args, **kwargs)
3705 result = self.apply(func, *args, **kwargs)
3706 except (ValueError, AttributeError, TypeError):
-> 3707 result = func(self, *args, **kwargs)
3708
3709 return result
TypeError: <lambda>() got an unexpected keyword argument 'convert_dtype'
|
TypeError
|
def get_chunk_metas(self, chunk_keys, filter_fields=None):
metas = []
for chunk_key in chunk_keys:
chunk_data = self.get(chunk_key)
if chunk_data is None:
metas.append(None)
continue
if hasattr(chunk_data, "nbytes"):
# ndarray
size = chunk_data.nbytes
shape = chunk_data.shape
elif hasattr(chunk_data, "memory_usage"):
# DataFrame
size = chunk_data.memory_usage(deep=True).sum()
shape = chunk_data.shape
else:
# other
size = sys.getsizeof(chunk_data)
shape = ()
metas.append(ChunkMeta(chunk_size=size, chunk_shape=shape, workers=None))
selected_metas = []
for chunk_meta in metas:
if filter_fields is not None:
chunk_meta = [getattr(chunk_meta, field) for field in filter_fields]
selected_metas.append(chunk_meta)
return selected_metas
|
def get_chunk_metas(self, chunk_keys, filter_fields=None):
if filter_fields is not None: # pragma: no cover
raise NotImplementedError("Local context doesn't support filter fields now")
metas = []
for chunk_key in chunk_keys:
chunk_data = self.get(chunk_key)
if chunk_data is None:
metas.append(None)
continue
if hasattr(chunk_data, "nbytes"):
# ndarray
size = chunk_data.nbytes
shape = chunk_data.shape
elif hasattr(chunk_data, "memory_usage"):
# DataFrame
size = chunk_data.memory_usage(deep=True).sum()
shape = chunk_data.shape
else:
# other
size = sys.getsizeof(chunk_data)
shape = ()
metas.append(ChunkMeta(chunk_size=size, chunk_shape=shape, workers=None))
return metas
|
https://github.com/mars-project/mars/issues/1413
|
In [25]: merge_df = parsing_df.append(pFold_4_df)
In [26]: mDf_g = merge_df.groupby(["query","template"])
In [27]: mDf_g.execute()
Out[27]: ---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/core/formatters.py in __call__(self, obj)
700 type_pprinters=self.type_printers,
701 deferred_pprinters=self.deferred_printers)
--> 702 printer.pretty(obj)
703 printer.flush()
704 return stream.getvalue()
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in pretty(self, obj)
392 if cls is not object \
393 and callable(cls.__dict__.get('__repr__')):
--> 394 return _repr_pprint(obj, self, cycle)
395
396 return _default_pprint(obj, self, cycle)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in _repr_pprint(obj, p, cycle)
698 """A pprint that just redirects to the normal repr function."""
699 # Find newlines and replace them with p.break_()
--> 700 output = repr(obj)
701 lines = output.splitlines()
702 with p.group():
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in __repr__(self)
998
999 def __repr__(self):
-> 1000 return self._to_str(representation=True)
1001
1002 def _repr_html_(self):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in _to_str(self, representation)
968 else:
969 corner_data = fetch_corner_data(
--> 970 self, session=self._executed_sessions[-1])
971
972 buf = StringIO()
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/utils.py in fetch_corner_data(df_or_series, session)
768 return df_or_series.fetch(session=session)
769 else:
--> 770 head = iloc(df_or_series)[:index_size]
771 tail = iloc(df_or_series)[-index_size:]
772 head_data, tail_data = \
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __getitem__(self, indexes)
111 else:
112 op = SeriesIlocGetItem(indexes=process_iloc_indexes(self._obj, indexes))
--> 113 return op(self._obj)
114
115 def __setitem__(self, indexes, value):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __call__(self, series)
485 index_value = indexing_index_value(series.index_value, self.indexes[0])
486 inputs = [series] + [index for index in self._indexes if isinstance(index, (Base, Entity))]
--> 487 return self.new_series(inputs, shape=shape, dtype=series.dtype,
488 index_value=index_value, name=series.name)
489
AttributeError: 'DataFrameGroupByData' object has no attribute 'dtype'
|
AttributeError
|
def __init__(self, scheduler_address, session_id, actor_ctx=None, **kw):
from .worker.api import WorkerAPI
from .scheduler.resource import ResourceActor
from .scheduler.utils import SchedulerClusterInfoActor
from .actors import new_client
self._session_id = session_id
self._scheduler_address = scheduler_address
self._worker_api = WorkerAPI()
self._meta_api_thread_local = threading.local()
self._running_mode = None
self._actor_ctx = actor_ctx or new_client()
self._cluster_info = self._actor_ctx.actor_ref(
SchedulerClusterInfoActor.default_uid(), address=scheduler_address
)
is_distributed = self._cluster_info.is_distributed()
self._running_mode = (
RunningMode.local_cluster if not is_distributed else RunningMode.distributed
)
self._resource_actor_ref = self._actor_ctx.actor_ref(
ResourceActor.default_uid(), address=scheduler_address
)
self._address = kw.pop("address", None)
self._extra_info = kw
|
def __init__(self, scheduler_address, session_id, actor_ctx=None, **kw):
from .worker.api import WorkerAPI
from .scheduler.api import MetaAPI
from .scheduler.resource import ResourceActor
from .scheduler.utils import SchedulerClusterInfoActor
from .actors import new_client
self._session_id = session_id
self._scheduler_address = scheduler_address
self._worker_api = WorkerAPI()
self._meta_api = MetaAPI(actor_ctx=actor_ctx, scheduler_endpoint=scheduler_address)
self._running_mode = None
self._actor_ctx = actor_ctx or new_client()
self._cluster_info = self._actor_ctx.actor_ref(
SchedulerClusterInfoActor.default_uid(), address=scheduler_address
)
is_distributed = self._cluster_info.is_distributed()
self._running_mode = (
RunningMode.local_cluster if not is_distributed else RunningMode.distributed
)
self._resource_actor_ref = self._actor_ctx.actor_ref(
ResourceActor.default_uid(), address=scheduler_address
)
self._address = kw.pop("address", None)
self._extra_info = kw
|
https://github.com/mars-project/mars/issues/1413
|
In [25]: merge_df = parsing_df.append(pFold_4_df)
In [26]: mDf_g = merge_df.groupby(["query","template"])
In [27]: mDf_g.execute()
Out[27]: ---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/core/formatters.py in __call__(self, obj)
700 type_pprinters=self.type_printers,
701 deferred_pprinters=self.deferred_printers)
--> 702 printer.pretty(obj)
703 printer.flush()
704 return stream.getvalue()
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in pretty(self, obj)
392 if cls is not object \
393 and callable(cls.__dict__.get('__repr__')):
--> 394 return _repr_pprint(obj, self, cycle)
395
396 return _default_pprint(obj, self, cycle)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in _repr_pprint(obj, p, cycle)
698 """A pprint that just redirects to the normal repr function."""
699 # Find newlines and replace them with p.break_()
--> 700 output = repr(obj)
701 lines = output.splitlines()
702 with p.group():
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in __repr__(self)
998
999 def __repr__(self):
-> 1000 return self._to_str(representation=True)
1001
1002 def _repr_html_(self):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in _to_str(self, representation)
968 else:
969 corner_data = fetch_corner_data(
--> 970 self, session=self._executed_sessions[-1])
971
972 buf = StringIO()
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/utils.py in fetch_corner_data(df_or_series, session)
768 return df_or_series.fetch(session=session)
769 else:
--> 770 head = iloc(df_or_series)[:index_size]
771 tail = iloc(df_or_series)[-index_size:]
772 head_data, tail_data = \
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __getitem__(self, indexes)
111 else:
112 op = SeriesIlocGetItem(indexes=process_iloc_indexes(self._obj, indexes))
--> 113 return op(self._obj)
114
115 def __setitem__(self, indexes, value):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __call__(self, series)
485 index_value = indexing_index_value(series.index_value, self.indexes[0])
486 inputs = [series] + [index for index in self._indexes if isinstance(index, (Base, Entity))]
--> 487 return self.new_series(inputs, shape=shape, dtype=series.dtype,
488 index_value=index_value, name=series.name)
489
AttributeError: 'DataFrameGroupByData' object has no attribute 'dtype'
|
AttributeError
|
def get_tileable_metas(self, tileable_keys, filter_fields: List[str] = None) -> List:
return self.meta_api.get_tileable_metas(
self._session_id, tileable_keys, filter_fields
)
|
def get_tileable_metas(self, tileable_keys, filter_fields: List[str] = None) -> List:
return self._meta_api.get_tileable_metas(
self._session_id, tileable_keys, filter_fields
)
|
https://github.com/mars-project/mars/issues/1413
|
In [25]: merge_df = parsing_df.append(pFold_4_df)
In [26]: mDf_g = merge_df.groupby(["query","template"])
In [27]: mDf_g.execute()
Out[27]: ---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/core/formatters.py in __call__(self, obj)
700 type_pprinters=self.type_printers,
701 deferred_pprinters=self.deferred_printers)
--> 702 printer.pretty(obj)
703 printer.flush()
704 return stream.getvalue()
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in pretty(self, obj)
392 if cls is not object \
393 and callable(cls.__dict__.get('__repr__')):
--> 394 return _repr_pprint(obj, self, cycle)
395
396 return _default_pprint(obj, self, cycle)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in _repr_pprint(obj, p, cycle)
698 """A pprint that just redirects to the normal repr function."""
699 # Find newlines and replace them with p.break_()
--> 700 output = repr(obj)
701 lines = output.splitlines()
702 with p.group():
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in __repr__(self)
998
999 def __repr__(self):
-> 1000 return self._to_str(representation=True)
1001
1002 def _repr_html_(self):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in _to_str(self, representation)
968 else:
969 corner_data = fetch_corner_data(
--> 970 self, session=self._executed_sessions[-1])
971
972 buf = StringIO()
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/utils.py in fetch_corner_data(df_or_series, session)
768 return df_or_series.fetch(session=session)
769 else:
--> 770 head = iloc(df_or_series)[:index_size]
771 tail = iloc(df_or_series)[-index_size:]
772 head_data, tail_data = \
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __getitem__(self, indexes)
111 else:
112 op = SeriesIlocGetItem(indexes=process_iloc_indexes(self._obj, indexes))
--> 113 return op(self._obj)
114
115 def __setitem__(self, indexes, value):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __call__(self, series)
485 index_value = indexing_index_value(series.index_value, self.indexes[0])
486 inputs = [series] + [index for index in self._indexes if isinstance(index, (Base, Entity))]
--> 487 return self.new_series(inputs, shape=shape, dtype=series.dtype,
488 index_value=index_value, name=series.name)
489
AttributeError: 'DataFrameGroupByData' object has no attribute 'dtype'
|
AttributeError
|
def get_chunk_metas(self, chunk_keys, filter_fields: List[str] = None) -> List:
return self.meta_api.get_chunk_metas(self._session_id, chunk_keys, filter_fields)
|
def get_chunk_metas(self, chunk_keys, filter_fields: List[str] = None) -> List:
return self._meta_api.get_chunk_metas(self._session_id, chunk_keys, filter_fields)
|
https://github.com/mars-project/mars/issues/1413
|
In [25]: merge_df = parsing_df.append(pFold_4_df)
In [26]: mDf_g = merge_df.groupby(["query","template"])
In [27]: mDf_g.execute()
Out[27]: ---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/core/formatters.py in __call__(self, obj)
700 type_pprinters=self.type_printers,
701 deferred_pprinters=self.deferred_printers)
--> 702 printer.pretty(obj)
703 printer.flush()
704 return stream.getvalue()
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in pretty(self, obj)
392 if cls is not object \
393 and callable(cls.__dict__.get('__repr__')):
--> 394 return _repr_pprint(obj, self, cycle)
395
396 return _default_pprint(obj, self, cycle)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in _repr_pprint(obj, p, cycle)
698 """A pprint that just redirects to the normal repr function."""
699 # Find newlines and replace them with p.break_()
--> 700 output = repr(obj)
701 lines = output.splitlines()
702 with p.group():
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in __repr__(self)
998
999 def __repr__(self):
-> 1000 return self._to_str(representation=True)
1001
1002 def _repr_html_(self):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in _to_str(self, representation)
968 else:
969 corner_data = fetch_corner_data(
--> 970 self, session=self._executed_sessions[-1])
971
972 buf = StringIO()
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/utils.py in fetch_corner_data(df_or_series, session)
768 return df_or_series.fetch(session=session)
769 else:
--> 770 head = iloc(df_or_series)[:index_size]
771 tail = iloc(df_or_series)[-index_size:]
772 head_data, tail_data = \
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __getitem__(self, indexes)
111 else:
112 op = SeriesIlocGetItem(indexes=process_iloc_indexes(self._obj, indexes))
--> 113 return op(self._obj)
114
115 def __setitem__(self, indexes, value):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __call__(self, series)
485 index_value = indexing_index_value(series.index_value, self.indexes[0])
486 inputs = [series] + [index for index in self._indexes if isinstance(index, (Base, Entity))]
--> 487 return self.new_series(inputs, shape=shape, dtype=series.dtype,
488 index_value=index_value, name=series.name)
489
AttributeError: 'DataFrameGroupByData' object has no attribute 'dtype'
|
AttributeError
|
def get_named_tileable_infos(self, name: str):
tileable_key = self.meta_api.get_tileable_key_by_name(self._session_id, name)
nsplits = self.get_tileable_metas([tileable_key], filter_fields=["nsplits"])[0][0]
shape = tuple(sum(s) for s in nsplits)
return TileableInfos(tileable_key, shape)
|
def get_named_tileable_infos(self, name: str):
tileable_key = self._meta_api.get_tileable_key_by_name(self._session_id, name)
nsplits = self.get_tileable_metas([tileable_key], filter_fields=["nsplits"])[0][0]
shape = tuple(sum(s) for s in nsplits)
return TileableInfos(tileable_key, shape)
|
https://github.com/mars-project/mars/issues/1413
|
In [25]: merge_df = parsing_df.append(pFold_4_df)
In [26]: mDf_g = merge_df.groupby(["query","template"])
In [27]: mDf_g.execute()
Out[27]: ---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/core/formatters.py in __call__(self, obj)
700 type_pprinters=self.type_printers,
701 deferred_pprinters=self.deferred_printers)
--> 702 printer.pretty(obj)
703 printer.flush()
704 return stream.getvalue()
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in pretty(self, obj)
392 if cls is not object \
393 and callable(cls.__dict__.get('__repr__')):
--> 394 return _repr_pprint(obj, self, cycle)
395
396 return _default_pprint(obj, self, cycle)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in _repr_pprint(obj, p, cycle)
698 """A pprint that just redirects to the normal repr function."""
699 # Find newlines and replace them with p.break_()
--> 700 output = repr(obj)
701 lines = output.splitlines()
702 with p.group():
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in __repr__(self)
998
999 def __repr__(self):
-> 1000 return self._to_str(representation=True)
1001
1002 def _repr_html_(self):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in _to_str(self, representation)
968 else:
969 corner_data = fetch_corner_data(
--> 970 self, session=self._executed_sessions[-1])
971
972 buf = StringIO()
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/utils.py in fetch_corner_data(df_or_series, session)
768 return df_or_series.fetch(session=session)
769 else:
--> 770 head = iloc(df_or_series)[:index_size]
771 tail = iloc(df_or_series)[-index_size:]
772 head_data, tail_data = \
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __getitem__(self, indexes)
111 else:
112 op = SeriesIlocGetItem(indexes=process_iloc_indexes(self._obj, indexes))
--> 113 return op(self._obj)
114
115 def __setitem__(self, indexes, value):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __call__(self, series)
485 index_value = indexing_index_value(series.index_value, self.indexes[0])
486 inputs = [series] + [index for index in self._indexes if isinstance(index, (Base, Entity))]
--> 487 return self.new_series(inputs, shape=shape, dtype=series.dtype,
488 index_value=index_value, name=series.name)
489
AttributeError: 'DataFrameGroupByData' object has no attribute 'dtype'
|
AttributeError
|
def _install():
from ..core import DATAFRAME_TYPE, SERIES_TYPE, INDEX_TYPE
from .standardize_range_index import ChunkStandardizeRangeIndex
from .string_ import _string_method_to_handlers
from .datetimes import _datetime_method_to_handlers
from .accessor import StringAccessor, DatetimeAccessor, CachedAccessor
for t in DATAFRAME_TYPE:
setattr(t, "to_gpu", to_gpu)
setattr(t, "to_cpu", to_cpu)
setattr(t, "rechunk", rechunk)
setattr(t, "describe", describe)
setattr(t, "apply", df_apply)
setattr(t, "transform", df_transform)
setattr(t, "fillna", fillna)
setattr(t, "ffill", ffill)
setattr(t, "bfill", bfill)
setattr(t, "isna", isna)
setattr(t, "isnull", isnull)
setattr(t, "notna", notna)
setattr(t, "notnull", notnull)
setattr(t, "dropna", df_dropna)
setattr(t, "shift", shift)
setattr(t, "tshift", tshift)
setattr(t, "diff", df_diff)
setattr(t, "astype", astype)
setattr(t, "drop", df_drop)
setattr(t, "pop", df_pop)
setattr(
t, "__delitem__", lambda df, items: df_drop(df, items, axis=1, inplace=True)
)
setattr(t, "drop_duplicates", df_drop_duplicates)
setattr(t, "melt", melt)
for t in SERIES_TYPE:
setattr(t, "to_gpu", to_gpu)
setattr(t, "to_cpu", to_cpu)
setattr(t, "rechunk", rechunk)
setattr(t, "map", map_)
setattr(t, "describe", describe)
setattr(t, "apply", series_apply)
setattr(t, "transform", series_transform)
setattr(t, "fillna", fillna)
setattr(t, "ffill", ffill)
setattr(t, "bfill", bfill)
setattr(t, "isin", isin)
setattr(t, "isna", isna)
setattr(t, "isnull", isnull)
setattr(t, "notna", notna)
setattr(t, "notnull", notnull)
setattr(t, "dropna", series_dropna)
setattr(t, "shift", shift)
setattr(t, "tshift", tshift)
setattr(t, "diff", series_diff)
setattr(t, "value_counts", value_counts)
setattr(t, "astype", astype)
setattr(t, "drop", series_drop)
setattr(t, "drop_duplicates", series_drop_duplicates)
for t in INDEX_TYPE:
setattr(t, "rechunk", rechunk)
setattr(t, "drop", index_drop)
setattr(t, "drop_duplicates", index_drop_duplicates)
for method in _string_method_to_handlers:
if not hasattr(StringAccessor, method):
StringAccessor._register(method)
for method in _datetime_method_to_handlers:
if not hasattr(DatetimeAccessor, method):
DatetimeAccessor._register(method)
for series in SERIES_TYPE:
series.str = CachedAccessor("str", StringAccessor)
series.dt = CachedAccessor("dt", DatetimeAccessor)
|
def _install():
from ..core import DATAFRAME_TYPE, SERIES_TYPE, INDEX_TYPE
from .string_ import _string_method_to_handlers
from .datetimes import _datetime_method_to_handlers
from .accessor import StringAccessor, DatetimeAccessor, CachedAccessor
for t in DATAFRAME_TYPE:
setattr(t, "to_gpu", to_gpu)
setattr(t, "to_cpu", to_cpu)
setattr(t, "rechunk", rechunk)
setattr(t, "describe", describe)
setattr(t, "apply", df_apply)
setattr(t, "transform", df_transform)
setattr(t, "fillna", fillna)
setattr(t, "ffill", ffill)
setattr(t, "bfill", bfill)
setattr(t, "isna", isna)
setattr(t, "isnull", isnull)
setattr(t, "notna", notna)
setattr(t, "notnull", notnull)
setattr(t, "dropna", df_dropna)
setattr(t, "shift", shift)
setattr(t, "tshift", tshift)
setattr(t, "diff", df_diff)
setattr(t, "astype", astype)
setattr(t, "drop", df_drop)
setattr(t, "pop", df_pop)
setattr(
t, "__delitem__", lambda df, items: df_drop(df, items, axis=1, inplace=True)
)
setattr(t, "drop_duplicates", df_drop_duplicates)
setattr(t, "melt", melt)
for t in SERIES_TYPE:
setattr(t, "to_gpu", to_gpu)
setattr(t, "to_cpu", to_cpu)
setattr(t, "rechunk", rechunk)
setattr(t, "map", map_)
setattr(t, "describe", describe)
setattr(t, "apply", series_apply)
setattr(t, "transform", series_transform)
setattr(t, "fillna", fillna)
setattr(t, "ffill", ffill)
setattr(t, "bfill", bfill)
setattr(t, "isin", isin)
setattr(t, "isna", isna)
setattr(t, "isnull", isnull)
setattr(t, "notna", notna)
setattr(t, "notnull", notnull)
setattr(t, "dropna", series_dropna)
setattr(t, "shift", shift)
setattr(t, "tshift", tshift)
setattr(t, "diff", series_diff)
setattr(t, "value_counts", value_counts)
setattr(t, "astype", astype)
setattr(t, "drop", series_drop)
setattr(t, "drop_duplicates", series_drop_duplicates)
for t in INDEX_TYPE:
setattr(t, "rechunk", rechunk)
setattr(t, "drop", index_drop)
setattr(t, "drop_duplicates", index_drop_duplicates)
for method in _string_method_to_handlers:
if not hasattr(StringAccessor, method):
StringAccessor._register(method)
for method in _datetime_method_to_handlers:
if not hasattr(DatetimeAccessor, method):
DatetimeAccessor._register(method)
for series in SERIES_TYPE:
series.str = CachedAccessor("str", StringAccessor)
series.dt = CachedAccessor("dt", DatetimeAccessor)
|
https://github.com/mars-project/mars/issues/1413
|
In [25]: merge_df = parsing_df.append(pFold_4_df)
In [26]: mDf_g = merge_df.groupby(["query","template"])
In [27]: mDf_g.execute()
Out[27]: ---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/core/formatters.py in __call__(self, obj)
700 type_pprinters=self.type_printers,
701 deferred_pprinters=self.deferred_printers)
--> 702 printer.pretty(obj)
703 printer.flush()
704 return stream.getvalue()
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in pretty(self, obj)
392 if cls is not object \
393 and callable(cls.__dict__.get('__repr__')):
--> 394 return _repr_pprint(obj, self, cycle)
395
396 return _default_pprint(obj, self, cycle)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in _repr_pprint(obj, p, cycle)
698 """A pprint that just redirects to the normal repr function."""
699 # Find newlines and replace them with p.break_()
--> 700 output = repr(obj)
701 lines = output.splitlines()
702 with p.group():
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in __repr__(self)
998
999 def __repr__(self):
-> 1000 return self._to_str(representation=True)
1001
1002 def _repr_html_(self):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in _to_str(self, representation)
968 else:
969 corner_data = fetch_corner_data(
--> 970 self, session=self._executed_sessions[-1])
971
972 buf = StringIO()
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/utils.py in fetch_corner_data(df_or_series, session)
768 return df_or_series.fetch(session=session)
769 else:
--> 770 head = iloc(df_or_series)[:index_size]
771 tail = iloc(df_or_series)[-index_size:]
772 head_data, tail_data = \
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __getitem__(self, indexes)
111 else:
112 op = SeriesIlocGetItem(indexes=process_iloc_indexes(self._obj, indexes))
--> 113 return op(self._obj)
114
115 def __setitem__(self, indexes, value):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __call__(self, series)
485 index_value = indexing_index_value(series.index_value, self.indexes[0])
486 inputs = [series] + [index for index in self._indexes if isinstance(index, (Base, Entity))]
--> 487 return self.new_series(inputs, shape=shape, dtype=series.dtype,
488 index_value=index_value, name=series.name)
489
AttributeError: 'DataFrameGroupByData' object has no attribute 'dtype'
|
AttributeError
|
def tile(cls, op: "DataFrameGroupByAgg"):
if op.method == "auto":
ctx = get_context()
if (
ctx is not None and ctx.running_mode == RunningMode.distributed
): # pragma: no cover
return cls._tile_with_shuffle(op)
else:
return cls._tile_with_tree(op)
if op.method == "shuffle":
return cls._tile_with_shuffle(op)
elif op.method == "tree":
return cls._tile_with_tree(op)
else: # pragma: no cover
raise NotImplementedError
|
def tile(cls, op: "DataFrameGroupByAgg"):
if op.method == "shuffle":
return cls._tile_with_shuffle(op)
elif op.method == "tree":
return cls._tile_with_tree(op)
else: # pragma: no cover
raise NotImplementedError
|
https://github.com/mars-project/mars/issues/1413
|
In [25]: merge_df = parsing_df.append(pFold_4_df)
In [26]: mDf_g = merge_df.groupby(["query","template"])
In [27]: mDf_g.execute()
Out[27]: ---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/core/formatters.py in __call__(self, obj)
700 type_pprinters=self.type_printers,
701 deferred_pprinters=self.deferred_printers)
--> 702 printer.pretty(obj)
703 printer.flush()
704 return stream.getvalue()
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in pretty(self, obj)
392 if cls is not object \
393 and callable(cls.__dict__.get('__repr__')):
--> 394 return _repr_pprint(obj, self, cycle)
395
396 return _default_pprint(obj, self, cycle)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in _repr_pprint(obj, p, cycle)
698 """A pprint that just redirects to the normal repr function."""
699 # Find newlines and replace them with p.break_()
--> 700 output = repr(obj)
701 lines = output.splitlines()
702 with p.group():
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in __repr__(self)
998
999 def __repr__(self):
-> 1000 return self._to_str(representation=True)
1001
1002 def _repr_html_(self):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in _to_str(self, representation)
968 else:
969 corner_data = fetch_corner_data(
--> 970 self, session=self._executed_sessions[-1])
971
972 buf = StringIO()
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/utils.py in fetch_corner_data(df_or_series, session)
768 return df_or_series.fetch(session=session)
769 else:
--> 770 head = iloc(df_or_series)[:index_size]
771 tail = iloc(df_or_series)[-index_size:]
772 head_data, tail_data = \
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __getitem__(self, indexes)
111 else:
112 op = SeriesIlocGetItem(indexes=process_iloc_indexes(self._obj, indexes))
--> 113 return op(self._obj)
114
115 def __setitem__(self, indexes, value):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __call__(self, series)
485 index_value = indexing_index_value(series.index_value, self.indexes[0])
486 inputs = [series] + [index for index in self._indexes if isinstance(index, (Base, Entity))]
--> 487 return self.new_series(inputs, shape=shape, dtype=series.dtype,
488 index_value=index_value, name=series.name)
489
AttributeError: 'DataFrameGroupByData' object has no attribute 'dtype'
|
AttributeError
|
def agg(groupby, func, method="auto", *args, **kwargs):
"""
Aggregate using one or more operations on grouped data.
:param groupby: Groupby data.
:param func: Aggregation functions.
:param method: 'shuffle' or 'tree', 'tree' method provide a better performance, 'shuffle' is recommended
if aggregated result is very large, 'auto' will use 'shuffle' method in distributed mode and use 'tree'
in local mode.
:return: Aggregated result.
"""
# When perform a computation on the grouped data, we won't shuffle
# the data in the stage of groupby and do shuffle after aggregation.
if not isinstance(groupby, GROUPBY_TYPE):
raise TypeError("Input should be type of groupby, not %s" % type(groupby))
if method not in ["shuffle", "tree", "auto"]:
raise ValueError(
"Method %s is not available, please specify 'tree' or 'shuffle" % method
)
if not _check_if_func_available(func):
return groupby.transform(func, *args, _call_agg=True, **kwargs)
agg_op = DataFrameGroupByAgg(
func=func,
method=method,
raw_func=func,
groupby_params=groupby.op.groupby_params,
)
return agg_op(groupby)
|
def agg(groupby, func, method="tree", *args, **kwargs):
"""
Aggregate using one or more operations on grouped data.
:param groupby: Groupby data.
:param func: Aggregation functions.
:param method: 'shuffle' or 'tree', 'tree' method provide a better performance, 'shuffle' is recommended
if aggregated result is very large.
:return: Aggregated result.
"""
# When perform a computation on the grouped data, we won't shuffle
# the data in the stage of groupby and do shuffle after aggregation.
if not isinstance(groupby, GROUPBY_TYPE):
raise TypeError("Input should be type of groupby, not %s" % type(groupby))
if method not in ["shuffle", "tree"]:
raise ValueError(
"Method %s is not available, please specify 'tree' or 'shuffle" % method
)
if not _check_if_func_available(func):
return groupby.transform(func, *args, _call_agg=True, **kwargs)
agg_op = DataFrameGroupByAgg(
func=func,
method=method,
raw_func=func,
groupby_params=groupby.op.groupby_params,
)
return agg_op(groupby)
|
https://github.com/mars-project/mars/issues/1413
|
In [25]: merge_df = parsing_df.append(pFold_4_df)
In [26]: mDf_g = merge_df.groupby(["query","template"])
In [27]: mDf_g.execute()
Out[27]: ---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/core/formatters.py in __call__(self, obj)
700 type_pprinters=self.type_printers,
701 deferred_pprinters=self.deferred_printers)
--> 702 printer.pretty(obj)
703 printer.flush()
704 return stream.getvalue()
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in pretty(self, obj)
392 if cls is not object \
393 and callable(cls.__dict__.get('__repr__')):
--> 394 return _repr_pprint(obj, self, cycle)
395
396 return _default_pprint(obj, self, cycle)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in _repr_pprint(obj, p, cycle)
698 """A pprint that just redirects to the normal repr function."""
699 # Find newlines and replace them with p.break_()
--> 700 output = repr(obj)
701 lines = output.splitlines()
702 with p.group():
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in __repr__(self)
998
999 def __repr__(self):
-> 1000 return self._to_str(representation=True)
1001
1002 def _repr_html_(self):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in _to_str(self, representation)
968 else:
969 corner_data = fetch_corner_data(
--> 970 self, session=self._executed_sessions[-1])
971
972 buf = StringIO()
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/utils.py in fetch_corner_data(df_or_series, session)
768 return df_or_series.fetch(session=session)
769 else:
--> 770 head = iloc(df_or_series)[:index_size]
771 tail = iloc(df_or_series)[-index_size:]
772 head_data, tail_data = \
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __getitem__(self, indexes)
111 else:
112 op = SeriesIlocGetItem(indexes=process_iloc_indexes(self._obj, indexes))
--> 113 return op(self._obj)
114
115 def __setitem__(self, indexes, value):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __call__(self, series)
485 index_value = indexing_index_value(series.index_value, self.indexes[0])
486 inputs = [series] + [index for index in self._indexes if isinstance(index, (Base, Entity))]
--> 487 return self.new_series(inputs, shape=shape, dtype=series.dtype,
488 index_value=index_value, name=series.name)
489
AttributeError: 'DataFrameGroupByData' object has no attribute 'dtype'
|
AttributeError
|
def execute(cls, ctx, op):
a = ctx[op.inputs[0].key]
if op.sort_type == "sort_values":
ctx[op.outputs[0].key] = res = execute_sort_values(a, op)
else:
ctx[op.outputs[0].key] = res = execute_sort_index(a, op)
n = op.n_partition
if a.shape[op.axis] < n:
num = n // a.shape[op.axis] + 1
res = execute_sort_values(pd.concat([a] * num), op)
w = int(res.shape[op.axis] // n)
slc = (slice(None),) * op.axis + (slice(0, n * w, w),)
if op.sort_type == "sort_values":
# do regular sample
if op.by is not None:
ctx[op.outputs[-1].key] = res[op.by].iloc[slc]
else:
ctx[op.outputs[-1].key] = res.iloc[slc]
else:
# do regular sample
ctx[op.outputs[-1].key] = res.iloc[slc]
|
def execute(cls, ctx, op):
a = ctx[op.inputs[0].key]
n = op.n_partition
w = int(a.shape[op.axis] // n)
slc = (slice(None),) * op.axis + (slice(0, n * w, w),)
if op.sort_type == "sort_values":
ctx[op.outputs[0].key] = res = execute_sort_values(a, op)
# do regular sample
if op.by is not None:
ctx[op.outputs[-1].key] = res[op.by].iloc[slc]
else:
ctx[op.outputs[-1].key] = res.iloc[slc]
else:
ctx[op.outputs[0].key] = res = execute_sort_index(a, op)
# do regular sample
ctx[op.outputs[-1].key] = res.iloc[slc]
|
https://github.com/mars-project/mars/issues/1413
|
In [25]: merge_df = parsing_df.append(pFold_4_df)
In [26]: mDf_g = merge_df.groupby(["query","template"])
In [27]: mDf_g.execute()
Out[27]: ---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/core/formatters.py in __call__(self, obj)
700 type_pprinters=self.type_printers,
701 deferred_pprinters=self.deferred_printers)
--> 702 printer.pretty(obj)
703 printer.flush()
704 return stream.getvalue()
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in pretty(self, obj)
392 if cls is not object \
393 and callable(cls.__dict__.get('__repr__')):
--> 394 return _repr_pprint(obj, self, cycle)
395
396 return _default_pprint(obj, self, cycle)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in _repr_pprint(obj, p, cycle)
698 """A pprint that just redirects to the normal repr function."""
699 # Find newlines and replace them with p.break_()
--> 700 output = repr(obj)
701 lines = output.splitlines()
702 with p.group():
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in __repr__(self)
998
999 def __repr__(self):
-> 1000 return self._to_str(representation=True)
1001
1002 def _repr_html_(self):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in _to_str(self, representation)
968 else:
969 corner_data = fetch_corner_data(
--> 970 self, session=self._executed_sessions[-1])
971
972 buf = StringIO()
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/utils.py in fetch_corner_data(df_or_series, session)
768 return df_or_series.fetch(session=session)
769 else:
--> 770 head = iloc(df_or_series)[:index_size]
771 tail = iloc(df_or_series)[-index_size:]
772 head_data, tail_data = \
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __getitem__(self, indexes)
111 else:
112 op = SeriesIlocGetItem(indexes=process_iloc_indexes(self._obj, indexes))
--> 113 return op(self._obj)
114
115 def __setitem__(self, indexes, value):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __call__(self, series)
485 index_value = indexing_index_value(series.index_value, self.indexes[0])
486 inputs = [series] + [index for index in self._indexes if isinstance(index, (Base, Entity))]
--> 487 return self.new_series(inputs, shape=shape, dtype=series.dtype,
488 index_value=index_value, name=series.name)
489
AttributeError: 'DataFrameGroupByData' object has no attribute 'dtype'
|
AttributeError
|
def standardize_range_index(chunks, axis=0):
from .base.standardize_range_index import ChunkStandardizeRangeIndex
row_chunks = dict(
(k, next(v)) for k, v in itertools.groupby(chunks, key=lambda x: x.index[axis])
)
row_chunks = [row_chunks[i] for i in range(len(row_chunks))]
out_chunks = []
for c in chunks:
inputs = row_chunks[: c.index[axis]] + [c]
op = ChunkStandardizeRangeIndex(
prepare_inputs=[False] * (len(inputs) - 1) + [True],
axis=axis,
output_types=c.op.output_types,
)
out_chunks.append(op.new_chunk(inputs, **c.params.copy()))
return out_chunks
|
def standardize_range_index(chunks, axis=0):
from .base.standardize_range_index import ChunkStandardizeRangeIndex
row_chunks = dict(
(k, next(v)) for k, v in itertools.groupby(chunks, key=lambda x: x.index[axis])
)
row_chunks = [row_chunks[i] for i in range(len(row_chunks))]
out_chunks = []
for c in chunks:
inputs = row_chunks[: c.index[axis]] + [c]
op = ChunkStandardizeRangeIndex(
prepare_inputs=[False] * len(inputs),
axis=axis,
output_types=c.op.output_types,
)
out_chunks.append(op.new_chunk(inputs, **c.params.copy()))
return out_chunks
|
https://github.com/mars-project/mars/issues/1413
|
In [25]: merge_df = parsing_df.append(pFold_4_df)
In [26]: mDf_g = merge_df.groupby(["query","template"])
In [27]: mDf_g.execute()
Out[27]: ---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/core/formatters.py in __call__(self, obj)
700 type_pprinters=self.type_printers,
701 deferred_pprinters=self.deferred_printers)
--> 702 printer.pretty(obj)
703 printer.flush()
704 return stream.getvalue()
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in pretty(self, obj)
392 if cls is not object \
393 and callable(cls.__dict__.get('__repr__')):
--> 394 return _repr_pprint(obj, self, cycle)
395
396 return _default_pprint(obj, self, cycle)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in _repr_pprint(obj, p, cycle)
698 """A pprint that just redirects to the normal repr function."""
699 # Find newlines and replace them with p.break_()
--> 700 output = repr(obj)
701 lines = output.splitlines()
702 with p.group():
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in __repr__(self)
998
999 def __repr__(self):
-> 1000 return self._to_str(representation=True)
1001
1002 def _repr_html_(self):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in _to_str(self, representation)
968 else:
969 corner_data = fetch_corner_data(
--> 970 self, session=self._executed_sessions[-1])
971
972 buf = StringIO()
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/utils.py in fetch_corner_data(df_or_series, session)
768 return df_or_series.fetch(session=session)
769 else:
--> 770 head = iloc(df_or_series)[:index_size]
771 tail = iloc(df_or_series)[-index_size:]
772 head_data, tail_data = \
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __getitem__(self, indexes)
111 else:
112 op = SeriesIlocGetItem(indexes=process_iloc_indexes(self._obj, indexes))
--> 113 return op(self._obj)
114
115 def __setitem__(self, indexes, value):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __call__(self, series)
485 index_value = indexing_index_value(series.index_value, self.indexes[0])
486 inputs = [series] + [index for index in self._indexes if isinstance(index, (Base, Entity))]
--> 487 return self.new_series(inputs, shape=shape, dtype=series.dtype,
488 index_value=index_value, name=series.name)
489
AttributeError: 'DataFrameGroupByData' object has no attribute 'dtype'
|
AttributeError
|
def get_dependent_data_keys(self):
return [dep.key for dep in self.inputs or ()]
|
def get_dependent_data_keys(self):
return [
dep.key
for dep, has_dep in zip(self.inputs or (), self.prepare_inputs)
if has_dep
]
|
https://github.com/mars-project/mars/issues/1413
|
In [25]: merge_df = parsing_df.append(pFold_4_df)
In [26]: mDf_g = merge_df.groupby(["query","template"])
In [27]: mDf_g.execute()
Out[27]: ---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/core/formatters.py in __call__(self, obj)
700 type_pprinters=self.type_printers,
701 deferred_pprinters=self.deferred_printers)
--> 702 printer.pretty(obj)
703 printer.flush()
704 return stream.getvalue()
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in pretty(self, obj)
392 if cls is not object \
393 and callable(cls.__dict__.get('__repr__')):
--> 394 return _repr_pprint(obj, self, cycle)
395
396 return _default_pprint(obj, self, cycle)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in _repr_pprint(obj, p, cycle)
698 """A pprint that just redirects to the normal repr function."""
699 # Find newlines and replace them with p.break_()
--> 700 output = repr(obj)
701 lines = output.splitlines()
702 with p.group():
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in __repr__(self)
998
999 def __repr__(self):
-> 1000 return self._to_str(representation=True)
1001
1002 def _repr_html_(self):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in _to_str(self, representation)
968 else:
969 corner_data = fetch_corner_data(
--> 970 self, session=self._executed_sessions[-1])
971
972 buf = StringIO()
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/utils.py in fetch_corner_data(df_or_series, session)
768 return df_or_series.fetch(session=session)
769 else:
--> 770 head = iloc(df_or_series)[:index_size]
771 tail = iloc(df_or_series)[-index_size:]
772 head_data, tail_data = \
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __getitem__(self, indexes)
111 else:
112 op = SeriesIlocGetItem(indexes=process_iloc_indexes(self._obj, indexes))
--> 113 return op(self._obj)
114
115 def __setitem__(self, indexes, value):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __call__(self, series)
485 index_value = indexing_index_value(series.index_value, self.indexes[0])
486 inputs = [series] + [index for index in self._indexes if isinstance(index, (Base, Entity))]
--> 487 return self.new_series(inputs, shape=shape, dtype=series.dtype,
488 index_value=index_value, name=series.name)
489
AttributeError: 'DataFrameGroupByData' object has no attribute 'dtype'
|
AttributeError
|
def get_dependent_data_keys(self):
if self.stage == OperandStage.reduce:
inputs = self.inputs or ()
deps = []
for inp in inputs:
if isinstance(inp.op, (ShuffleProxy, FetchShuffle)):
deps.extend(
[(chunk.key, self._shuffle_key) for chunk in inp.inputs or ()]
)
else:
deps.append(inp.key)
return deps
return super().get_dependent_data_keys()
|
def get_dependent_data_keys(self):
if self.stage == OperandStage.reduce:
inputs = self.inputs or ()
return [
(chunk.key, self._shuffle_key)
for proxy in inputs
for chunk in proxy.inputs or ()
]
return super().get_dependent_data_keys()
|
https://github.com/mars-project/mars/issues/1413
|
In [25]: merge_df = parsing_df.append(pFold_4_df)
In [26]: mDf_g = merge_df.groupby(["query","template"])
In [27]: mDf_g.execute()
Out[27]: ---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/core/formatters.py in __call__(self, obj)
700 type_pprinters=self.type_printers,
701 deferred_pprinters=self.deferred_printers)
--> 702 printer.pretty(obj)
703 printer.flush()
704 return stream.getvalue()
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in pretty(self, obj)
392 if cls is not object \
393 and callable(cls.__dict__.get('__repr__')):
--> 394 return _repr_pprint(obj, self, cycle)
395
396 return _default_pprint(obj, self, cycle)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in _repr_pprint(obj, p, cycle)
698 """A pprint that just redirects to the normal repr function."""
699 # Find newlines and replace them with p.break_()
--> 700 output = repr(obj)
701 lines = output.splitlines()
702 with p.group():
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in __repr__(self)
998
999 def __repr__(self):
-> 1000 return self._to_str(representation=True)
1001
1002 def _repr_html_(self):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in _to_str(self, representation)
968 else:
969 corner_data = fetch_corner_data(
--> 970 self, session=self._executed_sessions[-1])
971
972 buf = StringIO()
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/utils.py in fetch_corner_data(df_or_series, session)
768 return df_or_series.fetch(session=session)
769 else:
--> 770 head = iloc(df_or_series)[:index_size]
771 tail = iloc(df_or_series)[-index_size:]
772 head_data, tail_data = \
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __getitem__(self, indexes)
111 else:
112 op = SeriesIlocGetItem(indexes=process_iloc_indexes(self._obj, indexes))
--> 113 return op(self._obj)
114
115 def __setitem__(self, indexes, value):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __call__(self, series)
485 index_value = indexing_index_value(series.index_value, self.indexes[0])
486 inputs = [series] + [index for index in self._indexes if isinstance(index, (Base, Entity))]
--> 487 return self.new_series(inputs, shape=shape, dtype=series.dtype,
488 index_value=index_value, name=series.name)
489
AttributeError: 'DataFrameGroupByData' object has no attribute 'dtype'
|
AttributeError
|
def execute(cls, ctx, op: "RemoteFunction"):
from ..session import Session
session = ctx.get_current_session()
prev_default_session = Session.default
mapping = {
inp: ctx[inp.key]
for inp, prepare_inp in zip(op.inputs, op.prepare_inputs)
if prepare_inp
}
for to_search in [op.function_args, op.function_kwargs]:
tileable_placeholders = find_objects(to_search, _TileablePlaceholder)
for ph in tileable_placeholders:
tileable = ph.tileable
chunk_index_to_shape = dict()
for chunk in tileable.chunks:
if any(np.isnan(s) for s in chunk.shape):
shape = ctx.get_chunk_metas(
[chunk.key], filter_fields=["chunk_shape"]
)[0][0]
chunk._shape = shape
chunk_index_to_shape[chunk.index] = chunk.shape
if any(any(np.isnan(s) for s in ns) for ns in tileable._nsplits):
nsplits = calc_nsplits(chunk_index_to_shape)
tileable._nsplits = nsplits
tileable._shape = tuple(sum(ns) for ns in nsplits)
mapping[ph] = tileable
function = op.function
function_args = replace_inputs(op.function_args, mapping)
function_kwargs = replace_inputs(op.function_kwargs, mapping)
# set session created from context as default one
session.as_default()
try:
if isinstance(ctx, ContextBase):
with ctx:
result = function(*function_args, **function_kwargs)
else:
result = function(*function_args, **function_kwargs)
finally:
# set back default session
Session._set_default_session(prev_default_session)
if op.n_output is None:
ctx[op.outputs[0].key] = result
else:
if not isinstance(result, Iterable):
raise TypeError(
"Specifying n_output={}, but result is not iterable, got {}".format(
op.n_output, result
)
)
result = list(result)
if len(result) != op.n_output:
raise ValueError(
"Length of return value should be {}, got {}".format(
op.n_output, len(result)
)
)
for out, r in zip(op.outputs, result):
ctx[out.key] = r
|
def execute(cls, ctx, op: "RemoteFunction"):
from ..session import Session
session = ctx.get_current_session()
prev_default_session = Session.default
mapping = {
inp: ctx[inp.key]
for inp, prepare_inp in zip(op.inputs, op.prepare_inputs)
if prepare_inp
}
for to_search in [op.function_args, op.function_kwargs]:
tileable_placeholders = find_objects(to_search, _TileablePlaceholder)
for ph in tileable_placeholders:
mapping[ph] = ph.tileable
function = op.function
function_args = replace_inputs(op.function_args, mapping)
function_kwargs = replace_inputs(op.function_kwargs, mapping)
# set session created from context as default one
session.as_default()
try:
if isinstance(ctx, ContextBase):
with ctx:
result = function(*function_args, **function_kwargs)
else:
result = function(*function_args, **function_kwargs)
finally:
# set back default session
Session._set_default_session(prev_default_session)
if op.n_output is None:
ctx[op.outputs[0].key] = result
else:
if not isinstance(result, Iterable):
raise TypeError(
"Specifying n_output={}, but result is not iterable, got {}".format(
op.n_output, result
)
)
result = list(result)
if len(result) != op.n_output:
raise ValueError(
"Length of return value should be {}, got {}".format(
op.n_output, len(result)
)
)
for out, r in zip(op.outputs, result):
ctx[out.key] = r
|
https://github.com/mars-project/mars/issues/1413
|
In [25]: merge_df = parsing_df.append(pFold_4_df)
In [26]: mDf_g = merge_df.groupby(["query","template"])
In [27]: mDf_g.execute()
Out[27]: ---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/core/formatters.py in __call__(self, obj)
700 type_pprinters=self.type_printers,
701 deferred_pprinters=self.deferred_printers)
--> 702 printer.pretty(obj)
703 printer.flush()
704 return stream.getvalue()
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in pretty(self, obj)
392 if cls is not object \
393 and callable(cls.__dict__.get('__repr__')):
--> 394 return _repr_pprint(obj, self, cycle)
395
396 return _default_pprint(obj, self, cycle)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in _repr_pprint(obj, p, cycle)
698 """A pprint that just redirects to the normal repr function."""
699 # Find newlines and replace them with p.break_()
--> 700 output = repr(obj)
701 lines = output.splitlines()
702 with p.group():
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in __repr__(self)
998
999 def __repr__(self):
-> 1000 return self._to_str(representation=True)
1001
1002 def _repr_html_(self):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in _to_str(self, representation)
968 else:
969 corner_data = fetch_corner_data(
--> 970 self, session=self._executed_sessions[-1])
971
972 buf = StringIO()
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/utils.py in fetch_corner_data(df_or_series, session)
768 return df_or_series.fetch(session=session)
769 else:
--> 770 head = iloc(df_or_series)[:index_size]
771 tail = iloc(df_or_series)[-index_size:]
772 head_data, tail_data = \
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __getitem__(self, indexes)
111 else:
112 op = SeriesIlocGetItem(indexes=process_iloc_indexes(self._obj, indexes))
--> 113 return op(self._obj)
114
115 def __setitem__(self, indexes, value):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __call__(self, series)
485 index_value = indexing_index_value(series.index_value, self.indexes[0])
486 inputs = [series] + [index for index in self._indexes if isinstance(index, (Base, Entity))]
--> 487 return self.new_series(inputs, shape=shape, dtype=series.dtype,
488 index_value=index_value, name=series.name)
489
AttributeError: 'DataFrameGroupByData' object has no attribute 'dtype'
|
AttributeError
|
def preprocess(cls, op, in_data=None):
if in_data is None:
in_data = op.inputs[0]
axis_shape = in_data.shape[op.axis]
axis_chunk_shape = in_data.chunk_shape[op.axis]
# rechunk to ensure all chunks on axis have rough same size
has_unknown_shape = False
for ns in in_data.nsplits:
if any(np.isnan(s) for s in ns):
has_unknown_shape = True
break
if not has_unknown_shape:
axis_chunk_shape = min(axis_chunk_shape, int(np.sqrt(axis_shape)))
if np.isnan(axis_shape) or any(np.isnan(s) for s in in_data.nsplits[op.axis]):
raise TilesError(
"fail to tile because either the shape of "
"input data on axis {} has unknown shape or chunk shape".format(op.axis)
)
chunk_size = int(axis_shape / axis_chunk_shape)
chunk_sizes = [chunk_size for _ in range(int(axis_shape // chunk_size))]
if axis_shape % chunk_size > 0:
chunk_sizes[-1] += axis_shape % chunk_size
in_data = in_data.rechunk({op.axis: tuple(chunk_sizes)})._inplace_tile()
axis_chunk_shape = in_data.chunk_shape[op.axis]
left_chunk_shape = (
in_data.chunk_shape[: op.axis] + in_data.chunk_shape[op.axis + 1 :]
)
if len(left_chunk_shape) > 0:
out_idxes = itertools.product(*(range(s) for s in left_chunk_shape))
else:
out_idxes = [()]
# if the size except axis has more than 1, the sorted values on each one may be different
# another shuffle would be required to make sure each axis except to sort
# has elements with identical size
extra_shape = [s for i, s in enumerate(in_data.shape) if i != op.axis]
if getattr(op, "need_align", None) is None:
need_align = bool(np.prod(extra_shape, dtype=int) != 1)
else:
need_align = op.need_align
return in_data, axis_chunk_shape, out_idxes, need_align
|
def preprocess(cls, op, in_data=None):
in_data = in_data or op.inputs[0]
axis_shape = in_data.shape[op.axis]
axis_chunk_shape = in_data.chunk_shape[op.axis]
# rechunk to ensure all chunks on axis have rough same size
axis_chunk_shape = min(axis_chunk_shape, int(np.sqrt(axis_shape)))
if np.isnan(axis_shape) or any(np.isnan(s) for s in in_data.nsplits[op.axis]):
raise TilesError(
"fail to tile because either the shape of "
"input data on axis {} has unknown shape or chunk shape".format(op.axis)
)
chunk_size = int(axis_shape / axis_chunk_shape)
chunk_sizes = [chunk_size for _ in range(int(axis_shape // chunk_size))]
if axis_shape % chunk_size > 0:
chunk_sizes[-1] += axis_shape % chunk_size
in_data = in_data.rechunk({op.axis: tuple(chunk_sizes)})._inplace_tile()
axis_chunk_shape = in_data.chunk_shape[op.axis]
left_chunk_shape = (
in_data.chunk_shape[: op.axis] + in_data.chunk_shape[op.axis + 1 :]
)
if len(left_chunk_shape) > 0:
out_idxes = itertools.product(*(range(s) for s in left_chunk_shape))
else:
out_idxes = [()]
# if the size except axis has more than 1, the sorted values on each one may be different
# another shuffle would be required to make sure each axis except to sort
# has elements with identical size
extra_shape = [s for i, s in enumerate(in_data.shape) if i != op.axis]
if getattr(op, "need_align", None) is None:
need_align = bool(np.prod(extra_shape, dtype=int) != 1)
else:
need_align = op.need_align
return in_data, axis_chunk_shape, out_idxes, need_align
|
https://github.com/mars-project/mars/issues/1413
|
In [25]: merge_df = parsing_df.append(pFold_4_df)
In [26]: mDf_g = merge_df.groupby(["query","template"])
In [27]: mDf_g.execute()
Out[27]: ---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/core/formatters.py in __call__(self, obj)
700 type_pprinters=self.type_printers,
701 deferred_pprinters=self.deferred_printers)
--> 702 printer.pretty(obj)
703 printer.flush()
704 return stream.getvalue()
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in pretty(self, obj)
392 if cls is not object \
393 and callable(cls.__dict__.get('__repr__')):
--> 394 return _repr_pprint(obj, self, cycle)
395
396 return _default_pprint(obj, self, cycle)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in _repr_pprint(obj, p, cycle)
698 """A pprint that just redirects to the normal repr function."""
699 # Find newlines and replace them with p.break_()
--> 700 output = repr(obj)
701 lines = output.splitlines()
702 with p.group():
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in __repr__(self)
998
999 def __repr__(self):
-> 1000 return self._to_str(representation=True)
1001
1002 def _repr_html_(self):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in _to_str(self, representation)
968 else:
969 corner_data = fetch_corner_data(
--> 970 self, session=self._executed_sessions[-1])
971
972 buf = StringIO()
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/utils.py in fetch_corner_data(df_or_series, session)
768 return df_or_series.fetch(session=session)
769 else:
--> 770 head = iloc(df_or_series)[:index_size]
771 tail = iloc(df_or_series)[-index_size:]
772 head_data, tail_data = \
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __getitem__(self, indexes)
111 else:
112 op = SeriesIlocGetItem(indexes=process_iloc_indexes(self._obj, indexes))
--> 113 return op(self._obj)
114
115 def __setitem__(self, indexes, value):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __call__(self, series)
485 index_value = indexing_index_value(series.index_value, self.indexes[0])
486 inputs = [series] + [index for index in self._indexes if isinstance(index, (Base, Entity))]
--> 487 return self.new_series(inputs, shape=shape, dtype=series.dtype,
488 index_value=index_value, name=series.name)
489
AttributeError: 'DataFrameGroupByData' object has no attribute 'dtype'
|
AttributeError
|
def plan_rechunks(
tileable, new_chunk_size, itemsize, threshold=None, chunk_size_limit=None
):
threshold = threshold or options.rechunk.threshold
chunk_size_limit = chunk_size_limit or options.rechunk.chunk_size_limit
if len(new_chunk_size) != tileable.ndim:
raise ValueError(
"Provided chunks should have %d dimensions, got %d"
% (tileable.ndim, len(new_chunk_size))
)
steps = []
if itemsize > 0:
chunk_size_limit /= itemsize
chunk_size_limit = max(
[
int(chunk_size_limit),
_largest_chunk_size(tileable.nsplits),
_largest_chunk_size(new_chunk_size),
]
)
graph_size_threshold = threshold * (
_chunk_number(tileable.nsplits) + _chunk_number(new_chunk_size)
)
chunk_size = curr_chunk_size = tileable.nsplits
first_run = True
while True:
graph_size = _estimate_graph_size(chunk_size, new_chunk_size)
if graph_size < graph_size_threshold:
break
if not first_run:
chunk_size = _find_split_rechunk(
curr_chunk_size, new_chunk_size, graph_size * threshold
)
chunks_size, memory_limit_hit = _find_merge_rechunk(
chunk_size, new_chunk_size, chunk_size_limit
)
if chunk_size == curr_chunk_size or chunk_size == new_chunk_size:
break
steps.append(chunk_size)
curr_chunk_size = chunk_size
if not memory_limit_hit:
break
first_run = False
return steps + [new_chunk_size]
|
def plan_rechunks(
tileable, new_chunk_size, itemsize, threshold=None, chunk_size_limit=None
):
threshold = threshold or options.rechunk.threshold
chunk_size_limit = chunk_size_limit or options.rechunk.chunk_size_limit
if len(new_chunk_size) != tileable.ndim:
raise ValueError(
"Provided chunks should have %d dimensions, got %d"
% (tileable.ndim, len(new_chunk_size))
)
steps = []
chunk_size_limit /= itemsize
chunk_size_limit = max(
[
int(chunk_size_limit),
_largest_chunk_size(tileable.nsplits),
_largest_chunk_size(new_chunk_size),
]
)
graph_size_threshold = threshold * (
_chunk_number(tileable.nsplits) + _chunk_number(new_chunk_size)
)
chunk_size = curr_chunk_size = tileable.nsplits
first_run = True
while True:
graph_size = _estimate_graph_size(chunk_size, new_chunk_size)
if graph_size < graph_size_threshold:
break
if not first_run:
chunk_size = _find_split_rechunk(
curr_chunk_size, new_chunk_size, graph_size * threshold
)
chunks_size, memory_limit_hit = _find_merge_rechunk(
chunk_size, new_chunk_size, chunk_size_limit
)
if chunk_size == curr_chunk_size or chunk_size == new_chunk_size:
break
steps.append(chunk_size)
curr_chunk_size = chunk_size
if not memory_limit_hit:
break
first_run = False
return steps + [new_chunk_size]
|
https://github.com/mars-project/mars/issues/1413
|
In [25]: merge_df = parsing_df.append(pFold_4_df)
In [26]: mDf_g = merge_df.groupby(["query","template"])
In [27]: mDf_g.execute()
Out[27]: ---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/core/formatters.py in __call__(self, obj)
700 type_pprinters=self.type_printers,
701 deferred_pprinters=self.deferred_printers)
--> 702 printer.pretty(obj)
703 printer.flush()
704 return stream.getvalue()
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in pretty(self, obj)
392 if cls is not object \
393 and callable(cls.__dict__.get('__repr__')):
--> 394 return _repr_pprint(obj, self, cycle)
395
396 return _default_pprint(obj, self, cycle)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in _repr_pprint(obj, p, cycle)
698 """A pprint that just redirects to the normal repr function."""
699 # Find newlines and replace them with p.break_()
--> 700 output = repr(obj)
701 lines = output.splitlines()
702 with p.group():
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in __repr__(self)
998
999 def __repr__(self):
-> 1000 return self._to_str(representation=True)
1001
1002 def _repr_html_(self):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in _to_str(self, representation)
968 else:
969 corner_data = fetch_corner_data(
--> 970 self, session=self._executed_sessions[-1])
971
972 buf = StringIO()
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/utils.py in fetch_corner_data(df_or_series, session)
768 return df_or_series.fetch(session=session)
769 else:
--> 770 head = iloc(df_or_series)[:index_size]
771 tail = iloc(df_or_series)[-index_size:]
772 head_data, tail_data = \
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __getitem__(self, indexes)
111 else:
112 op = SeriesIlocGetItem(indexes=process_iloc_indexes(self._obj, indexes))
--> 113 return op(self._obj)
114
115 def __setitem__(self, indexes, value):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __call__(self, series)
485 index_value = indexing_index_value(series.index_value, self.indexes[0])
486 inputs = [series] + [index for index in self._indexes if isinstance(index, (Base, Entity))]
--> 487 return self.new_series(inputs, shape=shape, dtype=series.dtype,
488 index_value=index_value, name=series.name)
489
AttributeError: 'DataFrameGroupByData' object has no attribute 'dtype'
|
AttributeError
|
def is_object_dtype(dtype):
try:
return (
np.issubdtype(dtype, np.object_)
or np.issubdtype(dtype, np.unicode_)
or np.issubdtype(dtype, np.bytes_)
)
except TypeError: # pragma: no cover
return False
|
def is_object_dtype(dtype):
return (
np.issubdtype(dtype, np.object_)
or np.issubdtype(dtype, np.unicode_)
or np.issubdtype(dtype, np.bytes_)
)
|
https://github.com/mars-project/mars/issues/1413
|
In [25]: merge_df = parsing_df.append(pFold_4_df)
In [26]: mDf_g = merge_df.groupby(["query","template"])
In [27]: mDf_g.execute()
Out[27]: ---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/core/formatters.py in __call__(self, obj)
700 type_pprinters=self.type_printers,
701 deferred_pprinters=self.deferred_printers)
--> 702 printer.pretty(obj)
703 printer.flush()
704 return stream.getvalue()
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in pretty(self, obj)
392 if cls is not object \
393 and callable(cls.__dict__.get('__repr__')):
--> 394 return _repr_pprint(obj, self, cycle)
395
396 return _default_pprint(obj, self, cycle)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in _repr_pprint(obj, p, cycle)
698 """A pprint that just redirects to the normal repr function."""
699 # Find newlines and replace them with p.break_()
--> 700 output = repr(obj)
701 lines = output.splitlines()
702 with p.group():
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in __repr__(self)
998
999 def __repr__(self):
-> 1000 return self._to_str(representation=True)
1001
1002 def _repr_html_(self):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in _to_str(self, representation)
968 else:
969 corner_data = fetch_corner_data(
--> 970 self, session=self._executed_sessions[-1])
971
972 buf = StringIO()
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/utils.py in fetch_corner_data(df_or_series, session)
768 return df_or_series.fetch(session=session)
769 else:
--> 770 head = iloc(df_or_series)[:index_size]
771 tail = iloc(df_or_series)[-index_size:]
772 head_data, tail_data = \
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __getitem__(self, indexes)
111 else:
112 op = SeriesIlocGetItem(indexes=process_iloc_indexes(self._obj, indexes))
--> 113 return op(self._obj)
114
115 def __setitem__(self, indexes, value):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __call__(self, series)
485 index_value = indexing_index_value(series.index_value, self.indexes[0])
486 inputs = [series] + [index for index in self._indexes if isinstance(index, (Base, Entity))]
--> 487 return self.new_series(inputs, shape=shape, dtype=series.dtype,
488 index_value=index_value, name=series.name)
489
AttributeError: 'DataFrameGroupByData' object has no attribute 'dtype'
|
AttributeError
|
def _calc_results(self, session_id, graph_key, graph, context_dict, chunk_targets):
_, op_name = concat_operand_keys(graph, "_")
logger.debug("Start calculating operand %s in %s.", graph_key, self.uid)
start_time = time.time()
for chunk in graph:
for inp, prepare_inp in zip(chunk.inputs, chunk.op.prepare_inputs):
if not prepare_inp:
context_dict[inp.key] = None
local_context_dict = DistributedDictContext(
self.get_scheduler(self.default_uid()),
session_id,
actor_ctx=self.ctx,
address=self.address,
n_cpu=self._get_n_cpu(),
)
local_context_dict["_actor_cls"] = type(self)
local_context_dict["_actor_uid"] = self.uid
local_context_dict["_op_key"] = graph_key
local_context_dict.update(context_dict)
context_dict.clear()
# start actual execution
executor = Executor(storage=local_context_dict)
with EventContext(
self._events_ref,
EventCategory.PROCEDURE,
EventLevel.NORMAL,
self._calc_event_type,
self.uid,
):
self._execution_pool.submit(
executor.execute_graph, graph, chunk_targets, retval=False
).result()
end_time = time.time()
# collect results
result_keys = []
result_values = []
result_sizes = []
collected_chunk_keys = set()
for k, v in local_context_dict.items():
if isinstance(k, tuple):
k = tuple(to_str(i) for i in k)
else:
k = to_str(k)
chunk_key = get_chunk_key(k)
if chunk_key in chunk_targets:
result_keys.append(k)
result_values.append(v)
result_sizes.append(calc_data_size(v))
collected_chunk_keys.add(chunk_key)
local_context_dict.clear()
# check if all targets generated
if any(k not in collected_chunk_keys for k in chunk_targets):
raise KeyError([k for k in chunk_targets if k not in collected_chunk_keys])
# adjust sizes in allocation
apply_allocs = defaultdict(lambda: 0)
for k, size in zip(result_keys, result_sizes):
apply_allocs[get_chunk_key(k)] += size
apply_alloc_quota_keys, apply_alloc_sizes = [], []
for k, v in apply_allocs.items():
apply_alloc_quota_keys.append(
build_quota_key(session_id, k, owner=self.proc_id)
)
apply_alloc_sizes.append(v)
self._mem_quota_ref.alter_allocations(
apply_alloc_quota_keys, apply_alloc_sizes, _tell=True, _wait=False
)
self._mem_quota_ref.hold_quotas(apply_alloc_quota_keys, _tell=True)
if self._status_ref:
self._status_ref.update_mean_stats(
"calc_speed." + op_name,
sum(apply_alloc_sizes) * 1.0 / (end_time - start_time),
_tell=True,
_wait=False,
)
logger.debug("Finish calculating operand %s.", graph_key)
return self.storage_client.put_objects(
session_id,
result_keys,
result_values,
[self._calc_intermediate_device],
sizes=result_sizes,
).then(lambda *_: result_keys)
|
def _calc_results(self, session_id, graph_key, graph, context_dict, chunk_targets):
_, op_name = concat_operand_keys(graph, "_")
logger.debug("Start calculating operand %s in %s.", graph_key, self.uid)
start_time = time.time()
local_context_dict = DistributedDictContext(
self.get_scheduler(self.default_uid()),
session_id,
actor_ctx=self.ctx,
address=self.address,
n_cpu=self._get_n_cpu(),
)
local_context_dict["_actor_cls"] = type(self)
local_context_dict["_actor_uid"] = self.uid
local_context_dict["_op_key"] = graph_key
local_context_dict.update(context_dict)
context_dict.clear()
# start actual execution
executor = Executor(storage=local_context_dict)
with EventContext(
self._events_ref,
EventCategory.PROCEDURE,
EventLevel.NORMAL,
self._calc_event_type,
self.uid,
):
self._execution_pool.submit(
executor.execute_graph, graph, chunk_targets, retval=False
).result()
end_time = time.time()
# collect results
result_keys = []
result_values = []
result_sizes = []
collected_chunk_keys = set()
for k, v in local_context_dict.items():
if isinstance(k, tuple):
k = tuple(to_str(i) for i in k)
else:
k = to_str(k)
chunk_key = get_chunk_key(k)
if chunk_key in chunk_targets:
result_keys.append(k)
result_values.append(v)
result_sizes.append(calc_data_size(v))
collected_chunk_keys.add(chunk_key)
local_context_dict.clear()
# check if all targets generated
if any(k not in collected_chunk_keys for k in chunk_targets):
raise KeyError([k for k in chunk_targets if k not in collected_chunk_keys])
# adjust sizes in allocation
apply_allocs = defaultdict(lambda: 0)
for k, size in zip(result_keys, result_sizes):
apply_allocs[get_chunk_key(k)] += size
apply_alloc_quota_keys, apply_alloc_sizes = [], []
for k, v in apply_allocs.items():
apply_alloc_quota_keys.append(
build_quota_key(session_id, k, owner=self.proc_id)
)
apply_alloc_sizes.append(v)
self._mem_quota_ref.alter_allocations(
apply_alloc_quota_keys, apply_alloc_sizes, _tell=True, _wait=False
)
self._mem_quota_ref.hold_quotas(apply_alloc_quota_keys, _tell=True)
if self._status_ref:
self._status_ref.update_mean_stats(
"calc_speed." + op_name,
sum(apply_alloc_sizes) * 1.0 / (end_time - start_time),
_tell=True,
_wait=False,
)
logger.debug("Finish calculating operand %s.", graph_key)
return self.storage_client.put_objects(
session_id,
result_keys,
result_values,
[self._calc_intermediate_device],
sizes=result_sizes,
).then(lambda *_: result_keys)
|
https://github.com/mars-project/mars/issues/1413
|
In [25]: merge_df = parsing_df.append(pFold_4_df)
In [26]: mDf_g = merge_df.groupby(["query","template"])
In [27]: mDf_g.execute()
Out[27]: ---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/core/formatters.py in __call__(self, obj)
700 type_pprinters=self.type_printers,
701 deferred_pprinters=self.deferred_printers)
--> 702 printer.pretty(obj)
703 printer.flush()
704 return stream.getvalue()
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in pretty(self, obj)
392 if cls is not object \
393 and callable(cls.__dict__.get('__repr__')):
--> 394 return _repr_pprint(obj, self, cycle)
395
396 return _default_pprint(obj, self, cycle)
~/anaconda3/envs/py37/lib/python3.7/site-packages/IPython/lib/pretty.py in _repr_pprint(obj, p, cycle)
698 """A pprint that just redirects to the normal repr function."""
699 # Find newlines and replace them with p.break_()
--> 700 output = repr(obj)
701 lines = output.splitlines()
702 with p.group():
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in __repr__(self)
998
999 def __repr__(self):
-> 1000 return self._to_str(representation=True)
1001
1002 def _repr_html_(self):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/core.py in _to_str(self, representation)
968 else:
969 corner_data = fetch_corner_data(
--> 970 self, session=self._executed_sessions[-1])
971
972 buf = StringIO()
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/utils.py in fetch_corner_data(df_or_series, session)
768 return df_or_series.fetch(session=session)
769 else:
--> 770 head = iloc(df_or_series)[:index_size]
771 tail = iloc(df_or_series)[-index_size:]
772 head_data, tail_data = \
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __getitem__(self, indexes)
111 else:
112 op = SeriesIlocGetItem(indexes=process_iloc_indexes(self._obj, indexes))
--> 113 return op(self._obj)
114
115 def __setitem__(self, indexes, value):
~/anaconda3/envs/py37/lib/python3.7/site-packages/mars/dataframe/indexing/iloc.py in __call__(self, series)
485 index_value = indexing_index_value(series.index_value, self.indexes[0])
486 inputs = [series] + [index for index in self._indexes if isinstance(index, (Base, Entity))]
--> 487 return self.new_series(inputs, shape=shape, dtype=series.dtype,
488 index_value=index_value, name=series.name)
489
AttributeError: 'DataFrameGroupByData' object has no attribute 'dtype'
|
AttributeError
|
def _get_selectable(self, engine_or_conn, columns=None):
import sqlalchemy as sa
from sqlalchemy import sql
from sqlalchemy.exc import NoSuchTableError
# process table_name
if self._selectable is not None:
selectable = self._selectable
else:
if isinstance(self._table_or_sql, sa.Table):
selectable = self._table_or_sql
self._table_or_sql = selectable.name
else:
m = sa.MetaData()
try:
selectable = sa.Table(
self._table_or_sql,
m,
autoload=True,
autoload_with=engine_or_conn,
schema=self._schema,
)
except NoSuchTableError:
temp_name_1 = "t1_" + binascii.b2a_hex(uuid.uuid4().bytes).decode()
temp_name_2 = "t2_" + binascii.b2a_hex(uuid.uuid4().bytes).decode()
if columns:
selectable = (
sql.text(self._table_or_sql)
.columns(*[sql.column(c) for c in columns])
.alias(temp_name_2)
)
else:
selectable = sql.select(
"*",
from_obj=sql.text(
"(%s) AS %s" % (self._table_or_sql, temp_name_1)
),
).alias(temp_name_2)
self._selectable = selectable
return selectable
|
def _get_selectable(self, engine_or_conn, columns=None):
import sqlalchemy as sa
from sqlalchemy import sql
from sqlalchemy.exc import NoSuchTableError
# process table_name
if self._selectable is not None:
selectable = self._selectable
else:
if isinstance(self._table_or_sql, sa.Table):
selectable = self._table_or_sql
self._table_or_sql = selectable.name
else:
m = sa.MetaData()
try:
selectable = sa.Table(
self._table_or_sql,
m,
autoload=True,
autoload_with=engine_or_conn,
schema=self._schema,
)
except NoSuchTableError:
temp_table_name = (
"temp_" + binascii.b2a_hex(uuid.uuid4().bytes).decode()
)
if columns:
selectable = sql.text(self._table_or_sql).columns(
*[sql.column(c) for c in columns]
)
else:
selectable = sql.select(
"*",
from_obj=sql.text(
"(%s) AS %s" % (self._table_or_sql, temp_table_name)
),
)
self._selectable = selectable
return selectable
|
https://github.com/mars-project/mars/issues/1415
|
Traceback (most recent call last):
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1249, in _execute_context
cursor, statement, parameters, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 580, in do_execute
cursor.execute(statement, parameters)
File "D:\Anaconda3\envs\py37\lib\site-packages\mysql\connector\cursor.py", line 551, in execute
self._handle_result(self._connection.cmd_query(stmt))
File "D:\Anaconda3\envs\py37\lib\site-packages\mysql\connector\connection.py", line 490, in cmd_query
result = self._handle_result(self._send_cmd(ServerCmd.QUERY, query))
File "D:\Anaconda3\envs\py37\lib\site-packages\mysql\connector\connection.py", line 395, in _handle_result
raise errors.get_exception(packet)
mysql.connector.errors.ProgrammingError: 1248 (42000): Every derived table must have its own alias
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "E:\mycodes\readdatabymars.py", line 17, in <module>
df = md.read_sql_query(sql1, con=engine)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 766, in read_sql_query
low_limit=low_limit, high_limit=high_limit)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 476, in _read_sql
return op(test_rows, chunk_size)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 250, in __call__
test_df, shape = self._collect_info(con, selectable, collect_cols, test_rows)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 196, in _collect_info
parse_dates=self._parse_dates)
File "D:\Anaconda3\envs\py37\lib\site-packages\pandas\io\sql.py", line 436, in read_sql
chunksize=chunksize,
File "D:\Anaconda3\envs\py37\lib\site-packages\pandas\io\sql.py", line 1218, in read_query
result = self.execute(*args)
File "D:\Anaconda3\envs\py37\lib\site-packages\pandas\io\sql.py", line 1087, in execute
return self.connectable.execute(*args, **kwargs)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 988, in execute
return meth(self, multiparams, params)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\elements.py", line 287, in _execute_on_connection
return connection._execute_clauseelement(self, multiparams, params)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1107, in _execute_clauseelement
distilled_params,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1253, in _execute_context
e, statement, parameters, cursor, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1473, in _handle_dbapi_exception
util.raise_from_cause(sqlalchemy_exception, exc_info)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\compat.py", line 398, in raise_from_cause
reraise(type(exception), exception, tb=exc_tb, cause=cause)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\compat.py", line 152, in reraise
raise value.with_traceback(tb)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1249, in _execute_context
cursor, statement, parameters, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 580, in do_execute
cursor.execute(statement, parameters)
File "D:\Anaconda3\envs\py37\lib\site-packages\mysql\connector\cursor.py", line 551, in execute
self._handle_result(self._connection.cmd_query(stmt))
File "D:\Anaconda3\envs\py37\lib\site-packages\mysql\connector\connection.py", line 490, in cmd_query
result = self._handle_result(self._send_cmd(ServerCmd.QUERY, query))
File "D:\Anaconda3\envs\py37\lib\site-packages\mysql\connector\connection.py", line 395, in _handle_result
raise errors.get_exception(packet)
sqlalchemy.exc.ProgrammingError: (mysql.connector.errors.ProgrammingError) 1248 (42000): Every derived table must have its own alias
[SQL: SELECT *
FROM (SELECT *
FROM (SELECT * FROM table1) AS temp_6302b78c8a9347b8b02475d757cf17f7)
LIMIT %(param_1)s]
[parameters: {'param_1': 5}]
(Background on this error at: http://sqlalche.me/e/f405)
|
mysql.connector.errors.ProgrammingError
|
def estimate_size(cls, ctx, op):
from .dataframe.core import (
DATAFRAME_CHUNK_TYPE,
SERIES_CHUNK_TYPE,
INDEX_CHUNK_TYPE,
)
exec_size = 0
outputs = op.outputs
if all(
not c.is_sparse() and hasattr(c, "nbytes") and not np.isnan(c.nbytes)
for c in outputs
):
for out in outputs:
ctx[out.key] = (out.nbytes, out.nbytes)
for inp in op.inputs or ():
try:
# execution size of a specific data chunk may be
# larger than stored type due to objects
obj_overhead = n_strings = 0
if getattr(inp, "shape", None) and not np.isnan(inp.shape[0]):
if isinstance(inp, DATAFRAME_CHUNK_TYPE) and inp.dtypes is not None:
n_strings = len([dt for dt in inp.dtypes if is_object_dtype(dt)])
elif (
isinstance(inp, (INDEX_CHUNK_TYPE, SERIES_CHUNK_TYPE))
and inp.dtype is not None
):
n_strings = 1 if is_object_dtype(inp.dtype) else 0
obj_overhead += n_strings * inp.shape[0] * OBJECT_FIELD_OVERHEAD
exec_size += ctx[inp.key][0] + obj_overhead
except KeyError:
if not op.sparse:
inp_size = calc_data_size(inp)
if not np.isnan(inp_size):
exec_size += inp_size
exec_size = int(exec_size)
total_out_size = 0
chunk_sizes = dict()
for out in outputs:
try:
chunk_size = calc_data_size(out) if not out.is_sparse() else exec_size
if np.isnan(chunk_size):
raise TypeError
chunk_sizes[out.key] = chunk_size
total_out_size += chunk_size
except (AttributeError, TypeError, ValueError):
pass
exec_size = max(exec_size, total_out_size)
for out in outputs:
if out.key in ctx:
continue
if out.key in chunk_sizes:
store_size = chunk_sizes[out.key]
else:
store_size = max(
exec_size // len(outputs), total_out_size // max(len(chunk_sizes), 1)
)
try:
if out.is_sparse():
max_sparse_size = (
out.nbytes
+ np.dtype(np.int64).itemsize * np.prod(out.shape) * out.ndim
)
else:
max_sparse_size = np.nan
except TypeError: # pragma: no cover
max_sparse_size = np.nan
if not np.isnan(max_sparse_size):
store_size = min(store_size, max_sparse_size)
ctx[out.key] = (store_size, exec_size // len(outputs))
|
def estimate_size(cls, ctx, op):
exec_size = 0
outputs = op.outputs
if all(
not c.is_sparse() and hasattr(c, "nbytes") and not np.isnan(c.nbytes)
for c in outputs
):
for out in outputs:
ctx[out.key] = (out.nbytes, out.nbytes)
for inp in op.inputs or ():
try:
exec_size += ctx[inp.key][0]
except KeyError:
if not op.sparse:
inp_size = calc_data_size(inp)
if not np.isnan(inp_size):
exec_size += inp_size
exec_size = int(exec_size)
total_out_size = 0
chunk_sizes = dict()
for out in outputs:
try:
chunk_size = calc_data_size(out) if not out.is_sparse() else exec_size
if np.isnan(chunk_size):
raise TypeError
chunk_sizes[out.key] = chunk_size
total_out_size += chunk_size
except (AttributeError, TypeError, ValueError):
pass
exec_size = max(exec_size, total_out_size)
for out in outputs:
if out.key in ctx:
continue
if out.key in chunk_sizes:
store_size = chunk_sizes[out.key]
else:
store_size = max(
exec_size // len(outputs), total_out_size // max(len(chunk_sizes), 1)
)
try:
if out.is_sparse():
max_sparse_size = (
out.nbytes
+ np.dtype(np.int64).itemsize * np.prod(out.shape) * out.ndim
)
else:
max_sparse_size = np.nan
except TypeError: # pragma: no cover
max_sparse_size = np.nan
if not np.isnan(max_sparse_size):
store_size = min(store_size, max_sparse_size)
ctx[out.key] = (store_size, exec_size // len(outputs))
|
https://github.com/mars-project/mars/issues/1415
|
Traceback (most recent call last):
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1249, in _execute_context
cursor, statement, parameters, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 580, in do_execute
cursor.execute(statement, parameters)
File "D:\Anaconda3\envs\py37\lib\site-packages\mysql\connector\cursor.py", line 551, in execute
self._handle_result(self._connection.cmd_query(stmt))
File "D:\Anaconda3\envs\py37\lib\site-packages\mysql\connector\connection.py", line 490, in cmd_query
result = self._handle_result(self._send_cmd(ServerCmd.QUERY, query))
File "D:\Anaconda3\envs\py37\lib\site-packages\mysql\connector\connection.py", line 395, in _handle_result
raise errors.get_exception(packet)
mysql.connector.errors.ProgrammingError: 1248 (42000): Every derived table must have its own alias
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "E:\mycodes\readdatabymars.py", line 17, in <module>
df = md.read_sql_query(sql1, con=engine)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 766, in read_sql_query
low_limit=low_limit, high_limit=high_limit)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 476, in _read_sql
return op(test_rows, chunk_size)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 250, in __call__
test_df, shape = self._collect_info(con, selectable, collect_cols, test_rows)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 196, in _collect_info
parse_dates=self._parse_dates)
File "D:\Anaconda3\envs\py37\lib\site-packages\pandas\io\sql.py", line 436, in read_sql
chunksize=chunksize,
File "D:\Anaconda3\envs\py37\lib\site-packages\pandas\io\sql.py", line 1218, in read_query
result = self.execute(*args)
File "D:\Anaconda3\envs\py37\lib\site-packages\pandas\io\sql.py", line 1087, in execute
return self.connectable.execute(*args, **kwargs)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 988, in execute
return meth(self, multiparams, params)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\elements.py", line 287, in _execute_on_connection
return connection._execute_clauseelement(self, multiparams, params)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1107, in _execute_clauseelement
distilled_params,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1253, in _execute_context
e, statement, parameters, cursor, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1473, in _handle_dbapi_exception
util.raise_from_cause(sqlalchemy_exception, exc_info)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\compat.py", line 398, in raise_from_cause
reraise(type(exception), exception, tb=exc_tb, cause=cause)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\compat.py", line 152, in reraise
raise value.with_traceback(tb)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1249, in _execute_context
cursor, statement, parameters, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 580, in do_execute
cursor.execute(statement, parameters)
File "D:\Anaconda3\envs\py37\lib\site-packages\mysql\connector\cursor.py", line 551, in execute
self._handle_result(self._connection.cmd_query(stmt))
File "D:\Anaconda3\envs\py37\lib\site-packages\mysql\connector\connection.py", line 490, in cmd_query
result = self._handle_result(self._send_cmd(ServerCmd.QUERY, query))
File "D:\Anaconda3\envs\py37\lib\site-packages\mysql\connector\connection.py", line 395, in _handle_result
raise errors.get_exception(packet)
sqlalchemy.exc.ProgrammingError: (mysql.connector.errors.ProgrammingError) 1248 (42000): Every derived table must have its own alias
[SQL: SELECT *
FROM (SELECT *
FROM (SELECT * FROM table1) AS temp_6302b78c8a9347b8b02475d757cf17f7)
LIMIT %(param_1)s]
[parameters: {'param_1': 5}]
(Background on this error at: http://sqlalche.me/e/f405)
|
mysql.connector.errors.ProgrammingError
|
def _estimate_calc_memory(self, session_id, graph_key):
graph_record = self._graph_records[(session_id, graph_key)]
data_metas = graph_record.data_metas
size_ctx = dict((k, (v.chunk_size, v.chunk_size)) for k, v in data_metas.items())
# update shapes
for n in graph_record.graph:
if isinstance(n.op, Fetch):
try:
meta = data_metas[n.key]
if hasattr(n, "_shape") and meta.chunk_shape is not None:
n._shape = meta.chunk_shape
except KeyError:
pass
executor = Executor(
storage=size_ctx, sync_provider_type=Executor.SyncProviderType.MOCK
)
res = executor.execute_graph(
graph_record.graph, graph_record.chunk_targets, mock=True
)
targets = graph_record.chunk_targets
target_sizes = dict(zip(targets, res))
total_mem = sum(target_sizes[key][1] for key in targets)
if total_mem:
for key in targets:
r = target_sizes[key]
target_sizes[key] = (
r[0],
max(r[1], r[1] * executor.mock_max_memory // total_mem),
)
return target_sizes
|
def _estimate_calc_memory(self, session_id, graph_key):
graph_record = self._graph_records[(session_id, graph_key)]
size_ctx = dict(
(k, (v.chunk_size, v.chunk_size)) for k, v in graph_record.data_metas.items()
)
executor = Executor(
storage=size_ctx, sync_provider_type=Executor.SyncProviderType.MOCK
)
res = executor.execute_graph(
graph_record.graph, graph_record.chunk_targets, mock=True
)
targets = graph_record.chunk_targets
target_sizes = dict(zip(targets, res))
total_mem = sum(target_sizes[key][1] for key in targets)
if total_mem:
for key in targets:
r = target_sizes[key]
target_sizes[key] = (
r[0],
max(r[1], r[1] * executor.mock_max_memory // total_mem),
)
return target_sizes
|
https://github.com/mars-project/mars/issues/1415
|
Traceback (most recent call last):
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1249, in _execute_context
cursor, statement, parameters, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 580, in do_execute
cursor.execute(statement, parameters)
File "D:\Anaconda3\envs\py37\lib\site-packages\mysql\connector\cursor.py", line 551, in execute
self._handle_result(self._connection.cmd_query(stmt))
File "D:\Anaconda3\envs\py37\lib\site-packages\mysql\connector\connection.py", line 490, in cmd_query
result = self._handle_result(self._send_cmd(ServerCmd.QUERY, query))
File "D:\Anaconda3\envs\py37\lib\site-packages\mysql\connector\connection.py", line 395, in _handle_result
raise errors.get_exception(packet)
mysql.connector.errors.ProgrammingError: 1248 (42000): Every derived table must have its own alias
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "E:\mycodes\readdatabymars.py", line 17, in <module>
df = md.read_sql_query(sql1, con=engine)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 766, in read_sql_query
low_limit=low_limit, high_limit=high_limit)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 476, in _read_sql
return op(test_rows, chunk_size)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 250, in __call__
test_df, shape = self._collect_info(con, selectable, collect_cols, test_rows)
File "D:\Anaconda3\envs\py37\lib\site-packages\mars\dataframe\datasource\read_sql.py", line 196, in _collect_info
parse_dates=self._parse_dates)
File "D:\Anaconda3\envs\py37\lib\site-packages\pandas\io\sql.py", line 436, in read_sql
chunksize=chunksize,
File "D:\Anaconda3\envs\py37\lib\site-packages\pandas\io\sql.py", line 1218, in read_query
result = self.execute(*args)
File "D:\Anaconda3\envs\py37\lib\site-packages\pandas\io\sql.py", line 1087, in execute
return self.connectable.execute(*args, **kwargs)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 988, in execute
return meth(self, multiparams, params)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\sql\elements.py", line 287, in _execute_on_connection
return connection._execute_clauseelement(self, multiparams, params)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1107, in _execute_clauseelement
distilled_params,
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1253, in _execute_context
e, statement, parameters, cursor, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1473, in _handle_dbapi_exception
util.raise_from_cause(sqlalchemy_exception, exc_info)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\compat.py", line 398, in raise_from_cause
reraise(type(exception), exception, tb=exc_tb, cause=cause)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\util\compat.py", line 152, in reraise
raise value.with_traceback(tb)
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\base.py", line 1249, in _execute_context
cursor, statement, parameters, context
File "D:\Anaconda3\envs\py37\lib\site-packages\sqlalchemy\engine\default.py", line 580, in do_execute
cursor.execute(statement, parameters)
File "D:\Anaconda3\envs\py37\lib\site-packages\mysql\connector\cursor.py", line 551, in execute
self._handle_result(self._connection.cmd_query(stmt))
File "D:\Anaconda3\envs\py37\lib\site-packages\mysql\connector\connection.py", line 490, in cmd_query
result = self._handle_result(self._send_cmd(ServerCmd.QUERY, query))
File "D:\Anaconda3\envs\py37\lib\site-packages\mysql\connector\connection.py", line 395, in _handle_result
raise errors.get_exception(packet)
sqlalchemy.exc.ProgrammingError: (mysql.connector.errors.ProgrammingError) 1248 (42000): Every derived table must have its own alias
[SQL: SELECT *
FROM (SELECT *
FROM (SELECT * FROM table1) AS temp_6302b78c8a9347b8b02475d757cf17f7)
LIMIT %(param_1)s]
[parameters: {'param_1': 5}]
(Background on this error at: http://sqlalche.me/e/f405)
|
mysql.connector.errors.ProgrammingError
|
def tile(cls, op: "LGBMTrain"):
ctx = get_context()
if ctx.running_mode != RunningMode.distributed:
assert all(len(inp.chunks) == 1 for inp in op.inputs)
chunk_op = op.copy().reset_key()
out_chunk = chunk_op.new_chunk(
[inp.chunks[0] for inp in op.inputs], shape=(1,), index=(0,)
)
new_op = op.copy()
return new_op.new_tileables(op.inputs, chunks=[out_chunk], nsplits=((1,),))
else:
data = op.data
worker_to_args = defaultdict(dict)
workers = cls._get_data_chunks_workers(ctx, data)
worker_to_endpoint = cls._build_lgbm_endpoints(workers, op.lgbm_port)
worker_endpoints = list(worker_to_endpoint.values())
for arg in ["_data", "_label", "_sample_weight", "_init_score"]:
if getattr(op, arg) is not None:
for worker, chunk in cls._concat_chunks_by_worker(
getattr(op, arg).chunks, workers
).items():
worker_to_args[worker][arg] = chunk
if op.eval_datas:
eval_workers_list = [
cls._get_data_chunks_workers(ctx, d) for d in op.eval_datas
]
extra_workers = reduce(
operator.or_, (set(w) for w in eval_workers_list)
) - set(workers)
worker_remap = dict(zip(extra_workers, itertools.cycle(workers)))
if worker_remap:
eval_workers_list = [
[worker_remap.get(w, w) for w in wl] for wl in eval_workers_list
]
for arg in [
"_eval_datas",
"_eval_labels",
"_eval_sample_weights",
"_eval_init_scores",
]:
if getattr(op, arg):
for tileable, eval_workers in zip(
getattr(op, arg), eval_workers_list
):
for worker, chunk in cls._concat_chunks_by_worker(
tileable.chunks, eval_workers
).items():
if arg not in worker_to_args[worker]:
worker_to_args[worker][arg] = []
worker_to_args[worker][arg].append(chunk)
out_chunks = []
for worker in workers:
chunk_op = op.copy().reset_key()
chunk_op._expect_worker = worker
chunk_op._lgbm_endpoints = worker_endpoints
chunk_op._lgbm_port = int(worker_to_endpoint[worker].rsplit(":", 1)[-1])
input_chunks = []
concat_args = worker_to_args.get(worker, {})
for arg in [
"_data",
"_label",
"_sample_weight",
"_init_score",
"_eval_datas",
"_eval_labels",
"_eval_sample_weights",
"_eval_init_scores",
]:
arg_val = getattr(op, arg)
if arg_val:
arg_chunk = concat_args.get(arg)
setattr(chunk_op, arg, arg_chunk)
if isinstance(arg_chunk, list):
input_chunks.extend(arg_chunk)
else:
input_chunks.append(arg_chunk)
data_chunk = concat_args["_data"]
out_chunk = chunk_op.new_chunk(
input_chunks, shape=(np.nan,), index=data_chunk.index[:1]
)
out_chunks.append(out_chunk)
new_op = op.copy()
return new_op.new_tileables(
op.inputs, chunks=out_chunks, nsplits=((np.nan for _ in out_chunks),)
)
|
def tile(cls, op: "LGBMTrain"):
ctx = get_context()
if ctx.running_mode != RunningMode.distributed:
assert all(len(inp.chunks) == 1 for inp in op.inputs)
chunk_op = op.copy().reset_key()
out_chunk = chunk_op.new_chunk(
[inp.chunks[0] for inp in op.inputs], shape=(1,), index=(0,)
)
new_op = op.copy()
return new_op.new_tileables(op.inputs, chunks=[out_chunk], nsplits=((1,),))
else:
data = op.data
worker_to_args = defaultdict(dict)
workers = cls._get_data_chunks_workers(ctx, data)
worker_to_endpoint = cls._build_lgbm_endpoints(workers, op.lgbm_port)
worker_endpoints = list(worker_to_endpoint.values())
for arg in ["_data", "_label", "_sample_weight", "_init_score"]:
if getattr(op, arg) is not None:
for worker, chunk in cls._concat_chunks_by_worker(
getattr(op, arg).chunks, workers
).items():
worker_to_args[worker][arg] = chunk
eval_workers_list = [
cls._get_data_chunks_workers(ctx, d) for d in op.eval_datas
]
extra_workers = reduce(operator.or_, (set(w) for w in eval_workers_list)) - set(
workers
)
worker_remap = dict(zip(extra_workers, itertools.cycle(workers)))
if worker_remap:
eval_workers_list = [
[worker_remap.get(w, w) for w in wl] for wl in eval_workers_list
]
for arg in [
"_eval_datas",
"_eval_labels",
"_eval_sample_weights",
"_eval_init_scores",
]:
if getattr(op, arg):
for tileable, eval_workers in zip(getattr(op, arg), eval_workers_list):
for worker, chunk in cls._concat_chunks_by_worker(
tileable.chunks, eval_workers
).items():
if arg not in worker_to_args[worker]:
worker_to_args[worker][arg] = []
worker_to_args[worker][arg].append(chunk)
out_chunks = []
for worker in workers:
chunk_op = op.copy().reset_key()
chunk_op._expect_worker = worker
chunk_op._lgbm_endpoints = worker_endpoints
chunk_op._lgbm_port = int(worker_to_endpoint[worker].rsplit(":", 1)[-1])
input_chunks = []
concat_args = worker_to_args.get(worker, {})
for arg in [
"_data",
"_label",
"_sample_weight",
"_init_score",
"_eval_datas",
"_eval_labels",
"_eval_sample_weights",
"_eval_init_scores",
]:
arg_val = getattr(op, arg)
if arg_val:
arg_chunk = concat_args.get(arg)
setattr(chunk_op, arg, arg_chunk)
if isinstance(arg_chunk, list):
input_chunks.extend(arg_chunk)
else:
input_chunks.append(arg_chunk)
data_chunk = concat_args["_data"]
out_chunk = chunk_op.new_chunk(
input_chunks, shape=(np.nan,), index=data_chunk.index[:1]
)
out_chunks.append(out_chunk)
new_op = op.copy()
return new_op.new_tileables(
op.inputs, chunks=out_chunks, nsplits=((np.nan for _ in out_chunks),)
)
|
https://github.com/mars-project/mars/issues/1404
|
Error
Traceback (most recent call last):
File "/Users/wenjun.swj/Code/mars/mars/scheduler/graph.py", line 395, in _execute_graph
self.prepare_graph(compose=compose)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/scheduler/graph.py", line 633, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 350, in build
tileables, tileable_graph=tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 493, in inner
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 263, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 302, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 243, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 338, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 203, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/Users/wenjun.swj/Code/mars/mars/scheduler/graph.py", line 615, in on_tile
return self.context.wraps(handler.dispatch)(first.op)
File "/Users/wenjun.swj/Code/mars/mars/context.py", line 68, in h
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 399, in _wrapped
return func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/tiles.py", line 119, in dispatch
tiled = op_cls.tile(op)
File "/Users/wenjun.swj/Code/mars/mars/learn/contrib/lightgbm/train.py", line 207, in tile
extra_workers = reduce(operator.or_, (set(w) for w in eval_workers_list)) - set(workers)
TypeError: reduce() of empty sequence with no initial value
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/Users/wenjun.swj/miniconda3/lib/python3.7/unittest/case.py", line 59, in testPartExecutor
yield
File "/Users/wenjun.swj/miniconda3/lib/python3.7/unittest/case.py", line 628, in run
testMethod()
File "/Users/wenjun.swj/Code/mars/mars/learn/contrib/lightgbm/tests/integrated/test_distributed_lightgbm.py", line 52, in testDistributedLGBMClassifier
classifier.fit(X, y, session=sess, run_kwargs=run_kwargs)
File "/Users/wenjun.swj/Code/mars/mars/learn/contrib/lightgbm/classifier.py", line 32, in fit
session=session, run_kwargs=run_kwargs, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/learn/contrib/lightgbm/train.py", line 343, in train
ret = op().execute(session=session, **run_kwargs).fetch(session=session)
File "/Users/wenjun.swj/Code/mars/mars/core.py", line 367, in execute
session.run(self, **kw)
File "/Users/wenjun.swj/Code/mars/mars/session.py", line 428, in run
result = self._sess.run(*tileables, **kw)
File "/Users/wenjun.swj/Code/mars/mars/web/session.py", line 187, in run
if self._check_response_finished(graph_url, timeout_val):
File "/Users/wenjun.swj/Code/mars/mars/web/session.py", line 146, in _check_response_finished
raise ExecutionFailed('Graph execution failed.') from exc
mars.errors.ExecutionFailed: 'Graph execution failed.'
|
TypeError
|
def tile(cls, op: "LGBMAlign"):
inputs = [
d for d in [op.data, op.label, op.sample_weight, op.init_score] if d is not None
]
data = op.data
# check inputs to make sure no unknown chunk shape exists
check_chunks_unknown_shape(inputs, TilesError)
ctx = get_context()
if ctx.running_mode != RunningMode.distributed:
outputs = [
inp.rechunk(tuple((s,) for s in inp.shape))._inplace_tile()
for inp in inputs
]
else:
if len(data.nsplits[1]) != 1:
data = data.rechunk({1: data.shape[1]})._inplace_tile()
outputs = [data]
for inp in inputs[1:]:
if inp is not None:
outputs.append(inp.rechunk((data.nsplits[0],))._inplace_tile())
kws = []
for o in outputs:
kw = o.params.copy()
kw.update(dict(chunks=o.chunks, nsplits=o.nsplits))
kws.append(kw)
new_op = op.copy().reset_key()
tileables = new_op.new_tileables(inputs, kws=kws)
return tileables
|
def tile(cls, op: "LGBMAlign"):
inputs = [
d for d in [op.data, op.label, op.sample_weight, op.init_score] if d is not None
]
data = op.data
ctx = get_context()
if ctx.running_mode != RunningMode.distributed:
outputs = [
inp.rechunk(tuple((s,) for s in inp.shape))._inplace_tile()
for inp in inputs
]
else:
if len(data.nsplits[1]) != 1:
data = data.rechunk({1: data.shape[1]})._inplace_tile()
outputs = [data]
for inp in inputs[1:]:
if inp is not None:
outputs.append(inp.rechunk((data.nsplits[0],))._inplace_tile())
kws = []
for o in outputs:
kw = o.params.copy()
kw.update(dict(chunks=o.chunks, nsplits=o.nsplits))
kws.append(kw)
new_op = op.copy().reset_key()
tileables = new_op.new_tileables(inputs, kws=kws)
return tileables
|
https://github.com/mars-project/mars/issues/1395
|
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.4.2.zip/mars/scheduler/graph.py", line 371, in _execute_graph
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 343, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 483, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/scheduler/graph.py", line 603, in prepare_graph
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 389, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 483, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 342, in build
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 389, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 483, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 255, in build
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 294, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 235, in build
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 330, in _tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 195, in _tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/scheduler/graph.py", line 586, in on_tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/context.py", line 68, in h
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 389, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 115, in dispatch
File "/home/admin/work/_public-mars-0.4.2.zip/mars/learn/contrib/lightgbm/align.py", line 87, in tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/dataframe/base/rechunk.py", line 90, in rechunk
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tensor/rechunk/core.py", line 38, in get_nsplits
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tensor/utils.py", line 549, in decide_chunk_sizes
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tensor/utils.py", line 67, in normalize_chunk_sizes
ValueError: chunks shape should be of the same length, got shape: nan, chunks: (nan,)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.4.2.zip/mars/promise.py", line 100, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/worker/calc.py", line 271, in <lambda>
File "/home/admin/work/_public-mars-0.4.2.zip/mars/worker/calc.py", line 244, in _start_calc
File "/home/admin/work/_public-mars-0.4.2.zip/mars/worker/calc.py", line 173, in _calc_results
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
File "mars/actors/pool/gevent_pool.pyx", line 127, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
result = fn(*args, **kwargs)
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 690, in execute_graph
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 571, in execute
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 443, in _execute_operand
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 641, in handle
File "/home/admin/work/_public-pyodps-0.9.3.zip/odps/mars_extension/core.py", line 187, in wrapper
File "/home/admin/work/_public-mars-0.4.2.zip/mars/remote/core.py", line 200, in execute
File "E:/MyDocuments/PycharmProjects/borrowloan/temp_query/test_mars.py", line 31, in light_gbm
lg_reg = lgb.LGBMRegressor()
File "/home/admin/work/_public-mars-0.4.2.zip/mars/learn/contrib/lightgbm/regressor.py", line 28, in fit
File "/home/admin/work/_public-mars-0.4.2.zip/mars/learn/contrib/lightgbm/train.py", line 322, in train
File "/home/admin/work/_public-mars-0.4.2.zip/mars/core.py", line 651, in execute
File "/home/admin/work/_public-mars-0.4.2.zip/mars/core.py", line 370, in execute
File "/home/admin/work/_public-mars-0.4.2.zip/mars/session.py", line 428, in run
File "/home/admin/work/_public-mars-0.4.2.zip/mars/session.py", line 303, in run
mars.errors.ExecutionFailed: '\'\\\'\\\\\\\'"\\\\\\\\\\\\\\\'Graph execution failed.\\\\\\\\\\\\\\\'"\\\\\\\'\\\'\''
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "E:/MyDocuments/PycharmProjects/borrowloan/temp_query/test_mars.py", line 38, in <module>
odps.run_mars_job(light_gbm, args=(tb_name,), worker_num=2, worker_cpu=2, worker_mem=8, mars_image='extended',
File "F:\Anaconda3\lib\site-packages\odps\mars_extension\core.py", line 151, in run_mars_job
r.execute()
File "F:\Anaconda3\lib\site-packages\mars\core.py", line 370, in execute
session.run(self, **kw)
File "F:\Anaconda3\lib\site-packages\mars\session.py", line 428, in run
result = self._sess.run(*tileables, **kw)
File "F:\Anaconda3\lib\site-packages\mars\web\session.py", line 187, in run
if self._check_response_finished(graph_url, timeout_val):
File "F:\Anaconda3\lib\site-packages\mars\web\session.py", line 146, in _check_response_finished
raise ExecutionFailed('Graph execution failed.') from exc
mars.errors.ExecutionFailed: 'Graph execution failed.'
|
ValueError
|
def fit(
self,
X,
y,
sample_weight=None,
init_score=None,
eval_set=None,
eval_sample_weight=None,
eval_init_score=None,
session=None,
run_kwargs=None,
**kwargs,
):
check_consistent_length(X, y, session=session, run_kwargs=run_kwargs)
params = self.get_params(True)
model = train(
params,
self._wrap_train_tuple(X, y, sample_weight, init_score),
eval_sets=self._wrap_eval_tuples(eval_set, eval_sample_weight, eval_init_score),
model_type=LGBMModelType.CLASSIFIER,
session=session,
run_kwargs=run_kwargs,
**kwargs,
)
self.set_params(**model.get_params())
self._copy_extra_params(model, self)
return self
|
def fit(
self,
X,
y,
sample_weight=None,
init_score=None,
eval_set=None,
eval_sample_weight=None,
eval_init_score=None,
**kwargs,
):
params = self.get_params(True)
model = train(
params,
self._wrap_train_tuple(X, y, sample_weight, init_score),
eval_sets=self._wrap_eval_tuples(eval_set, eval_sample_weight, eval_init_score),
model_type=LGBMModelType.CLASSIFIER,
**kwargs,
)
self.set_params(**model.get_params())
self._copy_extra_params(model, self)
return self
|
https://github.com/mars-project/mars/issues/1395
|
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.4.2.zip/mars/scheduler/graph.py", line 371, in _execute_graph
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 343, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 483, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/scheduler/graph.py", line 603, in prepare_graph
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 389, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 483, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 342, in build
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 389, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 483, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 255, in build
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 294, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 235, in build
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 330, in _tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 195, in _tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/scheduler/graph.py", line 586, in on_tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/context.py", line 68, in h
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 389, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 115, in dispatch
File "/home/admin/work/_public-mars-0.4.2.zip/mars/learn/contrib/lightgbm/align.py", line 87, in tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/dataframe/base/rechunk.py", line 90, in rechunk
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tensor/rechunk/core.py", line 38, in get_nsplits
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tensor/utils.py", line 549, in decide_chunk_sizes
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tensor/utils.py", line 67, in normalize_chunk_sizes
ValueError: chunks shape should be of the same length, got shape: nan, chunks: (nan,)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.4.2.zip/mars/promise.py", line 100, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/worker/calc.py", line 271, in <lambda>
File "/home/admin/work/_public-mars-0.4.2.zip/mars/worker/calc.py", line 244, in _start_calc
File "/home/admin/work/_public-mars-0.4.2.zip/mars/worker/calc.py", line 173, in _calc_results
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
File "mars/actors/pool/gevent_pool.pyx", line 127, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
result = fn(*args, **kwargs)
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 690, in execute_graph
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 571, in execute
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 443, in _execute_operand
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 641, in handle
File "/home/admin/work/_public-pyodps-0.9.3.zip/odps/mars_extension/core.py", line 187, in wrapper
File "/home/admin/work/_public-mars-0.4.2.zip/mars/remote/core.py", line 200, in execute
File "E:/MyDocuments/PycharmProjects/borrowloan/temp_query/test_mars.py", line 31, in light_gbm
lg_reg = lgb.LGBMRegressor()
File "/home/admin/work/_public-mars-0.4.2.zip/mars/learn/contrib/lightgbm/regressor.py", line 28, in fit
File "/home/admin/work/_public-mars-0.4.2.zip/mars/learn/contrib/lightgbm/train.py", line 322, in train
File "/home/admin/work/_public-mars-0.4.2.zip/mars/core.py", line 651, in execute
File "/home/admin/work/_public-mars-0.4.2.zip/mars/core.py", line 370, in execute
File "/home/admin/work/_public-mars-0.4.2.zip/mars/session.py", line 428, in run
File "/home/admin/work/_public-mars-0.4.2.zip/mars/session.py", line 303, in run
mars.errors.ExecutionFailed: '\'\\\'\\\\\\\'"\\\\\\\\\\\\\\\'Graph execution failed.\\\\\\\\\\\\\\\'"\\\\\\\'\\\'\''
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "E:/MyDocuments/PycharmProjects/borrowloan/temp_query/test_mars.py", line 38, in <module>
odps.run_mars_job(light_gbm, args=(tb_name,), worker_num=2, worker_cpu=2, worker_mem=8, mars_image='extended',
File "F:\Anaconda3\lib\site-packages\odps\mars_extension\core.py", line 151, in run_mars_job
r.execute()
File "F:\Anaconda3\lib\site-packages\mars\core.py", line 370, in execute
session.run(self, **kw)
File "F:\Anaconda3\lib\site-packages\mars\session.py", line 428, in run
result = self._sess.run(*tileables, **kw)
File "F:\Anaconda3\lib\site-packages\mars\web\session.py", line 187, in run
if self._check_response_finished(graph_url, timeout_val):
File "F:\Anaconda3\lib\site-packages\mars\web\session.py", line 146, in _check_response_finished
raise ExecutionFailed('Graph execution failed.') from exc
mars.errors.ExecutionFailed: 'Graph execution failed.'
|
ValueError
|
def fit(
self,
X,
y,
sample_weight=None,
init_score=None,
group=None,
eval_set=None,
eval_sample_weight=None,
eval_init_score=None,
session=None,
run_kwargs=None,
**kwargs,
):
check_consistent_length(X, y, session=session, run_kwargs=run_kwargs)
params = self.get_params(True)
model = train(
params,
self._wrap_train_tuple(X, y, sample_weight, init_score),
eval_sets=self._wrap_eval_tuples(eval_set, eval_sample_weight, eval_init_score),
group=group,
model_type=LGBMModelType.RANKER,
session=session,
run_kwargs=run_kwargs,
**kwargs,
)
self.set_params(**model.get_params())
self._copy_extra_params(model, self)
return self
|
def fit(
self,
X,
y,
sample_weight=None,
init_score=None,
group=None,
eval_set=None,
eval_sample_weight=None,
eval_init_score=None,
**kwargs,
):
params = self.get_params(True)
model = train(
params,
self._wrap_train_tuple(X, y, sample_weight, init_score),
eval_sets=self._wrap_eval_tuples(eval_set, eval_sample_weight, eval_init_score),
group=group,
model_type=LGBMModelType.RANKER,
**kwargs,
)
self.set_params(**model.get_params())
self._copy_extra_params(model, self)
return self
|
https://github.com/mars-project/mars/issues/1395
|
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.4.2.zip/mars/scheduler/graph.py", line 371, in _execute_graph
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 343, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 483, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/scheduler/graph.py", line 603, in prepare_graph
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 389, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 483, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 342, in build
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 389, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 483, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 255, in build
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 294, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 235, in build
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 330, in _tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 195, in _tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/scheduler/graph.py", line 586, in on_tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/context.py", line 68, in h
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 389, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 115, in dispatch
File "/home/admin/work/_public-mars-0.4.2.zip/mars/learn/contrib/lightgbm/align.py", line 87, in tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/dataframe/base/rechunk.py", line 90, in rechunk
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tensor/rechunk/core.py", line 38, in get_nsplits
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tensor/utils.py", line 549, in decide_chunk_sizes
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tensor/utils.py", line 67, in normalize_chunk_sizes
ValueError: chunks shape should be of the same length, got shape: nan, chunks: (nan,)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.4.2.zip/mars/promise.py", line 100, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/worker/calc.py", line 271, in <lambda>
File "/home/admin/work/_public-mars-0.4.2.zip/mars/worker/calc.py", line 244, in _start_calc
File "/home/admin/work/_public-mars-0.4.2.zip/mars/worker/calc.py", line 173, in _calc_results
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
File "mars/actors/pool/gevent_pool.pyx", line 127, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
result = fn(*args, **kwargs)
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 690, in execute_graph
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 571, in execute
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 443, in _execute_operand
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 641, in handle
File "/home/admin/work/_public-pyodps-0.9.3.zip/odps/mars_extension/core.py", line 187, in wrapper
File "/home/admin/work/_public-mars-0.4.2.zip/mars/remote/core.py", line 200, in execute
File "E:/MyDocuments/PycharmProjects/borrowloan/temp_query/test_mars.py", line 31, in light_gbm
lg_reg = lgb.LGBMRegressor()
File "/home/admin/work/_public-mars-0.4.2.zip/mars/learn/contrib/lightgbm/regressor.py", line 28, in fit
File "/home/admin/work/_public-mars-0.4.2.zip/mars/learn/contrib/lightgbm/train.py", line 322, in train
File "/home/admin/work/_public-mars-0.4.2.zip/mars/core.py", line 651, in execute
File "/home/admin/work/_public-mars-0.4.2.zip/mars/core.py", line 370, in execute
File "/home/admin/work/_public-mars-0.4.2.zip/mars/session.py", line 428, in run
File "/home/admin/work/_public-mars-0.4.2.zip/mars/session.py", line 303, in run
mars.errors.ExecutionFailed: '\'\\\'\\\\\\\'"\\\\\\\\\\\\\\\'Graph execution failed.\\\\\\\\\\\\\\\'"\\\\\\\'\\\'\''
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "E:/MyDocuments/PycharmProjects/borrowloan/temp_query/test_mars.py", line 38, in <module>
odps.run_mars_job(light_gbm, args=(tb_name,), worker_num=2, worker_cpu=2, worker_mem=8, mars_image='extended',
File "F:\Anaconda3\lib\site-packages\odps\mars_extension\core.py", line 151, in run_mars_job
r.execute()
File "F:\Anaconda3\lib\site-packages\mars\core.py", line 370, in execute
session.run(self, **kw)
File "F:\Anaconda3\lib\site-packages\mars\session.py", line 428, in run
result = self._sess.run(*tileables, **kw)
File "F:\Anaconda3\lib\site-packages\mars\web\session.py", line 187, in run
if self._check_response_finished(graph_url, timeout_val):
File "F:\Anaconda3\lib\site-packages\mars\web\session.py", line 146, in _check_response_finished
raise ExecutionFailed('Graph execution failed.') from exc
mars.errors.ExecutionFailed: 'Graph execution failed.'
|
ValueError
|
def fit(
self,
X,
y,
sample_weight=None,
init_score=None,
eval_set=None,
eval_sample_weight=None,
eval_init_score=None,
session=None,
run_kwargs=None,
**kwargs,
):
check_consistent_length(X, y, session=session, run_kwargs=run_kwargs)
params = self.get_params(True)
model = train(
params,
self._wrap_train_tuple(X, y, sample_weight, init_score),
eval_sets=self._wrap_eval_tuples(eval_set, eval_sample_weight, eval_init_score),
model_type=LGBMModelType.REGRESSOR,
session=session,
run_kwargs=run_kwargs,
**kwargs,
)
self.set_params(**model.get_params())
self._copy_extra_params(model, self)
return self
|
def fit(
self,
X,
y,
sample_weight=None,
init_score=None,
eval_set=None,
eval_sample_weight=None,
eval_init_score=None,
**kwargs,
):
params = self.get_params(True)
model = train(
params,
self._wrap_train_tuple(X, y, sample_weight, init_score),
eval_sets=self._wrap_eval_tuples(eval_set, eval_sample_weight, eval_init_score),
model_type=LGBMModelType.REGRESSOR,
**kwargs,
)
self.set_params(**model.get_params())
self._copy_extra_params(model, self)
return self
|
https://github.com/mars-project/mars/issues/1395
|
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.4.2.zip/mars/scheduler/graph.py", line 371, in _execute_graph
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 343, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 483, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/scheduler/graph.py", line 603, in prepare_graph
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 389, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 483, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 342, in build
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 389, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 483, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 255, in build
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 294, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 235, in build
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 330, in _tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 195, in _tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/scheduler/graph.py", line 586, in on_tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/context.py", line 68, in h
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 389, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 115, in dispatch
File "/home/admin/work/_public-mars-0.4.2.zip/mars/learn/contrib/lightgbm/align.py", line 87, in tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/dataframe/base/rechunk.py", line 90, in rechunk
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tensor/rechunk/core.py", line 38, in get_nsplits
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tensor/utils.py", line 549, in decide_chunk_sizes
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tensor/utils.py", line 67, in normalize_chunk_sizes
ValueError: chunks shape should be of the same length, got shape: nan, chunks: (nan,)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.4.2.zip/mars/promise.py", line 100, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/worker/calc.py", line 271, in <lambda>
File "/home/admin/work/_public-mars-0.4.2.zip/mars/worker/calc.py", line 244, in _start_calc
File "/home/admin/work/_public-mars-0.4.2.zip/mars/worker/calc.py", line 173, in _calc_results
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
File "mars/actors/pool/gevent_pool.pyx", line 127, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
result = fn(*args, **kwargs)
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 690, in execute_graph
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 571, in execute
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 443, in _execute_operand
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 641, in handle
File "/home/admin/work/_public-pyodps-0.9.3.zip/odps/mars_extension/core.py", line 187, in wrapper
File "/home/admin/work/_public-mars-0.4.2.zip/mars/remote/core.py", line 200, in execute
File "E:/MyDocuments/PycharmProjects/borrowloan/temp_query/test_mars.py", line 31, in light_gbm
lg_reg = lgb.LGBMRegressor()
File "/home/admin/work/_public-mars-0.4.2.zip/mars/learn/contrib/lightgbm/regressor.py", line 28, in fit
File "/home/admin/work/_public-mars-0.4.2.zip/mars/learn/contrib/lightgbm/train.py", line 322, in train
File "/home/admin/work/_public-mars-0.4.2.zip/mars/core.py", line 651, in execute
File "/home/admin/work/_public-mars-0.4.2.zip/mars/core.py", line 370, in execute
File "/home/admin/work/_public-mars-0.4.2.zip/mars/session.py", line 428, in run
File "/home/admin/work/_public-mars-0.4.2.zip/mars/session.py", line 303, in run
mars.errors.ExecutionFailed: '\'\\\'\\\\\\\'"\\\\\\\\\\\\\\\'Graph execution failed.\\\\\\\\\\\\\\\'"\\\\\\\'\\\'\''
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "E:/MyDocuments/PycharmProjects/borrowloan/temp_query/test_mars.py", line 38, in <module>
odps.run_mars_job(light_gbm, args=(tb_name,), worker_num=2, worker_cpu=2, worker_mem=8, mars_image='extended',
File "F:\Anaconda3\lib\site-packages\odps\mars_extension\core.py", line 151, in run_mars_job
r.execute()
File "F:\Anaconda3\lib\site-packages\mars\core.py", line 370, in execute
session.run(self, **kw)
File "F:\Anaconda3\lib\site-packages\mars\session.py", line 428, in run
result = self._sess.run(*tileables, **kw)
File "F:\Anaconda3\lib\site-packages\mars\web\session.py", line 187, in run
if self._check_response_finished(graph_url, timeout_val):
File "F:\Anaconda3\lib\site-packages\mars\web\session.py", line 146, in _check_response_finished
raise ExecutionFailed('Graph execution failed.') from exc
mars.errors.ExecutionFailed: 'Graph execution failed.'
|
ValueError
|
def train(params, train_set, eval_sets=None, **kwargs):
eval_sets = eval_sets or []
model_type = kwargs.pop("model_type", LGBMModelType.CLASSIFIER)
evals_result = kwargs.pop("evals_result", dict())
session = kwargs.pop("session", None)
run_kwargs = kwargs.pop("run_kwargs", None)
if run_kwargs is None:
run_kwargs = dict()
timeout = kwargs.pop("timeout", 120)
base_port = kwargs.pop("base_port", None)
aligns = align_data_set(train_set)
for eval_set in eval_sets:
aligns += align_data_set(eval_set)
aligned_iter = iter(ExecutableTuple(aligns).execute(session))
datas, labels, sample_weights, init_scores = [], [], [], []
for dataset in [train_set] + eval_sets:
train_kw = dict()
for arg in ["data", "label", "sample_weight", "init_score"]:
if getattr(dataset, arg) is not None:
train_kw[arg] = next(aligned_iter)
else:
train_kw[arg] = None
datas.append(train_kw["data"])
labels.append(train_kw["label"])
sample_weights.append(train_kw["sample_weight"])
init_scores.append(train_kw["init_score"])
op = LGBMTrain(
params=params,
data=datas[0],
label=labels[0],
sample_weight=sample_weights[0],
init_score=init_scores[0],
eval_datas=datas[1:],
eval_labels=labels[1:],
eval_weights=sample_weights[1:],
eval_init_score=init_scores[1:],
model_type=model_type,
timeout=timeout,
lgbm_port=base_port,
kwds=kwargs,
)
ret = op().execute(session=session, **run_kwargs).fetch(session=session)
bst = pickle.loads(ret)
evals_result.update(bst.evals_result_ or {})
return bst
|
def train(params, train_set, eval_sets=None, **kwargs):
eval_sets = eval_sets or []
model_type = kwargs.pop("model_type", LGBMModelType.CLASSIFIER)
evals_result = kwargs.pop("evals_result", dict())
session = kwargs.pop("session", None)
run_kwargs = kwargs.pop("run_kwargs", dict())
timeout = kwargs.pop("timeout", 120)
base_port = kwargs.pop("base_port", None)
aligns = align_data_set(train_set)
for eval_set in eval_sets:
aligns += align_data_set(eval_set)
aligned_iter = iter(ExecutableTuple(aligns).execute(session))
datas, labels, sample_weights, init_scores = [], [], [], []
for dataset in [train_set] + eval_sets:
train_kw = dict()
for arg in ["data", "label", "sample_weight", "init_score"]:
if getattr(dataset, arg) is not None:
train_kw[arg] = next(aligned_iter)
else:
train_kw[arg] = None
datas.append(train_kw["data"])
labels.append(train_kw["label"])
sample_weights.append(train_kw["sample_weight"])
init_scores.append(train_kw["init_score"])
op = LGBMTrain(
params=params,
data=datas[0],
label=labels[0],
sample_weight=sample_weights[0],
init_score=init_scores[0],
eval_datas=datas[1:],
eval_labels=labels[1:],
eval_weights=sample_weights[1:],
eval_init_score=init_scores[1:],
model_type=model_type,
timeout=timeout,
lgbm_port=base_port,
kwds=kwargs,
)
ret = op().execute(session=session, **run_kwargs).fetch(session=session)
bst = pickle.loads(ret)
evals_result.update(bst.evals_result_ or {})
return bst
|
https://github.com/mars-project/mars/issues/1395
|
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.4.2.zip/mars/scheduler/graph.py", line 371, in _execute_graph
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 343, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 483, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/scheduler/graph.py", line 603, in prepare_graph
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 389, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 483, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 342, in build
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 389, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 483, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 255, in build
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 294, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 235, in build
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 330, in _tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 195, in _tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/scheduler/graph.py", line 586, in on_tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/context.py", line 68, in h
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 389, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 115, in dispatch
File "/home/admin/work/_public-mars-0.4.2.zip/mars/learn/contrib/lightgbm/align.py", line 87, in tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/dataframe/base/rechunk.py", line 90, in rechunk
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tensor/rechunk/core.py", line 38, in get_nsplits
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tensor/utils.py", line 549, in decide_chunk_sizes
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tensor/utils.py", line 67, in normalize_chunk_sizes
ValueError: chunks shape should be of the same length, got shape: nan, chunks: (nan,)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.4.2.zip/mars/promise.py", line 100, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/worker/calc.py", line 271, in <lambda>
File "/home/admin/work/_public-mars-0.4.2.zip/mars/worker/calc.py", line 244, in _start_calc
File "/home/admin/work/_public-mars-0.4.2.zip/mars/worker/calc.py", line 173, in _calc_results
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
File "mars/actors/pool/gevent_pool.pyx", line 127, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
result = fn(*args, **kwargs)
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 690, in execute_graph
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 571, in execute
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 443, in _execute_operand
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 641, in handle
File "/home/admin/work/_public-pyodps-0.9.3.zip/odps/mars_extension/core.py", line 187, in wrapper
File "/home/admin/work/_public-mars-0.4.2.zip/mars/remote/core.py", line 200, in execute
File "E:/MyDocuments/PycharmProjects/borrowloan/temp_query/test_mars.py", line 31, in light_gbm
lg_reg = lgb.LGBMRegressor()
File "/home/admin/work/_public-mars-0.4.2.zip/mars/learn/contrib/lightgbm/regressor.py", line 28, in fit
File "/home/admin/work/_public-mars-0.4.2.zip/mars/learn/contrib/lightgbm/train.py", line 322, in train
File "/home/admin/work/_public-mars-0.4.2.zip/mars/core.py", line 651, in execute
File "/home/admin/work/_public-mars-0.4.2.zip/mars/core.py", line 370, in execute
File "/home/admin/work/_public-mars-0.4.2.zip/mars/session.py", line 428, in run
File "/home/admin/work/_public-mars-0.4.2.zip/mars/session.py", line 303, in run
mars.errors.ExecutionFailed: '\'\\\'\\\\\\\'"\\\\\\\\\\\\\\\'Graph execution failed.\\\\\\\\\\\\\\\'"\\\\\\\'\\\'\''
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "E:/MyDocuments/PycharmProjects/borrowloan/temp_query/test_mars.py", line 38, in <module>
odps.run_mars_job(light_gbm, args=(tb_name,), worker_num=2, worker_cpu=2, worker_mem=8, mars_image='extended',
File "F:\Anaconda3\lib\site-packages\odps\mars_extension\core.py", line 151, in run_mars_job
r.execute()
File "F:\Anaconda3\lib\site-packages\mars\core.py", line 370, in execute
session.run(self, **kw)
File "F:\Anaconda3\lib\site-packages\mars\session.py", line 428, in run
result = self._sess.run(*tileables, **kw)
File "F:\Anaconda3\lib\site-packages\mars\web\session.py", line 187, in run
if self._check_response_finished(graph_url, timeout_val):
File "F:\Anaconda3\lib\site-packages\mars\web\session.py", line 146, in _check_response_finished
raise ExecutionFailed('Graph execution failed.') from exc
mars.errors.ExecutionFailed: 'Graph execution failed.'
|
ValueError
|
def _execute_graph(self, compose=True):
try:
self.prepare_graph(compose=compose)
self._detect_cancel()
self._dump_graph()
self.analyze_graph()
self._detect_cancel()
if self.state == GraphState.SUCCEEDED:
self._graph_meta_ref.set_graph_end(_tell=True, _wait=False)
else:
self.create_operand_actors()
self._detect_cancel(self.stop_graph)
except ExecutionInterrupted:
pass
except: # noqa: E722
logger.exception("Failed to start graph execution.")
self._graph_meta_ref.set_exc_info(sys.exc_info(), _tell=True, _wait=False)
self.stop_graph()
self.state = GraphState.FAILED
self._graph_meta_ref.set_graph_end(_tell=True, _wait=False)
raise
|
def _execute_graph(self, compose=True):
try:
self.prepare_graph(compose=compose)
self._detect_cancel()
self._dump_graph()
self.analyze_graph()
self._detect_cancel()
self.create_operand_actors()
self._detect_cancel(self.stop_graph)
except ExecutionInterrupted:
pass
except: # noqa: E722
logger.exception("Failed to start graph execution.")
self._graph_meta_ref.set_exc_info(sys.exc_info(), _tell=True, _wait=False)
self.stop_graph()
self.state = GraphState.FAILED
self._graph_meta_ref.set_graph_end(_tell=True, _wait=False)
raise
if len(self._chunk_graph_cache) == 0:
self.state = GraphState.SUCCEEDED
self._graph_meta_ref.set_graph_end(_tell=True, _wait=False)
|
https://github.com/mars-project/mars/issues/1395
|
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.4.2.zip/mars/scheduler/graph.py", line 371, in _execute_graph
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 343, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 483, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/scheduler/graph.py", line 603, in prepare_graph
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 389, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 483, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 342, in build
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 389, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 483, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 255, in build
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 294, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 235, in build
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 330, in _tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 195, in _tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/scheduler/graph.py", line 586, in on_tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/context.py", line 68, in h
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 389, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 115, in dispatch
File "/home/admin/work/_public-mars-0.4.2.zip/mars/learn/contrib/lightgbm/align.py", line 87, in tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/dataframe/base/rechunk.py", line 90, in rechunk
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tensor/rechunk/core.py", line 38, in get_nsplits
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tensor/utils.py", line 549, in decide_chunk_sizes
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tensor/utils.py", line 67, in normalize_chunk_sizes
ValueError: chunks shape should be of the same length, got shape: nan, chunks: (nan,)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.4.2.zip/mars/promise.py", line 100, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/worker/calc.py", line 271, in <lambda>
File "/home/admin/work/_public-mars-0.4.2.zip/mars/worker/calc.py", line 244, in _start_calc
File "/home/admin/work/_public-mars-0.4.2.zip/mars/worker/calc.py", line 173, in _calc_results
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
File "mars/actors/pool/gevent_pool.pyx", line 127, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
result = fn(*args, **kwargs)
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 690, in execute_graph
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 571, in execute
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 443, in _execute_operand
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 641, in handle
File "/home/admin/work/_public-pyodps-0.9.3.zip/odps/mars_extension/core.py", line 187, in wrapper
File "/home/admin/work/_public-mars-0.4.2.zip/mars/remote/core.py", line 200, in execute
File "E:/MyDocuments/PycharmProjects/borrowloan/temp_query/test_mars.py", line 31, in light_gbm
lg_reg = lgb.LGBMRegressor()
File "/home/admin/work/_public-mars-0.4.2.zip/mars/learn/contrib/lightgbm/regressor.py", line 28, in fit
File "/home/admin/work/_public-mars-0.4.2.zip/mars/learn/contrib/lightgbm/train.py", line 322, in train
File "/home/admin/work/_public-mars-0.4.2.zip/mars/core.py", line 651, in execute
File "/home/admin/work/_public-mars-0.4.2.zip/mars/core.py", line 370, in execute
File "/home/admin/work/_public-mars-0.4.2.zip/mars/session.py", line 428, in run
File "/home/admin/work/_public-mars-0.4.2.zip/mars/session.py", line 303, in run
mars.errors.ExecutionFailed: '\'\\\'\\\\\\\'"\\\\\\\\\\\\\\\'Graph execution failed.\\\\\\\\\\\\\\\'"\\\\\\\'\\\'\''
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "E:/MyDocuments/PycharmProjects/borrowloan/temp_query/test_mars.py", line 38, in <module>
odps.run_mars_job(light_gbm, args=(tb_name,), worker_num=2, worker_cpu=2, worker_mem=8, mars_image='extended',
File "F:\Anaconda3\lib\site-packages\odps\mars_extension\core.py", line 151, in run_mars_job
r.execute()
File "F:\Anaconda3\lib\site-packages\mars\core.py", line 370, in execute
session.run(self, **kw)
File "F:\Anaconda3\lib\site-packages\mars\session.py", line 428, in run
result = self._sess.run(*tileables, **kw)
File "F:\Anaconda3\lib\site-packages\mars\web\session.py", line 187, in run
if self._check_response_finished(graph_url, timeout_val):
File "F:\Anaconda3\lib\site-packages\mars\web\session.py", line 146, in _check_response_finished
raise ExecutionFailed('Graph execution failed.') from exc
mars.errors.ExecutionFailed: 'Graph execution failed.'
|
ValueError
|
def analyze_graph(self, **kwargs):
operand_infos = self._operand_infos
chunk_graph = self.get_chunk_graph()
# remove fetch chunk if exists
if any(isinstance(c.op, Fetch) for c in chunk_graph):
chunk_graph = chunk_graph.copy()
for c in list(chunk_graph):
if isinstance(c.op, Fetch):
chunk_graph.remove_node(c)
if len(chunk_graph) == 0:
self.state = GraphState.SUCCEEDED
return
for n in chunk_graph:
k = n.op.key
succ_size = chunk_graph.count_successors(n)
if k not in operand_infos:
operand_infos[k] = dict(
optimize=dict(
depth=0,
demand_depths=(),
successor_size=succ_size,
descendant_size=0,
)
)
else:
operand_infos[k]["optimize"]["successor_size"] = succ_size
worker_slots = self._get_worker_slots()
if not worker_slots:
raise RuntimeError("No worker attached for execution")
self._assigned_workers = set(worker_slots)
analyzer = GraphAnalyzer(chunk_graph, worker_slots)
for k, v in analyzer.calc_depths().items():
operand_infos[k]["optimize"]["depth"] = v
for k, v in analyzer.calc_descendant_sizes().items():
operand_infos[k]["optimize"]["descendant_size"] = v
if kwargs.get("do_placement", True):
logger.debug("Placing initial chunks for graph %s", self._graph_key)
self._assign_initial_workers(analyzer)
|
def analyze_graph(self, **kwargs):
operand_infos = self._operand_infos
chunk_graph = self.get_chunk_graph()
# remove fetch chunk if exists
if any(isinstance(c.op, Fetch) for c in chunk_graph):
chunk_graph = chunk_graph.copy()
for c in list(chunk_graph):
if isinstance(c.op, Fetch):
chunk_graph.remove_node(c)
if len(chunk_graph) == 0:
return
for n in chunk_graph:
k = n.op.key
succ_size = chunk_graph.count_successors(n)
if k not in operand_infos:
operand_infos[k] = dict(
optimize=dict(
depth=0,
demand_depths=(),
successor_size=succ_size,
descendant_size=0,
)
)
else:
operand_infos[k]["optimize"]["successor_size"] = succ_size
worker_slots = self._get_worker_slots()
if not worker_slots:
raise RuntimeError("No worker attached for execution")
self._assigned_workers = set(worker_slots)
analyzer = GraphAnalyzer(chunk_graph, worker_slots)
for k, v in analyzer.calc_depths().items():
operand_infos[k]["optimize"]["depth"] = v
for k, v in analyzer.calc_descendant_sizes().items():
operand_infos[k]["optimize"]["descendant_size"] = v
if kwargs.get("do_placement", True):
logger.debug("Placing initial chunks for graph %s", self._graph_key)
self._assign_initial_workers(analyzer)
|
https://github.com/mars-project/mars/issues/1395
|
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.4.2.zip/mars/scheduler/graph.py", line 371, in _execute_graph
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 343, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 483, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/scheduler/graph.py", line 603, in prepare_graph
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 389, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 483, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 342, in build
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 389, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 483, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 255, in build
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 294, in inner
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 235, in build
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 330, in _tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 195, in _tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/scheduler/graph.py", line 586, in on_tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/context.py", line 68, in h
File "/home/admin/work/_public-mars-0.4.2.zip/mars/utils.py", line 389, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tiles.py", line 115, in dispatch
File "/home/admin/work/_public-mars-0.4.2.zip/mars/learn/contrib/lightgbm/align.py", line 87, in tile
File "/home/admin/work/_public-mars-0.4.2.zip/mars/dataframe/base/rechunk.py", line 90, in rechunk
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tensor/rechunk/core.py", line 38, in get_nsplits
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tensor/utils.py", line 549, in decide_chunk_sizes
File "/home/admin/work/_public-mars-0.4.2.zip/mars/tensor/utils.py", line 67, in normalize_chunk_sizes
ValueError: chunks shape should be of the same length, got shape: nan, chunks: (nan,)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/home/admin/work/_public-mars-0.4.2.zip/mars/promise.py", line 100, in _wrapped
File "/home/admin/work/_public-mars-0.4.2.zip/mars/worker/calc.py", line 271, in <lambda>
File "/home/admin/work/_public-mars-0.4.2.zip/mars/worker/calc.py", line 244, in _start_calc
File "/home/admin/work/_public-mars-0.4.2.zip/mars/worker/calc.py", line 173, in _calc_results
File "src/gevent/event.py", line 383, in gevent._gevent_cevent.AsyncResult.result
File "src/gevent/event.py", line 305, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 335, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 323, in gevent._gevent_cevent.AsyncResult.get
File "src/gevent/event.py", line 303, in gevent._gevent_cevent.AsyncResult._raise_exception
File "/opt/conda/lib/python3.7/site-packages/gevent/_compat.py", line 65, in reraise
File "/opt/conda/lib/python3.7/site-packages/gevent/threadpool.py", line 142, in __run_task
File "mars/actors/pool/gevent_pool.pyx", line 127, in mars.actors.pool.gevent_pool.GeventThreadPool._wrap_watch.inner
result = fn(*args, **kwargs)
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 690, in execute_graph
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 571, in execute
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 435, in result
File "/opt/conda/lib/python3.7/concurrent/futures/_base.py", line 384, in __get_result
File "/opt/conda/lib/python3.7/concurrent/futures/thread.py", line 57, in run
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 443, in _execute_operand
File "/home/admin/work/_public-mars-0.4.2.zip/mars/executor.py", line 641, in handle
File "/home/admin/work/_public-pyodps-0.9.3.zip/odps/mars_extension/core.py", line 187, in wrapper
File "/home/admin/work/_public-mars-0.4.2.zip/mars/remote/core.py", line 200, in execute
File "E:/MyDocuments/PycharmProjects/borrowloan/temp_query/test_mars.py", line 31, in light_gbm
lg_reg = lgb.LGBMRegressor()
File "/home/admin/work/_public-mars-0.4.2.zip/mars/learn/contrib/lightgbm/regressor.py", line 28, in fit
File "/home/admin/work/_public-mars-0.4.2.zip/mars/learn/contrib/lightgbm/train.py", line 322, in train
File "/home/admin/work/_public-mars-0.4.2.zip/mars/core.py", line 651, in execute
File "/home/admin/work/_public-mars-0.4.2.zip/mars/core.py", line 370, in execute
File "/home/admin/work/_public-mars-0.4.2.zip/mars/session.py", line 428, in run
File "/home/admin/work/_public-mars-0.4.2.zip/mars/session.py", line 303, in run
mars.errors.ExecutionFailed: '\'\\\'\\\\\\\'"\\\\\\\\\\\\\\\'Graph execution failed.\\\\\\\\\\\\\\\'"\\\\\\\'\\\'\''
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "E:/MyDocuments/PycharmProjects/borrowloan/temp_query/test_mars.py", line 38, in <module>
odps.run_mars_job(light_gbm, args=(tb_name,), worker_num=2, worker_cpu=2, worker_mem=8, mars_image='extended',
File "F:\Anaconda3\lib\site-packages\odps\mars_extension\core.py", line 151, in run_mars_job
r.execute()
File "F:\Anaconda3\lib\site-packages\mars\core.py", line 370, in execute
session.run(self, **kw)
File "F:\Anaconda3\lib\site-packages\mars\session.py", line 428, in run
result = self._sess.run(*tileables, **kw)
File "F:\Anaconda3\lib\site-packages\mars\web\session.py", line 187, in run
if self._check_response_finished(graph_url, timeout_val):
File "F:\Anaconda3\lib\site-packages\mars\web\session.py", line 146, in _check_response_finished
raise ExecutionFailed('Graph execution failed.') from exc
mars.errors.ExecutionFailed: 'Graph execution failed.'
|
ValueError
|
def _calc_chunk_params(
cls, in_chunk, axes, chunk_shape, output, output_type, chunk_op, no_shuffle: bool
):
params = {"index": in_chunk.index}
if output_type == OutputType.tensor:
shape_c = list(in_chunk.shape)
for ax in axes:
if not no_shuffle and chunk_shape[ax] > 1:
shape_c[ax] = np.nan
params["shape"] = tuple(shape_c)
params["dtype"] = in_chunk.dtype
params["order"] = output.order
elif output_type == OutputType.dataframe:
shape_c = list(in_chunk.shape)
if 0 in axes:
if not no_shuffle and chunk_shape[0] > 1:
shape_c[0] = np.nan
params["shape"] = tuple(shape_c)
if 1 not in axes:
params["dtypes"] = in_chunk.dtypes
params["columns_value"] = in_chunk.columns_value
else:
params["dtypes"] = output.dtypes
params["columns_value"] = output.columns_value
params["index_value"] = _shuffle_index_value(chunk_op, in_chunk.index_value)
else:
assert output_type == OutputType.series
if no_shuffle:
params["shape"] = in_chunk.shape
else:
params["shape"] = (np.nan,)
params["name"] = in_chunk.name
params["index_value"] = _shuffle_index_value(chunk_op, in_chunk.index_value)
params["dtype"] = in_chunk.dtype
return params
|
def _calc_chunk_params(
cls, in_chunk, axes, chunk_shape, output, output_type, chunk_op, no_shuffle: bool
):
params = {"index": in_chunk.index}
if output_type == OutputType.tensor:
shape_c = list(in_chunk.shape)
for ax in axes:
if not no_shuffle and chunk_shape[ax] > 1:
shape_c[ax] = np.nan
params["shape"] = tuple(shape_c)
params["dtype"] = in_chunk.dtype
params["order"] = output.order
elif output_type == OutputType.dataframe:
shape_c = list(in_chunk.shape)
if 0 in axes:
if not no_shuffle and chunk_shape[0] > 1:
shape_c[0] = np.nan
params["shape"] = tuple(shape_c)
params["dtypes"] = output.dtypes
params["columns_value"] = output.columns_value
params["index_value"] = _shuffle_index_value(chunk_op, in_chunk.index_value)
else:
assert output_type == OutputType.series
if no_shuffle:
params["shape"] = in_chunk.shape
else:
params["shape"] = (np.nan,)
params["name"] = in_chunk.name
params["index_value"] = _shuffle_index_value(chunk_op, in_chunk.index_value)
params["dtype"] = in_chunk.dtype
return params
|
https://github.com/mars-project/mars/issues/1393
|
In [1]: import mars.dataframe as md
In [2]: from mars.deploy.local import new_cluster
In [3]: cluster = new_cluster()
WARNING: Logging before InitGoogleLogging() is written to STDERR
I0710 12:01:39.413233 286952896 store.cc:1149] Allowing the Plasma store to use up to 3.43597GB of memory.
I0710 12:01:39.414255 286952896 store.cc:1176] Starting object store with directory /tmp and huge page support disabled
In [4]: import mars.tensor as mt
In [5]: df = md.DataFrame(mt.random.rand(10, 3))
In [6]: df.execute()
Out[6]:
0 1 2
0 0.212577 0.758511 0.148990
1 0.525289 0.382298 0.331657
2 0.821829 0.991404 0.504818
3 0.910740 0.971152 0.915968
4 0.540863 0.289341 0.546004
5 0.869099 0.257637 0.282307
6 0.738262 0.636345 0.717278
7 0.064604 0.481792 0.356584
8 0.598765 0.156633 0.140831
9 0.873232 0.527147 0.247436
In [7]: def f(in_df):
...: return in_df.sum().to_pandas()
...:
In [8]: import mars.remote as mr
In [9]: mr.spawn(f, args=(df,)).execute()
Unexpected exception occurred in ExecutionActor.execute_graph. graph_key='6ca5f502ce0c9fbccecc434fde3fbe75'
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/worker/execution.py", line 481, in execute_graph
no_prepare_chunk_keys=io_meta.get('no_prepare_chunk_keys') or set(),
File "/Users/qinxuye/Workspace/mars/mars/worker/execution.py", line 57, in __init__
graph = self.graph = deserialize_graph(graph_serialized)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 283, in deserialize_graph
return graph_cls.from_pb(g)
File "mars/graph.pyx", line 426, in mars.graph.DirectedGraph.from_pb
File "mars/serialize/core.pyx", line 683, in mars.serialize.core.Serializable.from_pb
File "mars/serialize/core.pyx", line 667, in mars.serialize.core.Serializable.deserialize
File "mars/serialize/core.pyx", line 810, in mars.serialize.core.Provider.deserialize_model
File "mars/serialize/core.pyx", line 157, in mars.serialize.core.Field.deserialize
File "mars/serialize/pbserializer.pyx", line 874, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_field
File "mars/serialize/pbserializer.pyx", line 839, in mars.serialize.pbserializer.ProtobufSerializeProvider._deserial_reference_value
File "mars/serialize/core.pyx", line 731, in mars.serialize.core.AttributeAsDict.deserialize
File "mars/serialize/pbserializer.pyx", line 972, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_attribute_as_dict
File "mars/serialize/core.pyx", line 157, in mars.serialize.core.Field.deserialize
File "mars/serialize/pbserializer.pyx", line 878, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_field
File "mars/serialize/pbserializer.pyx", line 293, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_list
File "mars/serialize/pbserializer.pyx", line 812, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_value
File "mars/serialize/pbserializer.pyx", line 802, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_untyped_value
File "mars/serialize/pbserializer.pyx", line 223, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_function
File "/Users/qinxuye/Workspace/mars/mars/remote/core.py", line 46, in __setstate__
chunk = ci[0]().new_chunk(None, _key=ci[1], _id=ci[2], kws=[ci[3]])
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 347, in new_chunk
return self.new_chunks(inputs, kws=kws, **kw)[0]
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 341, in new_chunks
return self._new_chunks(inputs, kws=kws, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/fetch/core.py", line 39, in _new_chunks
return super()._new_chunks(inputs, kws=kws, **kw)
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 315, in _new_chunks
chunk = self._create_chunk(j, index, **create_chunk_kw)
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 283, in _create_chunk
raise ValueError('output_type should be specified')
ValueError: output_type should be specified
Unhandled exception in promise call
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/promise.py", line 372, in _wrapped
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/worker/execution.py", line 481, in execute_graph
no_prepare_chunk_keys=io_meta.get('no_prepare_chunk_keys') or set(),
File "/Users/qinxuye/Workspace/mars/mars/worker/execution.py", line 57, in __init__
graph = self.graph = deserialize_graph(graph_serialized)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 283, in deserialize_graph
return graph_cls.from_pb(g)
File "mars/graph.pyx", line 426, in mars.graph.DirectedGraph.from_pb
File "mars/serialize/core.pyx", line 683, in mars.serialize.core.Serializable.from_pb
File "mars/serialize/core.pyx", line 667, in mars.serialize.core.Serializable.deserialize
File "mars/serialize/core.pyx", line 810, in mars.serialize.core.Provider.deserialize_model
File "mars/serialize/core.pyx", line 157, in mars.serialize.core.Field.deserialize
File "mars/serialize/pbserializer.pyx", line 874, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_field
File "mars/serialize/pbserializer.pyx", line 839, in mars.serialize.pbserializer.ProtobufSerializeProvider._deserial_reference_value
File "mars/serialize/core.pyx", line 731, in mars.serialize.core.AttributeAsDict.deserialize
File "mars/serialize/pbserializer.pyx", line 972, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_attribute_as_dict
File "mars/serialize/core.pyx", line 157, in mars.serialize.core.Field.deserialize
File "mars/serialize/pbserializer.pyx", line 878, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_field
File "mars/serialize/pbserializer.pyx", line 293, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_list
File "mars/serialize/pbserializer.pyx", line 812, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_value
File "mars/serialize/pbserializer.pyx", line 802, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_untyped_value
File "mars/serialize/pbserializer.pyx", line 223, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_function
File "/Users/qinxuye/Workspace/mars/mars/remote/core.py", line 46, in __setstate__
chunk = ci[0]().new_chunk(None, _key=ci[1], _id=ci[2], kws=[ci[3]])
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 347, in new_chunk
return self.new_chunks(inputs, kws=kws, **kw)[0]
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 341, in new_chunks
return self._new_chunks(inputs, kws=kws, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/fetch/core.py", line 39, in _new_chunks
return super()._new_chunks(inputs, kws=kws, **kw)
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 315, in _new_chunks
chunk = self._create_chunk(j, index, **create_chunk_kw)
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 283, in _create_chunk
raise ValueError('output_type should be specified')
ValueError: output_type should be specified
Attempt 1: Unexpected error ValueError occurred in executing operand 6ca5f502ce0c9fbccecc434fde3fbe75 in 0.0.0.0:40516
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/promise.py", line 372, in _wrapped
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/worker/execution.py", line 481, in execute_graph
no_prepare_chunk_keys=io_meta.get('no_prepare_chunk_keys') or set(),
File "/Users/qinxuye/Workspace/mars/mars/worker/execution.py", line 57, in __init__
graph = self.graph = deserialize_graph(graph_serialized)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 283, in deserialize_graph
return graph_cls.from_pb(g)
File "mars/graph.pyx", line 426, in mars.graph.DirectedGraph.from_pb
return cls.deserialize(SerializableGraph.from_pb(pb_obj))
File "mars/serialize/core.pyx", line 683, in mars.serialize.core.Serializable.from_pb
return cls.deserialize(provider, obj)
File "mars/serialize/core.pyx", line 667, in mars.serialize.core.Serializable.deserialize
obj = provider.deserialize_model(cls, obj, callbacks, key_to_instance)
File "mars/serialize/core.pyx", line 810, in mars.serialize.core.Provider.deserialize_model
field.deserialize(self, model_instance, obj, callbacks, key_to_instance)
File "mars/serialize/core.pyx", line 157, in mars.serialize.core.Field.deserialize
return provider.deserialize_field(self, model_instance, obj, callbacks, key_to_instance)
File "mars/serialize/pbserializer.pyx", line 874, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_field
value = [self._deserial_reference_value(
File "mars/serialize/pbserializer.pyx", line 839, in mars.serialize.pbserializer.ProtobufSerializeProvider._deserial_reference_value
return model.deserialize(self, f_obj, callbacks, key_to_instance)
File "mars/serialize/core.pyx", line 731, in mars.serialize.core.AttributeAsDict.deserialize
obj = provider.deserialize_attribute_as_dict(
File "mars/serialize/pbserializer.pyx", line 972, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_attribute_as_dict
it_field.deserialize(self, model_instance, AttrWrapper(d_obj),
File "mars/serialize/core.pyx", line 157, in mars.serialize.core.Field.deserialize
return provider.deserialize_field(self, model_instance, obj, callbacks, key_to_instance)
File "mars/serialize/pbserializer.pyx", line 878, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_field
value = self._get_list(field_obj, field.type, callbacks, field.weak_ref)
File "mars/serialize/pbserializer.pyx", line 293, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_list
val = self._get_value(it_obj, tp.type if tp is not None else tp,
File "mars/serialize/pbserializer.pyx", line 812, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_value
return self._get_untyped_value(obj, callbacks, weak_ref)
File "mars/serialize/pbserializer.pyx", line 802, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_untyped_value
return ref(self._get_function(obj))
File "mars/serialize/pbserializer.pyx", line 223, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_function
return cloudpickle.loads(x) if x is not None and len(x) > 0 else None
File "/Users/qinxuye/Workspace/mars/mars/remote/core.py", line 46, in __setstate__
chunk = ci[0]().new_chunk(None, _key=ci[1], _id=ci[2], kws=[ci[3]])
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 347, in new_chunk
return self.new_chunks(inputs, kws=kws, **kw)[0]
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 341, in new_chunks
return self._new_chunks(inputs, kws=kws, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/fetch/core.py", line 39, in _new_chunks
return super()._new_chunks(inputs, kws=kws, **kw)
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 315, in _new_chunks
chunk = self._create_chunk(j, index, **create_chunk_kw)
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 283, in _create_chunk
raise ValueError('output_type should be specified')
ValueError: output_type should be specified
|
ValueError
|
def __getstate__(self):
fetch_op = self.tileable.op
fetch_tileable = self.tileable
chunk_infos = [
(type(c.op), c.op.output_types, c.key, c.id, c.params)
for c in fetch_tileable.chunks
]
return (
type(fetch_op),
fetch_op.id,
fetch_op.output_types,
fetch_tileable.params,
fetch_tileable.nsplits,
chunk_infos,
)
|
def __getstate__(self):
fetch_op = self.tileable.op
fetch_tileable = self.tileable
chunk_infos = [(type(c.op), c.key, c.id, c.params) for c in fetch_tileable.chunks]
return (
type(fetch_op),
fetch_op.id,
fetch_tileable.params,
fetch_tileable.nsplits,
chunk_infos,
)
|
https://github.com/mars-project/mars/issues/1393
|
In [1]: import mars.dataframe as md
In [2]: from mars.deploy.local import new_cluster
In [3]: cluster = new_cluster()
WARNING: Logging before InitGoogleLogging() is written to STDERR
I0710 12:01:39.413233 286952896 store.cc:1149] Allowing the Plasma store to use up to 3.43597GB of memory.
I0710 12:01:39.414255 286952896 store.cc:1176] Starting object store with directory /tmp and huge page support disabled
In [4]: import mars.tensor as mt
In [5]: df = md.DataFrame(mt.random.rand(10, 3))
In [6]: df.execute()
Out[6]:
0 1 2
0 0.212577 0.758511 0.148990
1 0.525289 0.382298 0.331657
2 0.821829 0.991404 0.504818
3 0.910740 0.971152 0.915968
4 0.540863 0.289341 0.546004
5 0.869099 0.257637 0.282307
6 0.738262 0.636345 0.717278
7 0.064604 0.481792 0.356584
8 0.598765 0.156633 0.140831
9 0.873232 0.527147 0.247436
In [7]: def f(in_df):
...: return in_df.sum().to_pandas()
...:
In [8]: import mars.remote as mr
In [9]: mr.spawn(f, args=(df,)).execute()
Unexpected exception occurred in ExecutionActor.execute_graph. graph_key='6ca5f502ce0c9fbccecc434fde3fbe75'
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/worker/execution.py", line 481, in execute_graph
no_prepare_chunk_keys=io_meta.get('no_prepare_chunk_keys') or set(),
File "/Users/qinxuye/Workspace/mars/mars/worker/execution.py", line 57, in __init__
graph = self.graph = deserialize_graph(graph_serialized)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 283, in deserialize_graph
return graph_cls.from_pb(g)
File "mars/graph.pyx", line 426, in mars.graph.DirectedGraph.from_pb
File "mars/serialize/core.pyx", line 683, in mars.serialize.core.Serializable.from_pb
File "mars/serialize/core.pyx", line 667, in mars.serialize.core.Serializable.deserialize
File "mars/serialize/core.pyx", line 810, in mars.serialize.core.Provider.deserialize_model
File "mars/serialize/core.pyx", line 157, in mars.serialize.core.Field.deserialize
File "mars/serialize/pbserializer.pyx", line 874, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_field
File "mars/serialize/pbserializer.pyx", line 839, in mars.serialize.pbserializer.ProtobufSerializeProvider._deserial_reference_value
File "mars/serialize/core.pyx", line 731, in mars.serialize.core.AttributeAsDict.deserialize
File "mars/serialize/pbserializer.pyx", line 972, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_attribute_as_dict
File "mars/serialize/core.pyx", line 157, in mars.serialize.core.Field.deserialize
File "mars/serialize/pbserializer.pyx", line 878, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_field
File "mars/serialize/pbserializer.pyx", line 293, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_list
File "mars/serialize/pbserializer.pyx", line 812, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_value
File "mars/serialize/pbserializer.pyx", line 802, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_untyped_value
File "mars/serialize/pbserializer.pyx", line 223, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_function
File "/Users/qinxuye/Workspace/mars/mars/remote/core.py", line 46, in __setstate__
chunk = ci[0]().new_chunk(None, _key=ci[1], _id=ci[2], kws=[ci[3]])
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 347, in new_chunk
return self.new_chunks(inputs, kws=kws, **kw)[0]
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 341, in new_chunks
return self._new_chunks(inputs, kws=kws, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/fetch/core.py", line 39, in _new_chunks
return super()._new_chunks(inputs, kws=kws, **kw)
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 315, in _new_chunks
chunk = self._create_chunk(j, index, **create_chunk_kw)
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 283, in _create_chunk
raise ValueError('output_type should be specified')
ValueError: output_type should be specified
Unhandled exception in promise call
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/promise.py", line 372, in _wrapped
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/worker/execution.py", line 481, in execute_graph
no_prepare_chunk_keys=io_meta.get('no_prepare_chunk_keys') or set(),
File "/Users/qinxuye/Workspace/mars/mars/worker/execution.py", line 57, in __init__
graph = self.graph = deserialize_graph(graph_serialized)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 283, in deserialize_graph
return graph_cls.from_pb(g)
File "mars/graph.pyx", line 426, in mars.graph.DirectedGraph.from_pb
File "mars/serialize/core.pyx", line 683, in mars.serialize.core.Serializable.from_pb
File "mars/serialize/core.pyx", line 667, in mars.serialize.core.Serializable.deserialize
File "mars/serialize/core.pyx", line 810, in mars.serialize.core.Provider.deserialize_model
File "mars/serialize/core.pyx", line 157, in mars.serialize.core.Field.deserialize
File "mars/serialize/pbserializer.pyx", line 874, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_field
File "mars/serialize/pbserializer.pyx", line 839, in mars.serialize.pbserializer.ProtobufSerializeProvider._deserial_reference_value
File "mars/serialize/core.pyx", line 731, in mars.serialize.core.AttributeAsDict.deserialize
File "mars/serialize/pbserializer.pyx", line 972, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_attribute_as_dict
File "mars/serialize/core.pyx", line 157, in mars.serialize.core.Field.deserialize
File "mars/serialize/pbserializer.pyx", line 878, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_field
File "mars/serialize/pbserializer.pyx", line 293, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_list
File "mars/serialize/pbserializer.pyx", line 812, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_value
File "mars/serialize/pbserializer.pyx", line 802, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_untyped_value
File "mars/serialize/pbserializer.pyx", line 223, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_function
File "/Users/qinxuye/Workspace/mars/mars/remote/core.py", line 46, in __setstate__
chunk = ci[0]().new_chunk(None, _key=ci[1], _id=ci[2], kws=[ci[3]])
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 347, in new_chunk
return self.new_chunks(inputs, kws=kws, **kw)[0]
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 341, in new_chunks
return self._new_chunks(inputs, kws=kws, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/fetch/core.py", line 39, in _new_chunks
return super()._new_chunks(inputs, kws=kws, **kw)
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 315, in _new_chunks
chunk = self._create_chunk(j, index, **create_chunk_kw)
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 283, in _create_chunk
raise ValueError('output_type should be specified')
ValueError: output_type should be specified
Attempt 1: Unexpected error ValueError occurred in executing operand 6ca5f502ce0c9fbccecc434fde3fbe75 in 0.0.0.0:40516
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/promise.py", line 372, in _wrapped
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/worker/execution.py", line 481, in execute_graph
no_prepare_chunk_keys=io_meta.get('no_prepare_chunk_keys') or set(),
File "/Users/qinxuye/Workspace/mars/mars/worker/execution.py", line 57, in __init__
graph = self.graph = deserialize_graph(graph_serialized)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 283, in deserialize_graph
return graph_cls.from_pb(g)
File "mars/graph.pyx", line 426, in mars.graph.DirectedGraph.from_pb
return cls.deserialize(SerializableGraph.from_pb(pb_obj))
File "mars/serialize/core.pyx", line 683, in mars.serialize.core.Serializable.from_pb
return cls.deserialize(provider, obj)
File "mars/serialize/core.pyx", line 667, in mars.serialize.core.Serializable.deserialize
obj = provider.deserialize_model(cls, obj, callbacks, key_to_instance)
File "mars/serialize/core.pyx", line 810, in mars.serialize.core.Provider.deserialize_model
field.deserialize(self, model_instance, obj, callbacks, key_to_instance)
File "mars/serialize/core.pyx", line 157, in mars.serialize.core.Field.deserialize
return provider.deserialize_field(self, model_instance, obj, callbacks, key_to_instance)
File "mars/serialize/pbserializer.pyx", line 874, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_field
value = [self._deserial_reference_value(
File "mars/serialize/pbserializer.pyx", line 839, in mars.serialize.pbserializer.ProtobufSerializeProvider._deserial_reference_value
return model.deserialize(self, f_obj, callbacks, key_to_instance)
File "mars/serialize/core.pyx", line 731, in mars.serialize.core.AttributeAsDict.deserialize
obj = provider.deserialize_attribute_as_dict(
File "mars/serialize/pbserializer.pyx", line 972, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_attribute_as_dict
it_field.deserialize(self, model_instance, AttrWrapper(d_obj),
File "mars/serialize/core.pyx", line 157, in mars.serialize.core.Field.deserialize
return provider.deserialize_field(self, model_instance, obj, callbacks, key_to_instance)
File "mars/serialize/pbserializer.pyx", line 878, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_field
value = self._get_list(field_obj, field.type, callbacks, field.weak_ref)
File "mars/serialize/pbserializer.pyx", line 293, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_list
val = self._get_value(it_obj, tp.type if tp is not None else tp,
File "mars/serialize/pbserializer.pyx", line 812, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_value
return self._get_untyped_value(obj, callbacks, weak_ref)
File "mars/serialize/pbserializer.pyx", line 802, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_untyped_value
return ref(self._get_function(obj))
File "mars/serialize/pbserializer.pyx", line 223, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_function
return cloudpickle.loads(x) if x is not None and len(x) > 0 else None
File "/Users/qinxuye/Workspace/mars/mars/remote/core.py", line 46, in __setstate__
chunk = ci[0]().new_chunk(None, _key=ci[1], _id=ci[2], kws=[ci[3]])
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 347, in new_chunk
return self.new_chunks(inputs, kws=kws, **kw)[0]
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 341, in new_chunks
return self._new_chunks(inputs, kws=kws, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/fetch/core.py", line 39, in _new_chunks
return super()._new_chunks(inputs, kws=kws, **kw)
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 315, in _new_chunks
chunk = self._create_chunk(j, index, **create_chunk_kw)
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 283, in _create_chunk
raise ValueError('output_type should be specified')
ValueError: output_type should be specified
|
ValueError
|
def __setstate__(self, state):
fetch_op_type, fetch_op_id, output_types, params, nsplits, chunk_infos = state
params["nsplits"] = nsplits
chunks = []
for ci in chunk_infos:
chunk_op_type, chunk_op_output_types, chunk_key, chunk_id, chunk_params = ci
chunk = chunk_op_type(output_types=chunk_op_output_types).new_chunk(
None, _key=chunk_key, _id=chunk_id, kws=[chunk_params]
)
chunks.append(chunk)
params["chunks"] = chunks
self.tileable = fetch_op_type(
_id=fetch_op_id, output_types=output_types
).new_tileable(None, kws=[params])
|
def __setstate__(self, state):
fetch_op_type, fetch_op_id, params, nsplits, chunk_infos = state
params["nsplits"] = nsplits
chunks = []
for ci in chunk_infos:
chunk = ci[0]().new_chunk(None, _key=ci[1], _id=ci[2], kws=[ci[3]])
chunks.append(chunk)
params["chunks"] = chunks
self.tileable = fetch_op_type(_id=fetch_op_id).new_tileable(None, kws=[params])
|
https://github.com/mars-project/mars/issues/1393
|
In [1]: import mars.dataframe as md
In [2]: from mars.deploy.local import new_cluster
In [3]: cluster = new_cluster()
WARNING: Logging before InitGoogleLogging() is written to STDERR
I0710 12:01:39.413233 286952896 store.cc:1149] Allowing the Plasma store to use up to 3.43597GB of memory.
I0710 12:01:39.414255 286952896 store.cc:1176] Starting object store with directory /tmp and huge page support disabled
In [4]: import mars.tensor as mt
In [5]: df = md.DataFrame(mt.random.rand(10, 3))
In [6]: df.execute()
Out[6]:
0 1 2
0 0.212577 0.758511 0.148990
1 0.525289 0.382298 0.331657
2 0.821829 0.991404 0.504818
3 0.910740 0.971152 0.915968
4 0.540863 0.289341 0.546004
5 0.869099 0.257637 0.282307
6 0.738262 0.636345 0.717278
7 0.064604 0.481792 0.356584
8 0.598765 0.156633 0.140831
9 0.873232 0.527147 0.247436
In [7]: def f(in_df):
...: return in_df.sum().to_pandas()
...:
In [8]: import mars.remote as mr
In [9]: mr.spawn(f, args=(df,)).execute()
Unexpected exception occurred in ExecutionActor.execute_graph. graph_key='6ca5f502ce0c9fbccecc434fde3fbe75'
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/worker/execution.py", line 481, in execute_graph
no_prepare_chunk_keys=io_meta.get('no_prepare_chunk_keys') or set(),
File "/Users/qinxuye/Workspace/mars/mars/worker/execution.py", line 57, in __init__
graph = self.graph = deserialize_graph(graph_serialized)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 283, in deserialize_graph
return graph_cls.from_pb(g)
File "mars/graph.pyx", line 426, in mars.graph.DirectedGraph.from_pb
File "mars/serialize/core.pyx", line 683, in mars.serialize.core.Serializable.from_pb
File "mars/serialize/core.pyx", line 667, in mars.serialize.core.Serializable.deserialize
File "mars/serialize/core.pyx", line 810, in mars.serialize.core.Provider.deserialize_model
File "mars/serialize/core.pyx", line 157, in mars.serialize.core.Field.deserialize
File "mars/serialize/pbserializer.pyx", line 874, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_field
File "mars/serialize/pbserializer.pyx", line 839, in mars.serialize.pbserializer.ProtobufSerializeProvider._deserial_reference_value
File "mars/serialize/core.pyx", line 731, in mars.serialize.core.AttributeAsDict.deserialize
File "mars/serialize/pbserializer.pyx", line 972, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_attribute_as_dict
File "mars/serialize/core.pyx", line 157, in mars.serialize.core.Field.deserialize
File "mars/serialize/pbserializer.pyx", line 878, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_field
File "mars/serialize/pbserializer.pyx", line 293, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_list
File "mars/serialize/pbserializer.pyx", line 812, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_value
File "mars/serialize/pbserializer.pyx", line 802, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_untyped_value
File "mars/serialize/pbserializer.pyx", line 223, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_function
File "/Users/qinxuye/Workspace/mars/mars/remote/core.py", line 46, in __setstate__
chunk = ci[0]().new_chunk(None, _key=ci[1], _id=ci[2], kws=[ci[3]])
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 347, in new_chunk
return self.new_chunks(inputs, kws=kws, **kw)[0]
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 341, in new_chunks
return self._new_chunks(inputs, kws=kws, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/fetch/core.py", line 39, in _new_chunks
return super()._new_chunks(inputs, kws=kws, **kw)
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 315, in _new_chunks
chunk = self._create_chunk(j, index, **create_chunk_kw)
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 283, in _create_chunk
raise ValueError('output_type should be specified')
ValueError: output_type should be specified
Unhandled exception in promise call
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/promise.py", line 372, in _wrapped
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/worker/execution.py", line 481, in execute_graph
no_prepare_chunk_keys=io_meta.get('no_prepare_chunk_keys') or set(),
File "/Users/qinxuye/Workspace/mars/mars/worker/execution.py", line 57, in __init__
graph = self.graph = deserialize_graph(graph_serialized)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 283, in deserialize_graph
return graph_cls.from_pb(g)
File "mars/graph.pyx", line 426, in mars.graph.DirectedGraph.from_pb
File "mars/serialize/core.pyx", line 683, in mars.serialize.core.Serializable.from_pb
File "mars/serialize/core.pyx", line 667, in mars.serialize.core.Serializable.deserialize
File "mars/serialize/core.pyx", line 810, in mars.serialize.core.Provider.deserialize_model
File "mars/serialize/core.pyx", line 157, in mars.serialize.core.Field.deserialize
File "mars/serialize/pbserializer.pyx", line 874, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_field
File "mars/serialize/pbserializer.pyx", line 839, in mars.serialize.pbserializer.ProtobufSerializeProvider._deserial_reference_value
File "mars/serialize/core.pyx", line 731, in mars.serialize.core.AttributeAsDict.deserialize
File "mars/serialize/pbserializer.pyx", line 972, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_attribute_as_dict
File "mars/serialize/core.pyx", line 157, in mars.serialize.core.Field.deserialize
File "mars/serialize/pbserializer.pyx", line 878, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_field
File "mars/serialize/pbserializer.pyx", line 293, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_list
File "mars/serialize/pbserializer.pyx", line 812, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_value
File "mars/serialize/pbserializer.pyx", line 802, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_untyped_value
File "mars/serialize/pbserializer.pyx", line 223, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_function
File "/Users/qinxuye/Workspace/mars/mars/remote/core.py", line 46, in __setstate__
chunk = ci[0]().new_chunk(None, _key=ci[1], _id=ci[2], kws=[ci[3]])
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 347, in new_chunk
return self.new_chunks(inputs, kws=kws, **kw)[0]
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 341, in new_chunks
return self._new_chunks(inputs, kws=kws, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/fetch/core.py", line 39, in _new_chunks
return super()._new_chunks(inputs, kws=kws, **kw)
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 315, in _new_chunks
chunk = self._create_chunk(j, index, **create_chunk_kw)
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 283, in _create_chunk
raise ValueError('output_type should be specified')
ValueError: output_type should be specified
Attempt 1: Unexpected error ValueError occurred in executing operand 6ca5f502ce0c9fbccecc434fde3fbe75 in 0.0.0.0:40516
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/promise.py", line 372, in _wrapped
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/worker/execution.py", line 481, in execute_graph
no_prepare_chunk_keys=io_meta.get('no_prepare_chunk_keys') or set(),
File "/Users/qinxuye/Workspace/mars/mars/worker/execution.py", line 57, in __init__
graph = self.graph = deserialize_graph(graph_serialized)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 283, in deserialize_graph
return graph_cls.from_pb(g)
File "mars/graph.pyx", line 426, in mars.graph.DirectedGraph.from_pb
return cls.deserialize(SerializableGraph.from_pb(pb_obj))
File "mars/serialize/core.pyx", line 683, in mars.serialize.core.Serializable.from_pb
return cls.deserialize(provider, obj)
File "mars/serialize/core.pyx", line 667, in mars.serialize.core.Serializable.deserialize
obj = provider.deserialize_model(cls, obj, callbacks, key_to_instance)
File "mars/serialize/core.pyx", line 810, in mars.serialize.core.Provider.deserialize_model
field.deserialize(self, model_instance, obj, callbacks, key_to_instance)
File "mars/serialize/core.pyx", line 157, in mars.serialize.core.Field.deserialize
return provider.deserialize_field(self, model_instance, obj, callbacks, key_to_instance)
File "mars/serialize/pbserializer.pyx", line 874, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_field
value = [self._deserial_reference_value(
File "mars/serialize/pbserializer.pyx", line 839, in mars.serialize.pbserializer.ProtobufSerializeProvider._deserial_reference_value
return model.deserialize(self, f_obj, callbacks, key_to_instance)
File "mars/serialize/core.pyx", line 731, in mars.serialize.core.AttributeAsDict.deserialize
obj = provider.deserialize_attribute_as_dict(
File "mars/serialize/pbserializer.pyx", line 972, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_attribute_as_dict
it_field.deserialize(self, model_instance, AttrWrapper(d_obj),
File "mars/serialize/core.pyx", line 157, in mars.serialize.core.Field.deserialize
return provider.deserialize_field(self, model_instance, obj, callbacks, key_to_instance)
File "mars/serialize/pbserializer.pyx", line 878, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_field
value = self._get_list(field_obj, field.type, callbacks, field.weak_ref)
File "mars/serialize/pbserializer.pyx", line 293, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_list
val = self._get_value(it_obj, tp.type if tp is not None else tp,
File "mars/serialize/pbserializer.pyx", line 812, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_value
return self._get_untyped_value(obj, callbacks, weak_ref)
File "mars/serialize/pbserializer.pyx", line 802, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_untyped_value
return ref(self._get_function(obj))
File "mars/serialize/pbserializer.pyx", line 223, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_function
return cloudpickle.loads(x) if x is not None and len(x) > 0 else None
File "/Users/qinxuye/Workspace/mars/mars/remote/core.py", line 46, in __setstate__
chunk = ci[0]().new_chunk(None, _key=ci[1], _id=ci[2], kws=[ci[3]])
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 347, in new_chunk
return self.new_chunks(inputs, kws=kws, **kw)[0]
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 341, in new_chunks
return self._new_chunks(inputs, kws=kws, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/fetch/core.py", line 39, in _new_chunks
return super()._new_chunks(inputs, kws=kws, **kw)
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 315, in _new_chunks
chunk = self._create_chunk(j, index, **create_chunk_kw)
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 283, in _create_chunk
raise ValueError('output_type should be specified')
ValueError: output_type should be specified
|
ValueError
|
def spawn(func, args=(), kwargs=None, retry_when_fail=False, n_output=None):
"""
Spawn a function and return a Mars Object which can be executed later.
Parameters
----------
func : function
Function to spawn.
args: tuple
Args to pass to function
kwargs: dict
Kwargs to pass to function
retry_when_fail: bool, default False
If True, retry when function failed.
n_output: int
Count of outputs for the function
Returns
-------
Object
Mars Object.
Examples
--------
>>> import mars.remote as mr
>>> def inc(x):
>>> return x + 1
>>>
>>> result = mr.spawn(inc, args=(0,))
>>> result
Object <op=RemoteFunction, key=e0b31261d70dd9b1e00da469666d72d9>
>>> result.execute().fetch()
1
List of spawned functions can be converted to :class:`mars.remote.ExecutableTuple`,
and `.execute()` can be called to run together.
>>> results = [mr.spawn(inc, args=(i,)) for i in range(10)]
>>> mr.ExecutableTuple(results).execute().fetch()
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
Mars Object returned by :meth:`mars.remote.spawn` can be treated
as arguments for other spawn functions.
>>> results = [mr.spawn(inc, args=(i,)) for i in range(10)] # list of spawned functions
>>> def sum_all(xs):
return sum(xs)
>>> mr.spawn(sum_all, args=(results,)).execute().fetch()
55
inside a spawned function, new functions can be spawned.
>>> def driver():
>>> results = [mr.spawn(inc, args=(i,)) for i in range(10)]
>>> return mr.ExecutableTuple(results).execute().fetch()
>>>
>>> mr.spawn(driver).execute().fetch()
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
Mars tensor, DataFrame and so forth is available in spawned functions as well.
>>> import mars.tensor as mt
>>> def driver2():
>>> t = mt.random.rand(10, 10)
>>> return t.sum().to_numpy()
>>>
>>> mr.spawn(driver2).execute().fetch()
52.47844223908132
Argument of `n_output` can indicate that the spawned function will return multiple outputs.
This is important when some of the outputs may be passed to different functions.
>>> def triage(alist):
>>> ret = [], []
>>> for i in alist:
>>> if i < 0.5:
>>> ret[0].append(i)
>>> else:
>>> ret[1].append(i)
>>> return ret
>>>
>>> def sum_all(xs):
>>> return sum(xs)
>>>
>>> l = [0.4, 0.7, 0.2, 0.8]
>>> la, lb = mr.spawn(triage, args=(l,), n_output=2)
>>>
>>> sa = mr.spawn(sum_all, args=(la,))
>>> sb = mr.spawn(sum_all, args=(lb,))
>>> mr.ExecutableTuple([sa, sb]).execute().fetch()
>>> [0.6000000000000001, 1.5]
"""
if not isinstance(args, tuple):
args = [args]
else:
args = list(args)
if kwargs is None:
kwargs = dict()
if not isinstance(kwargs, dict):
raise TypeError("kwargs has to be a dict")
op = RemoteFunction(
function=func,
function_args=args,
function_kwargs=kwargs,
retry_when_fail=retry_when_fail,
n_output=n_output,
)
return op()
|
def spawn(func, args=(), kwargs=None, retry_when_fail=True, n_output=None):
"""
Spawn a function and return a Mars Object which can be executed later.
Parameters
----------
func : function
Function to spawn.
args: tuple
Args to pass to function
kwargs: dict
Kwargs to pass to function
retry_when_fail: bool, default True
If True, retry when function failed.
n_output: int
Count of outputs for the function
Returns
-------
Object
Mars Object.
Examples
--------
>>> import mars.remote as mr
>>> def inc(x):
>>> return x + 1
>>>
>>> result = mr.spawn(inc, args=(0,))
>>> result
Object <op=RemoteFunction, key=e0b31261d70dd9b1e00da469666d72d9>
>>> result.execute().fetch()
1
List of spawned functions can be converted to :class:`mars.remote.ExecutableTuple`,
and `.execute()` can be called to run together.
>>> results = [mr.spawn(inc, args=(i,)) for i in range(10)]
>>> mr.ExecutableTuple(results).execute().fetch()
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
Mars Object returned by :meth:`mars.remote.spawn` can be treated
as arguments for other spawn functions.
>>> results = [mr.spawn(inc, args=(i,)) for i in range(10)] # list of spawned functions
>>> def sum_all(xs):
return sum(xs)
>>> mr.spawn(sum_all, args=(results,)).execute().fetch()
55
inside a spawned function, new functions can be spawned.
>>> def driver():
>>> results = [mr.spawn(inc, args=(i,)) for i in range(10)]
>>> return mr.ExecutableTuple(results).execute().fetch()
>>>
>>> mr.spawn(driver).execute().fetch()
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
Mars tensor, DataFrame and so forth is available in spawned functions as well.
>>> import mars.tensor as mt
>>> def driver2():
>>> t = mt.random.rand(10, 10)
>>> return t.sum().to_numpy()
>>>
>>> mr.spawn(driver2).execute().fetch()
52.47844223908132
Argument of `n_output` can indicate that the spawned function will return multiple outputs.
This is important when some of the outputs may be passed to different functions.
>>> def triage(alist):
>>> ret = [], []
>>> for i in alist:
>>> if i < 0.5:
>>> ret[0].append(i)
>>> else:
>>> ret[1].append(i)
>>> return ret
>>>
>>> def sum_all(xs):
>>> return sum(xs)
>>>
>>> l = [0.4, 0.7, 0.2, 0.8]
>>> la, lb = mr.spawn(triage, args=(l,), n_output=2)
>>>
>>> sa = mr.spawn(sum_all, args=(la,))
>>> sb = mr.spawn(sum_all, args=(lb,))
>>> mr.ExecutableTuple([sa, sb]).execute().fetch()
>>> [0.6000000000000001, 1.5]
"""
if not isinstance(args, tuple):
args = [args]
else:
args = list(args)
if kwargs is None:
kwargs = dict()
if not isinstance(kwargs, dict):
raise TypeError("kwargs has to be a dict")
op = RemoteFunction(
function=func,
function_args=args,
function_kwargs=kwargs,
retry_when_fail=retry_when_fail,
n_output=n_output,
)
return op()
|
https://github.com/mars-project/mars/issues/1393
|
In [1]: import mars.dataframe as md
In [2]: from mars.deploy.local import new_cluster
In [3]: cluster = new_cluster()
WARNING: Logging before InitGoogleLogging() is written to STDERR
I0710 12:01:39.413233 286952896 store.cc:1149] Allowing the Plasma store to use up to 3.43597GB of memory.
I0710 12:01:39.414255 286952896 store.cc:1176] Starting object store with directory /tmp and huge page support disabled
In [4]: import mars.tensor as mt
In [5]: df = md.DataFrame(mt.random.rand(10, 3))
In [6]: df.execute()
Out[6]:
0 1 2
0 0.212577 0.758511 0.148990
1 0.525289 0.382298 0.331657
2 0.821829 0.991404 0.504818
3 0.910740 0.971152 0.915968
4 0.540863 0.289341 0.546004
5 0.869099 0.257637 0.282307
6 0.738262 0.636345 0.717278
7 0.064604 0.481792 0.356584
8 0.598765 0.156633 0.140831
9 0.873232 0.527147 0.247436
In [7]: def f(in_df):
...: return in_df.sum().to_pandas()
...:
In [8]: import mars.remote as mr
In [9]: mr.spawn(f, args=(df,)).execute()
Unexpected exception occurred in ExecutionActor.execute_graph. graph_key='6ca5f502ce0c9fbccecc434fde3fbe75'
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/worker/execution.py", line 481, in execute_graph
no_prepare_chunk_keys=io_meta.get('no_prepare_chunk_keys') or set(),
File "/Users/qinxuye/Workspace/mars/mars/worker/execution.py", line 57, in __init__
graph = self.graph = deserialize_graph(graph_serialized)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 283, in deserialize_graph
return graph_cls.from_pb(g)
File "mars/graph.pyx", line 426, in mars.graph.DirectedGraph.from_pb
File "mars/serialize/core.pyx", line 683, in mars.serialize.core.Serializable.from_pb
File "mars/serialize/core.pyx", line 667, in mars.serialize.core.Serializable.deserialize
File "mars/serialize/core.pyx", line 810, in mars.serialize.core.Provider.deserialize_model
File "mars/serialize/core.pyx", line 157, in mars.serialize.core.Field.deserialize
File "mars/serialize/pbserializer.pyx", line 874, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_field
File "mars/serialize/pbserializer.pyx", line 839, in mars.serialize.pbserializer.ProtobufSerializeProvider._deserial_reference_value
File "mars/serialize/core.pyx", line 731, in mars.serialize.core.AttributeAsDict.deserialize
File "mars/serialize/pbserializer.pyx", line 972, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_attribute_as_dict
File "mars/serialize/core.pyx", line 157, in mars.serialize.core.Field.deserialize
File "mars/serialize/pbserializer.pyx", line 878, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_field
File "mars/serialize/pbserializer.pyx", line 293, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_list
File "mars/serialize/pbserializer.pyx", line 812, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_value
File "mars/serialize/pbserializer.pyx", line 802, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_untyped_value
File "mars/serialize/pbserializer.pyx", line 223, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_function
File "/Users/qinxuye/Workspace/mars/mars/remote/core.py", line 46, in __setstate__
chunk = ci[0]().new_chunk(None, _key=ci[1], _id=ci[2], kws=[ci[3]])
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 347, in new_chunk
return self.new_chunks(inputs, kws=kws, **kw)[0]
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 341, in new_chunks
return self._new_chunks(inputs, kws=kws, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/fetch/core.py", line 39, in _new_chunks
return super()._new_chunks(inputs, kws=kws, **kw)
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 315, in _new_chunks
chunk = self._create_chunk(j, index, **create_chunk_kw)
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 283, in _create_chunk
raise ValueError('output_type should be specified')
ValueError: output_type should be specified
Unhandled exception in promise call
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/promise.py", line 372, in _wrapped
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/worker/execution.py", line 481, in execute_graph
no_prepare_chunk_keys=io_meta.get('no_prepare_chunk_keys') or set(),
File "/Users/qinxuye/Workspace/mars/mars/worker/execution.py", line 57, in __init__
graph = self.graph = deserialize_graph(graph_serialized)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 283, in deserialize_graph
return graph_cls.from_pb(g)
File "mars/graph.pyx", line 426, in mars.graph.DirectedGraph.from_pb
File "mars/serialize/core.pyx", line 683, in mars.serialize.core.Serializable.from_pb
File "mars/serialize/core.pyx", line 667, in mars.serialize.core.Serializable.deserialize
File "mars/serialize/core.pyx", line 810, in mars.serialize.core.Provider.deserialize_model
File "mars/serialize/core.pyx", line 157, in mars.serialize.core.Field.deserialize
File "mars/serialize/pbserializer.pyx", line 874, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_field
File "mars/serialize/pbserializer.pyx", line 839, in mars.serialize.pbserializer.ProtobufSerializeProvider._deserial_reference_value
File "mars/serialize/core.pyx", line 731, in mars.serialize.core.AttributeAsDict.deserialize
File "mars/serialize/pbserializer.pyx", line 972, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_attribute_as_dict
File "mars/serialize/core.pyx", line 157, in mars.serialize.core.Field.deserialize
File "mars/serialize/pbserializer.pyx", line 878, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_field
File "mars/serialize/pbserializer.pyx", line 293, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_list
File "mars/serialize/pbserializer.pyx", line 812, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_value
File "mars/serialize/pbserializer.pyx", line 802, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_untyped_value
File "mars/serialize/pbserializer.pyx", line 223, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_function
File "/Users/qinxuye/Workspace/mars/mars/remote/core.py", line 46, in __setstate__
chunk = ci[0]().new_chunk(None, _key=ci[1], _id=ci[2], kws=[ci[3]])
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 347, in new_chunk
return self.new_chunks(inputs, kws=kws, **kw)[0]
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 341, in new_chunks
return self._new_chunks(inputs, kws=kws, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/fetch/core.py", line 39, in _new_chunks
return super()._new_chunks(inputs, kws=kws, **kw)
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 315, in _new_chunks
chunk = self._create_chunk(j, index, **create_chunk_kw)
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 283, in _create_chunk
raise ValueError('output_type should be specified')
ValueError: output_type should be specified
Attempt 1: Unexpected error ValueError occurred in executing operand 6ca5f502ce0c9fbccecc434fde3fbe75 in 0.0.0.0:40516
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/promise.py", line 372, in _wrapped
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 353, in _wrapped
return func(*args, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/worker/execution.py", line 481, in execute_graph
no_prepare_chunk_keys=io_meta.get('no_prepare_chunk_keys') or set(),
File "/Users/qinxuye/Workspace/mars/mars/worker/execution.py", line 57, in __init__
graph = self.graph = deserialize_graph(graph_serialized)
File "/Users/qinxuye/Workspace/mars/mars/utils.py", line 283, in deserialize_graph
return graph_cls.from_pb(g)
File "mars/graph.pyx", line 426, in mars.graph.DirectedGraph.from_pb
return cls.deserialize(SerializableGraph.from_pb(pb_obj))
File "mars/serialize/core.pyx", line 683, in mars.serialize.core.Serializable.from_pb
return cls.deserialize(provider, obj)
File "mars/serialize/core.pyx", line 667, in mars.serialize.core.Serializable.deserialize
obj = provider.deserialize_model(cls, obj, callbacks, key_to_instance)
File "mars/serialize/core.pyx", line 810, in mars.serialize.core.Provider.deserialize_model
field.deserialize(self, model_instance, obj, callbacks, key_to_instance)
File "mars/serialize/core.pyx", line 157, in mars.serialize.core.Field.deserialize
return provider.deserialize_field(self, model_instance, obj, callbacks, key_to_instance)
File "mars/serialize/pbserializer.pyx", line 874, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_field
value = [self._deserial_reference_value(
File "mars/serialize/pbserializer.pyx", line 839, in mars.serialize.pbserializer.ProtobufSerializeProvider._deserial_reference_value
return model.deserialize(self, f_obj, callbacks, key_to_instance)
File "mars/serialize/core.pyx", line 731, in mars.serialize.core.AttributeAsDict.deserialize
obj = provider.deserialize_attribute_as_dict(
File "mars/serialize/pbserializer.pyx", line 972, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_attribute_as_dict
it_field.deserialize(self, model_instance, AttrWrapper(d_obj),
File "mars/serialize/core.pyx", line 157, in mars.serialize.core.Field.deserialize
return provider.deserialize_field(self, model_instance, obj, callbacks, key_to_instance)
File "mars/serialize/pbserializer.pyx", line 878, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_field
value = self._get_list(field_obj, field.type, callbacks, field.weak_ref)
File "mars/serialize/pbserializer.pyx", line 293, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_list
val = self._get_value(it_obj, tp.type if tp is not None else tp,
File "mars/serialize/pbserializer.pyx", line 812, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_value
return self._get_untyped_value(obj, callbacks, weak_ref)
File "mars/serialize/pbserializer.pyx", line 802, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_untyped_value
return ref(self._get_function(obj))
File "mars/serialize/pbserializer.pyx", line 223, in mars.serialize.pbserializer.ProtobufSerializeProvider._get_function
return cloudpickle.loads(x) if x is not None and len(x) > 0 else None
File "/Users/qinxuye/Workspace/mars/mars/remote/core.py", line 46, in __setstate__
chunk = ci[0]().new_chunk(None, _key=ci[1], _id=ci[2], kws=[ci[3]])
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 347, in new_chunk
return self.new_chunks(inputs, kws=kws, **kw)[0]
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 341, in new_chunks
return self._new_chunks(inputs, kws=kws, **kwargs)
File "/Users/qinxuye/Workspace/mars/mars/dataframe/fetch/core.py", line 39, in _new_chunks
return super()._new_chunks(inputs, kws=kws, **kw)
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 315, in _new_chunks
chunk = self._create_chunk(j, index, **create_chunk_kw)
File "/Users/qinxuye/Workspace/mars/mars/operands.py", line 283, in _create_chunk
raise ValueError('output_type should be specified')
ValueError: output_type should be specified
|
ValueError
|
def _collect_info(self, engine_or_conn, selectable, columns, test_rows):
from sqlalchemy import sql
# fetch test DataFrame
if columns:
query = sql.select([sql.column(c) for c in columns], from_obj=selectable).limit(
test_rows
)
else:
query = sql.select("*", from_obj=selectable).limit(test_rows)
test_df = pd.read_sql(
query,
engine_or_conn,
index_col=self._index_col,
coerce_float=self._coerce_float,
parse_dates=self._parse_dates,
)
if len(test_df) == 0:
self._row_memory_usage = None
else:
self._row_memory_usage = test_df.memory_usage(
deep=True, index=True
).sum() / len(test_df)
if self._method == "offset":
# fetch size
size = list(
engine_or_conn.execute(
sql.select([sql.func.count()]).select_from(selectable)
)
)[0][0]
shape = (size, test_df.shape[1])
else:
shape = (np.nan, test_df.shape[1])
return test_df, shape
|
def _collect_info(self, engine_or_conn, selectable, columns, test_rows):
from sqlalchemy import sql
# fetch test DataFrame
if columns:
query = sql.select([sql.column(c) for c in columns], from_obj=selectable).limit(
test_rows
)
else:
query = sql.select("*", from_obj=selectable).limit(test_rows)
test_df = pd.read_sql(
query,
engine_or_conn,
index_col=self._index_col,
coerce_float=self._coerce_float,
parse_dates=self._parse_dates,
)
self._row_memory_usage = (
test_df.memory_usage(deep=True, index=True).sum() / test_rows
)
if self._method == "offset":
# fetch size
size = list(
engine_or_conn.execute(
sql.select([sql.func.count()]).select_from(selectable)
)
)[0][0]
shape = (size, test_df.shape[1])
else:
shape = (np.nan, test_df.shape[1])
return test_df, shape
|
https://github.com/mars-project/mars/issues/1368
|
In [1]: import mars.dataframe as md
In [7]: import sqlalchemy as sa
In [9]: con = sa.create_engine('sqlite:///database.sqlite', echo=False)
In [10]: df = md.read_sql('loan', con)
In [11]: df.head().execute()
Out[11]:
id member_id loan_amnt funded_amnt funded_amnt_inv term int_rate installment grade sub_grade ... hardship_payoff_balance_amount hardship_last_payment_amount disbursement_method debt_settlement_flag debt_settlement_flag_date settlement_status settlement_date settlement_amount settlement_percentage settlement_term
0 1000 1000 0 36 months 10.71 32.61 B B5 ... Cash N
1 1000 1000 0 36 months 16.08 35.2 F F2 ... Cash N
2 1000 1000 0 36 months 9.45 32.01 B B1 ... Cash N
3 1000 1000 0 36 months 9.64 32.11 B B4 ... Cash N
4 1000 1000 0.004353680261 36 months 11.28 32.88 C C1 ... Cash N
[5 rows x 145 columns]
In [12]: df = md.read_sql("select * from loan where grade='A' and disbursement_method='cash'", con)
In [13]: df.head().execute()
/Users/xuyeqin/Workspace/mars/mars/dataframe/datasource/read_sql.py:277: RuntimeWarning: divide by zero encountered in double_scalars
chunk_size = (int(options.chunk_store_limit / op.row_memory_usage), df.shape[1])
---------------------------------------------------------------------------
OverflowError Traceback (most recent call last)
<ipython-input-13-017f342e9a9d> in <module>
----> 1 df.head().execute()
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
560
561 def execute(self, session=None, **kw):
--> 562 self._data.execute(session, **kw)
563 return self
564
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
369
370 # no more fetch, thus just fire run
--> 371 session.run(self, **kw)
372 # return Tileable or ExecutableTuple itself
373 return self
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
426 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
427 for t in tileables)
--> 428 result = self._sess.run(*tileables, **kw)
429
430 for t in tileables:
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
103 # set number of running cores
104 self.context.set_ncores(kw['n_parallel'])
--> 105 res = self._executor.execute_tileables(tileables, **kw)
106 return res
107
~/Workspace/mars/mars/utils.py in _wrapped(*args, **kwargs)
387 _kernel_mode.eager = False
388 _kernel_mode.eager_count = enter_eager_count + 1
--> 389 return func(*args, **kwargs)
390 finally:
391 _kernel_mode.eager_count -= 1
~/Workspace/mars/mars/utils.py in inner(*args, **kwargs)
481 def inner(*args, **kwargs):
482 with build_mode():
--> 483 return func(*args, **kwargs)
484 return inner
485
~/Workspace/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
840 # build chunk graph, tile will be done during building
841 chunk_graph = chunk_graph_builder.build(
--> 842 tileables, tileable_graph=tileable_graph)
843 tileable_graph = chunk_graph_builder.prev_tileable_graph
844 temp_result_keys = set(result_keys)
~/Workspace/mars/mars/utils.py in _wrapped(*args, **kwargs)
387 _kernel_mode.eager = False
388 _kernel_mode.eager_count = enter_eager_count + 1
--> 389 return func(*args, **kwargs)
390 finally:
391 _kernel_mode.eager_count -= 1
~/Workspace/mars/mars/utils.py in inner(*args, **kwargs)
481 def inner(*args, **kwargs):
482 with build_mode():
--> 483 return func(*args, **kwargs)
484 return inner
485
~/Workspace/mars/mars/tiles.py in build(self, tileables, tileable_graph)
348
349 chunk_graph = super().build(
--> 350 tileables, tileable_graph=tileable_graph)
351 self._iterative_chunk_graphs.append(chunk_graph)
352 if len(self._interrupted_ops) == 0:
~/Workspace/mars/mars/utils.py in _wrapped(*args, **kwargs)
387 _kernel_mode.eager = False
388 _kernel_mode.eager_count = enter_eager_count + 1
--> 389 return func(*args, **kwargs)
390 finally:
391 _kernel_mode.eager_count -= 1
~/Workspace/mars/mars/utils.py in inner(*args, **kwargs)
481 def inner(*args, **kwargs):
482 with build_mode():
--> 483 return func(*args, **kwargs)
484 return inner
485
~/Workspace/mars/mars/tiles.py in build(self, tileables, tileable_graph)
261 # for further execution
262 partial_tiled_chunks = \
--> 263 self._on_tile_failure(tileable_data.op, exc_info)
264 if partial_tiled_chunks is not None and \
265 len(partial_tiled_chunks) > 0:
~/Workspace/mars/mars/tiles.py in inner(op, exc_info)
300 on_tile_failure(op, exc_info)
301 else:
--> 302 raise exc_info[1].with_traceback(exc_info[2]) from None
303 return inner
304
~/Workspace/mars/mars/tiles.py in build(self, tileables, tileable_graph)
241 continue
242 try:
--> 243 tiled = self._tile(tileable_data, tileable_graph)
244 tiled_op.add(tileable_data.op)
245 for t, td in zip(tileable_data.op.outputs, tiled):
~/Workspace/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
336 if any(inp.op in self._interrupted_ops for inp in tileable_data.inputs):
337 raise TilesError('Tile fail due to failure of inputs')
--> 338 return super()._tile(tileable_data, tileable_graph)
339
340 @kernel_mode
~/Workspace/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
199 t._nsplits = o.nsplits
200 elif on_tile is None:
--> 201 tds[0]._inplace_tile()
202 else:
203 tds = on_tile(tileable_data.op.outputs, tds)
~/Workspace/mars/mars/core.py in _inplace_tile(self)
161
162 def _inplace_tile(self):
--> 163 return handler.inplace_tile(self)
164
165 def __getattr__(self, attr):
~/Workspace/mars/mars/tiles.py in inplace_tile(self, to_tile)
134 if not to_tile.is_coarse():
135 return to_tile
--> 136 dispatched = self.dispatch(to_tile.op)
137 self._assign_to([d.data for d in dispatched], to_tile.op.outputs)
138 return to_tile
~/Workspace/mars/mars/utils.py in _wrapped(*args, **kwargs)
387 _kernel_mode.eager = False
388 _kernel_mode.eager_count = enter_eager_count + 1
--> 389 return func(*args, **kwargs)
390 finally:
391 _kernel_mode.eager_count -= 1
~/Workspace/mars/mars/tiles.py in dispatch(self, op)
117 else:
118 try:
--> 119 tiled = op_cls.tile(op)
120 except NotImplementedError as ex:
121 cause = ex
~/Workspace/mars/mars/dataframe/datasource/read_sql.py in tile(cls, op)
370 def tile(cls, op: 'DataFrameReadSQL'):
371 if op.method == 'offset':
--> 372 return cls._tile_offset(op)
373 else:
374 return cls._tile_partition(op)
~/Workspace/mars/mars/dataframe/datasource/read_sql.py in _tile_offset(cls, op)
275 chunk_size = df.extra_params.raw_chunk_size or options.chunk_size
276 if chunk_size is None:
--> 277 chunk_size = (int(options.chunk_store_limit / op.row_memory_usage), df.shape[1])
278 row_chunk_sizes = normalize_chunk_sizes(df.shape, chunk_size)[0]
279 offsets = np.cumsum((0,) + row_chunk_sizes)
OverflowError: cannot convert float infinity to integer
|
OverflowError
|
def _tile_offset(cls, op: "DataFrameReadSQL"):
df = op.outputs[0]
if op.row_memory_usage is not None:
# Data selected
chunk_size = df.extra_params.raw_chunk_size or options.chunk_size
if chunk_size is None:
chunk_size = (
int(options.chunk_store_limit / op.row_memory_usage),
df.shape[1],
)
row_chunk_sizes = normalize_chunk_sizes(df.shape, chunk_size)[0]
else:
# No data selected
row_chunk_sizes = (0,)
offsets = np.cumsum((0,) + row_chunk_sizes)
out_chunks = []
for i, row_size in enumerate(row_chunk_sizes):
chunk_op = op.copy().reset_key()
chunk_op._row_memory_usage = None # no need for chunk
offset = chunk_op._offset = offsets[i]
if df.index_value.has_value():
# range index
index_value = parse_index(
df.index_value.to_pandas()[offset : offsets[i + 1]]
)
else:
index_value = parse_index(
df.index_value.to_pandas(),
op.table_or_sql or str(op.selectable),
op.con,
i,
row_size,
)
out_chunk = chunk_op.new_chunk(
None,
shape=(row_size, df.shape[1]),
columns_value=df.columns_value,
index_value=index_value,
dtypes=df.dtypes,
index=(i, 0),
)
out_chunks.append(out_chunk)
nsplits = (row_chunk_sizes, (df.shape[1],))
new_op = op.copy()
return new_op.new_dataframes(None, chunks=out_chunks, nsplits=nsplits, **df.params)
|
def _tile_offset(cls, op: "DataFrameReadSQL"):
df = op.outputs[0]
chunk_size = df.extra_params.raw_chunk_size or options.chunk_size
if chunk_size is None:
chunk_size = (int(options.chunk_store_limit / op.row_memory_usage), df.shape[1])
row_chunk_sizes = normalize_chunk_sizes(df.shape, chunk_size)[0]
offsets = np.cumsum((0,) + row_chunk_sizes)
out_chunks = []
for i, row_size in enumerate(row_chunk_sizes):
chunk_op = op.copy().reset_key()
chunk_op._row_memory_usage = None # no need for chunk
offset = chunk_op._offset = offsets[i]
if df.index_value.has_value():
# range index
index_value = parse_index(
df.index_value.to_pandas()[offset : offsets[i + 1]]
)
else:
index_value = parse_index(
df.index_value.to_pandas(),
op.table_or_sql or str(op.selectable),
op.con,
i,
row_size,
)
out_chunk = chunk_op.new_chunk(
None,
shape=(row_size, df.shape[1]),
columns_value=df.columns_value,
index_value=index_value,
dtypes=df.dtypes,
index=(i, 0),
)
out_chunks.append(out_chunk)
nsplits = (row_chunk_sizes, (df.shape[1],))
new_op = op.copy()
return new_op.new_dataframes(None, chunks=out_chunks, nsplits=nsplits, **df.params)
|
https://github.com/mars-project/mars/issues/1368
|
In [1]: import mars.dataframe as md
In [7]: import sqlalchemy as sa
In [9]: con = sa.create_engine('sqlite:///database.sqlite', echo=False)
In [10]: df = md.read_sql('loan', con)
In [11]: df.head().execute()
Out[11]:
id member_id loan_amnt funded_amnt funded_amnt_inv term int_rate installment grade sub_grade ... hardship_payoff_balance_amount hardship_last_payment_amount disbursement_method debt_settlement_flag debt_settlement_flag_date settlement_status settlement_date settlement_amount settlement_percentage settlement_term
0 1000 1000 0 36 months 10.71 32.61 B B5 ... Cash N
1 1000 1000 0 36 months 16.08 35.2 F F2 ... Cash N
2 1000 1000 0 36 months 9.45 32.01 B B1 ... Cash N
3 1000 1000 0 36 months 9.64 32.11 B B4 ... Cash N
4 1000 1000 0.004353680261 36 months 11.28 32.88 C C1 ... Cash N
[5 rows x 145 columns]
In [12]: df = md.read_sql("select * from loan where grade='A' and disbursement_method='cash'", con)
In [13]: df.head().execute()
/Users/xuyeqin/Workspace/mars/mars/dataframe/datasource/read_sql.py:277: RuntimeWarning: divide by zero encountered in double_scalars
chunk_size = (int(options.chunk_store_limit / op.row_memory_usage), df.shape[1])
---------------------------------------------------------------------------
OverflowError Traceback (most recent call last)
<ipython-input-13-017f342e9a9d> in <module>
----> 1 df.head().execute()
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
560
561 def execute(self, session=None, **kw):
--> 562 self._data.execute(session, **kw)
563 return self
564
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
369
370 # no more fetch, thus just fire run
--> 371 session.run(self, **kw)
372 # return Tileable or ExecutableTuple itself
373 return self
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
426 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
427 for t in tileables)
--> 428 result = self._sess.run(*tileables, **kw)
429
430 for t in tileables:
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
103 # set number of running cores
104 self.context.set_ncores(kw['n_parallel'])
--> 105 res = self._executor.execute_tileables(tileables, **kw)
106 return res
107
~/Workspace/mars/mars/utils.py in _wrapped(*args, **kwargs)
387 _kernel_mode.eager = False
388 _kernel_mode.eager_count = enter_eager_count + 1
--> 389 return func(*args, **kwargs)
390 finally:
391 _kernel_mode.eager_count -= 1
~/Workspace/mars/mars/utils.py in inner(*args, **kwargs)
481 def inner(*args, **kwargs):
482 with build_mode():
--> 483 return func(*args, **kwargs)
484 return inner
485
~/Workspace/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
840 # build chunk graph, tile will be done during building
841 chunk_graph = chunk_graph_builder.build(
--> 842 tileables, tileable_graph=tileable_graph)
843 tileable_graph = chunk_graph_builder.prev_tileable_graph
844 temp_result_keys = set(result_keys)
~/Workspace/mars/mars/utils.py in _wrapped(*args, **kwargs)
387 _kernel_mode.eager = False
388 _kernel_mode.eager_count = enter_eager_count + 1
--> 389 return func(*args, **kwargs)
390 finally:
391 _kernel_mode.eager_count -= 1
~/Workspace/mars/mars/utils.py in inner(*args, **kwargs)
481 def inner(*args, **kwargs):
482 with build_mode():
--> 483 return func(*args, **kwargs)
484 return inner
485
~/Workspace/mars/mars/tiles.py in build(self, tileables, tileable_graph)
348
349 chunk_graph = super().build(
--> 350 tileables, tileable_graph=tileable_graph)
351 self._iterative_chunk_graphs.append(chunk_graph)
352 if len(self._interrupted_ops) == 0:
~/Workspace/mars/mars/utils.py in _wrapped(*args, **kwargs)
387 _kernel_mode.eager = False
388 _kernel_mode.eager_count = enter_eager_count + 1
--> 389 return func(*args, **kwargs)
390 finally:
391 _kernel_mode.eager_count -= 1
~/Workspace/mars/mars/utils.py in inner(*args, **kwargs)
481 def inner(*args, **kwargs):
482 with build_mode():
--> 483 return func(*args, **kwargs)
484 return inner
485
~/Workspace/mars/mars/tiles.py in build(self, tileables, tileable_graph)
261 # for further execution
262 partial_tiled_chunks = \
--> 263 self._on_tile_failure(tileable_data.op, exc_info)
264 if partial_tiled_chunks is not None and \
265 len(partial_tiled_chunks) > 0:
~/Workspace/mars/mars/tiles.py in inner(op, exc_info)
300 on_tile_failure(op, exc_info)
301 else:
--> 302 raise exc_info[1].with_traceback(exc_info[2]) from None
303 return inner
304
~/Workspace/mars/mars/tiles.py in build(self, tileables, tileable_graph)
241 continue
242 try:
--> 243 tiled = self._tile(tileable_data, tileable_graph)
244 tiled_op.add(tileable_data.op)
245 for t, td in zip(tileable_data.op.outputs, tiled):
~/Workspace/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
336 if any(inp.op in self._interrupted_ops for inp in tileable_data.inputs):
337 raise TilesError('Tile fail due to failure of inputs')
--> 338 return super()._tile(tileable_data, tileable_graph)
339
340 @kernel_mode
~/Workspace/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
199 t._nsplits = o.nsplits
200 elif on_tile is None:
--> 201 tds[0]._inplace_tile()
202 else:
203 tds = on_tile(tileable_data.op.outputs, tds)
~/Workspace/mars/mars/core.py in _inplace_tile(self)
161
162 def _inplace_tile(self):
--> 163 return handler.inplace_tile(self)
164
165 def __getattr__(self, attr):
~/Workspace/mars/mars/tiles.py in inplace_tile(self, to_tile)
134 if not to_tile.is_coarse():
135 return to_tile
--> 136 dispatched = self.dispatch(to_tile.op)
137 self._assign_to([d.data for d in dispatched], to_tile.op.outputs)
138 return to_tile
~/Workspace/mars/mars/utils.py in _wrapped(*args, **kwargs)
387 _kernel_mode.eager = False
388 _kernel_mode.eager_count = enter_eager_count + 1
--> 389 return func(*args, **kwargs)
390 finally:
391 _kernel_mode.eager_count -= 1
~/Workspace/mars/mars/tiles.py in dispatch(self, op)
117 else:
118 try:
--> 119 tiled = op_cls.tile(op)
120 except NotImplementedError as ex:
121 cause = ex
~/Workspace/mars/mars/dataframe/datasource/read_sql.py in tile(cls, op)
370 def tile(cls, op: 'DataFrameReadSQL'):
371 if op.method == 'offset':
--> 372 return cls._tile_offset(op)
373 else:
374 return cls._tile_partition(op)
~/Workspace/mars/mars/dataframe/datasource/read_sql.py in _tile_offset(cls, op)
275 chunk_size = df.extra_params.raw_chunk_size or options.chunk_size
276 if chunk_size is None:
--> 277 chunk_size = (int(options.chunk_store_limit / op.row_memory_usage), df.shape[1])
278 row_chunk_sizes = normalize_chunk_sizes(df.shape, chunk_size)[0]
279 offsets = np.cumsum((0,) + row_chunk_sizes)
OverflowError: cannot convert float infinity to integer
|
OverflowError
|
def analyze_graph(self, **kwargs):
operand_infos = self._operand_infos
chunk_graph = self.get_chunk_graph()
# remove fetch chunk if exists
if any(isinstance(c.op, Fetch) for c in chunk_graph):
chunk_graph = chunk_graph.copy()
for c in list(chunk_graph):
if isinstance(c.op, Fetch):
chunk_graph.remove_node(c)
if len(chunk_graph) == 0:
return
for n in chunk_graph:
k = n.op.key
succ_size = chunk_graph.count_successors(n)
if k not in operand_infos:
operand_infos[k] = dict(
optimize=dict(
depth=0,
demand_depths=(),
successor_size=succ_size,
descendant_size=0,
)
)
else:
operand_infos[k]["optimize"]["successor_size"] = succ_size
worker_slots = self._get_worker_slots()
if not worker_slots:
raise RuntimeError("No worker attached for execution")
self._assigned_workers = set(worker_slots)
analyzer = GraphAnalyzer(chunk_graph, worker_slots)
for k, v in analyzer.calc_depths().items():
operand_infos[k]["optimize"]["depth"] = v
for k, v in analyzer.calc_descendant_sizes().items():
operand_infos[k]["optimize"]["descendant_size"] = v
if kwargs.get("do_placement", True):
logger.debug("Placing initial chunks for graph %s", self._graph_key)
self._assign_initial_workers(analyzer)
|
def analyze_graph(self, **kwargs):
operand_infos = self._operand_infos
chunk_graph = self.get_chunk_graph()
# remove fetch chunk if exists
if any(isinstance(c.op, Fetch) for c in chunk_graph):
chunk_graph = chunk_graph.copy()
for c in list(chunk_graph):
if isinstance(c.op, Fetch):
chunk_graph.remove_node(c)
if len(chunk_graph) == 0:
return
for n in chunk_graph:
k = n.op.key
succ_size = chunk_graph.count_successors(n)
if k not in operand_infos:
operand_infos[k] = dict(
optimize=dict(
depth=0,
demand_depths=(),
successor_size=succ_size,
descendant_size=0,
)
)
else:
operand_infos[k]["optimize"]["successor_size"] = succ_size
worker_slots = self._get_worker_slots()
self._assigned_workers = set(worker_slots)
analyzer = GraphAnalyzer(chunk_graph, worker_slots)
for k, v in analyzer.calc_depths().items():
operand_infos[k]["optimize"]["depth"] = v
for k, v in analyzer.calc_descendant_sizes().items():
operand_infos[k]["optimize"]["descendant_size"] = v
if kwargs.get("do_placement", True):
logger.debug("Placing initial chunks for graph %s", self._graph_key)
self._assign_initial_workers(analyzer)
|
https://github.com/mars-project/mars/issues/1368
|
In [1]: import mars.dataframe as md
In [7]: import sqlalchemy as sa
In [9]: con = sa.create_engine('sqlite:///database.sqlite', echo=False)
In [10]: df = md.read_sql('loan', con)
In [11]: df.head().execute()
Out[11]:
id member_id loan_amnt funded_amnt funded_amnt_inv term int_rate installment grade sub_grade ... hardship_payoff_balance_amount hardship_last_payment_amount disbursement_method debt_settlement_flag debt_settlement_flag_date settlement_status settlement_date settlement_amount settlement_percentage settlement_term
0 1000 1000 0 36 months 10.71 32.61 B B5 ... Cash N
1 1000 1000 0 36 months 16.08 35.2 F F2 ... Cash N
2 1000 1000 0 36 months 9.45 32.01 B B1 ... Cash N
3 1000 1000 0 36 months 9.64 32.11 B B4 ... Cash N
4 1000 1000 0.004353680261 36 months 11.28 32.88 C C1 ... Cash N
[5 rows x 145 columns]
In [12]: df = md.read_sql("select * from loan where grade='A' and disbursement_method='cash'", con)
In [13]: df.head().execute()
/Users/xuyeqin/Workspace/mars/mars/dataframe/datasource/read_sql.py:277: RuntimeWarning: divide by zero encountered in double_scalars
chunk_size = (int(options.chunk_store_limit / op.row_memory_usage), df.shape[1])
---------------------------------------------------------------------------
OverflowError Traceback (most recent call last)
<ipython-input-13-017f342e9a9d> in <module>
----> 1 df.head().execute()
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
560
561 def execute(self, session=None, **kw):
--> 562 self._data.execute(session, **kw)
563 return self
564
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
369
370 # no more fetch, thus just fire run
--> 371 session.run(self, **kw)
372 # return Tileable or ExecutableTuple itself
373 return self
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
426 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
427 for t in tileables)
--> 428 result = self._sess.run(*tileables, **kw)
429
430 for t in tileables:
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
103 # set number of running cores
104 self.context.set_ncores(kw['n_parallel'])
--> 105 res = self._executor.execute_tileables(tileables, **kw)
106 return res
107
~/Workspace/mars/mars/utils.py in _wrapped(*args, **kwargs)
387 _kernel_mode.eager = False
388 _kernel_mode.eager_count = enter_eager_count + 1
--> 389 return func(*args, **kwargs)
390 finally:
391 _kernel_mode.eager_count -= 1
~/Workspace/mars/mars/utils.py in inner(*args, **kwargs)
481 def inner(*args, **kwargs):
482 with build_mode():
--> 483 return func(*args, **kwargs)
484 return inner
485
~/Workspace/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose, name)
840 # build chunk graph, tile will be done during building
841 chunk_graph = chunk_graph_builder.build(
--> 842 tileables, tileable_graph=tileable_graph)
843 tileable_graph = chunk_graph_builder.prev_tileable_graph
844 temp_result_keys = set(result_keys)
~/Workspace/mars/mars/utils.py in _wrapped(*args, **kwargs)
387 _kernel_mode.eager = False
388 _kernel_mode.eager_count = enter_eager_count + 1
--> 389 return func(*args, **kwargs)
390 finally:
391 _kernel_mode.eager_count -= 1
~/Workspace/mars/mars/utils.py in inner(*args, **kwargs)
481 def inner(*args, **kwargs):
482 with build_mode():
--> 483 return func(*args, **kwargs)
484 return inner
485
~/Workspace/mars/mars/tiles.py in build(self, tileables, tileable_graph)
348
349 chunk_graph = super().build(
--> 350 tileables, tileable_graph=tileable_graph)
351 self._iterative_chunk_graphs.append(chunk_graph)
352 if len(self._interrupted_ops) == 0:
~/Workspace/mars/mars/utils.py in _wrapped(*args, **kwargs)
387 _kernel_mode.eager = False
388 _kernel_mode.eager_count = enter_eager_count + 1
--> 389 return func(*args, **kwargs)
390 finally:
391 _kernel_mode.eager_count -= 1
~/Workspace/mars/mars/utils.py in inner(*args, **kwargs)
481 def inner(*args, **kwargs):
482 with build_mode():
--> 483 return func(*args, **kwargs)
484 return inner
485
~/Workspace/mars/mars/tiles.py in build(self, tileables, tileable_graph)
261 # for further execution
262 partial_tiled_chunks = \
--> 263 self._on_tile_failure(tileable_data.op, exc_info)
264 if partial_tiled_chunks is not None and \
265 len(partial_tiled_chunks) > 0:
~/Workspace/mars/mars/tiles.py in inner(op, exc_info)
300 on_tile_failure(op, exc_info)
301 else:
--> 302 raise exc_info[1].with_traceback(exc_info[2]) from None
303 return inner
304
~/Workspace/mars/mars/tiles.py in build(self, tileables, tileable_graph)
241 continue
242 try:
--> 243 tiled = self._tile(tileable_data, tileable_graph)
244 tiled_op.add(tileable_data.op)
245 for t, td in zip(tileable_data.op.outputs, tiled):
~/Workspace/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
336 if any(inp.op in self._interrupted_ops for inp in tileable_data.inputs):
337 raise TilesError('Tile fail due to failure of inputs')
--> 338 return super()._tile(tileable_data, tileable_graph)
339
340 @kernel_mode
~/Workspace/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
199 t._nsplits = o.nsplits
200 elif on_tile is None:
--> 201 tds[0]._inplace_tile()
202 else:
203 tds = on_tile(tileable_data.op.outputs, tds)
~/Workspace/mars/mars/core.py in _inplace_tile(self)
161
162 def _inplace_tile(self):
--> 163 return handler.inplace_tile(self)
164
165 def __getattr__(self, attr):
~/Workspace/mars/mars/tiles.py in inplace_tile(self, to_tile)
134 if not to_tile.is_coarse():
135 return to_tile
--> 136 dispatched = self.dispatch(to_tile.op)
137 self._assign_to([d.data for d in dispatched], to_tile.op.outputs)
138 return to_tile
~/Workspace/mars/mars/utils.py in _wrapped(*args, **kwargs)
387 _kernel_mode.eager = False
388 _kernel_mode.eager_count = enter_eager_count + 1
--> 389 return func(*args, **kwargs)
390 finally:
391 _kernel_mode.eager_count -= 1
~/Workspace/mars/mars/tiles.py in dispatch(self, op)
117 else:
118 try:
--> 119 tiled = op_cls.tile(op)
120 except NotImplementedError as ex:
121 cause = ex
~/Workspace/mars/mars/dataframe/datasource/read_sql.py in tile(cls, op)
370 def tile(cls, op: 'DataFrameReadSQL'):
371 if op.method == 'offset':
--> 372 return cls._tile_offset(op)
373 else:
374 return cls._tile_partition(op)
~/Workspace/mars/mars/dataframe/datasource/read_sql.py in _tile_offset(cls, op)
275 chunk_size = df.extra_params.raw_chunk_size or options.chunk_size
276 if chunk_size is None:
--> 277 chunk_size = (int(options.chunk_store_limit / op.row_memory_usage), df.shape[1])
278 row_chunk_sizes = normalize_chunk_sizes(df.shape, chunk_size)[0]
279 offsets = np.cumsum((0,) + row_chunk_sizes)
OverflowError: cannot convert float infinity to integer
|
OverflowError
|
def build_empty_df(dtypes, index=None):
columns = dtypes.index
# duplicate column may exist,
# so use RangeIndex first
df = pd.DataFrame(columns=pd.RangeIndex(len(columns)), index=index)
for i, d in enumerate(dtypes):
df[i] = pd.Series(dtype=d, index=index)
df.columns = columns
return df
|
def build_empty_df(dtypes, index=None):
columns = dtypes.index
df = pd.DataFrame(columns=columns, index=index)
for c, d in zip(columns, dtypes):
df[c] = pd.Series(dtype=d, index=index)
return df
|
https://github.com/mars-project/mars/issues/1312
|
KeyError
Traceback (most recent call last)
<ipython-input-73-3d10a0dadb7d> in <module>
5 data = pd.merge(data,data.groupby(['c']).size().reset_index(),on = ['c'],how='left')
6 data = pd.merge(data,data.groupby(['d']).size().reset_index(),on = ['d'],how='left')
----> 7 data = pd.merge(data,data.groupby(['e']).size().reset_index(),on = ['3'],how='left')
8 print(data.columns.execute())
D:\kinggsoft\anaconda\lib\site-packages\mars\dataframe\merge\merge.py in merge(df, right, how, on, left_on, right_on, left_index, right_index, sort, suffixes, copy, indicator, strategy, validate)
357 left_index=left_index, right_index=right_index, sort=sort, suffixes=suffixes,
358 copy=copy, indicator=indicator, validate=validate, object_type=ObjectType.dataframe)
--> 359 return op(df, right)
360
361
D:\kinggsoft\anaconda\lib\site-packages\mars\dataframe\merge\merge.py in __call__(self, left, right)
175
176 def __call__(self, left, right):
--> 177 empty_left, empty_right = build_df(left), build_df(right)
178 # this `merge` will check whether the combination of those arguments is valid
179 merged = empty_left.merge(empty_right, how=self.how, on=self.on,
D:\kinggsoft\anaconda\lib\site-packages\mars\dataframe\utils.py in build_df(df_obj, fill_value, size)
442 empty_df = pd.concat([empty_df] * size)
443 # make sure dtypes correct for MultiIndex
--> 444 empty_df = empty_df.astype(dtypes, copy=False)
445 return empty_df
446
D:\kinggsoft\anaconda\lib\site-packages\pandas\core\generic.py in astype(self, dtype, copy, errors, **kwargs)
5863 results.append(
5864 col.astype(
-> 5865 dtype=dtype[col_name], copy=copy, errors=errors, **kwargs
5866 )
5867 )
D:\kinggsoft\anaconda\lib\site-packages\pandas\core\generic.py in astype(self, dtype, copy, errors, **kwargs)
5846 if len(dtype) > 1 or self.name not in dtype:
5847 raise KeyError(
-> 5848 "Only the Series name can be used for "
5849 "the key in Series dtype mappings."
5850 )
KeyError: 'Only the Series name can be used for the key in Series dtype mappings.'
|
KeyError
|
def build_df(df_obj, fill_value=1, size=1):
empty_df = build_empty_df(df_obj.dtypes, index=df_obj.index_value.to_pandas()[:0])
dtypes = empty_df.dtypes
record = [_generate_value(dtype, fill_value) for dtype in empty_df.dtypes]
if isinstance(empty_df.index, pd.MultiIndex):
index = tuple(
_generate_value(level.dtype, fill_value) for level in empty_df.index.levels
)
empty_df.loc[index,] = record
else:
index = _generate_value(empty_df.index.dtype, fill_value)
empty_df.loc[index] = record
empty_df = pd.concat([empty_df] * size)
# make sure dtypes correct for MultiIndex
for i, dtype in enumerate(dtypes.tolist()):
s = empty_df.iloc[:, i]
if s.dtype != dtype:
empty_df.iloc[:, i] = s.astype(dtype)
return empty_df
|
def build_df(df_obj, fill_value=1, size=1):
empty_df = build_empty_df(df_obj.dtypes, index=df_obj.index_value.to_pandas()[:0])
dtypes = empty_df.dtypes
record = [_generate_value(dtype, fill_value) for dtype in empty_df.dtypes]
if isinstance(empty_df.index, pd.MultiIndex):
index = tuple(
_generate_value(level.dtype, fill_value) for level in empty_df.index.levels
)
empty_df.loc[index,] = record
else:
index = _generate_value(empty_df.index.dtype, fill_value)
empty_df.loc[index] = record
empty_df = pd.concat([empty_df] * size)
# make sure dtypes correct for MultiIndex
empty_df = empty_df.astype(dtypes, copy=False)
return empty_df
|
https://github.com/mars-project/mars/issues/1312
|
KeyError
Traceback (most recent call last)
<ipython-input-73-3d10a0dadb7d> in <module>
5 data = pd.merge(data,data.groupby(['c']).size().reset_index(),on = ['c'],how='left')
6 data = pd.merge(data,data.groupby(['d']).size().reset_index(),on = ['d'],how='left')
----> 7 data = pd.merge(data,data.groupby(['e']).size().reset_index(),on = ['3'],how='left')
8 print(data.columns.execute())
D:\kinggsoft\anaconda\lib\site-packages\mars\dataframe\merge\merge.py in merge(df, right, how, on, left_on, right_on, left_index, right_index, sort, suffixes, copy, indicator, strategy, validate)
357 left_index=left_index, right_index=right_index, sort=sort, suffixes=suffixes,
358 copy=copy, indicator=indicator, validate=validate, object_type=ObjectType.dataframe)
--> 359 return op(df, right)
360
361
D:\kinggsoft\anaconda\lib\site-packages\mars\dataframe\merge\merge.py in __call__(self, left, right)
175
176 def __call__(self, left, right):
--> 177 empty_left, empty_right = build_df(left), build_df(right)
178 # this `merge` will check whether the combination of those arguments is valid
179 merged = empty_left.merge(empty_right, how=self.how, on=self.on,
D:\kinggsoft\anaconda\lib\site-packages\mars\dataframe\utils.py in build_df(df_obj, fill_value, size)
442 empty_df = pd.concat([empty_df] * size)
443 # make sure dtypes correct for MultiIndex
--> 444 empty_df = empty_df.astype(dtypes, copy=False)
445 return empty_df
446
D:\kinggsoft\anaconda\lib\site-packages\pandas\core\generic.py in astype(self, dtype, copy, errors, **kwargs)
5863 results.append(
5864 col.astype(
-> 5865 dtype=dtype[col_name], copy=copy, errors=errors, **kwargs
5866 )
5867 )
D:\kinggsoft\anaconda\lib\site-packages\pandas\core\generic.py in astype(self, dtype, copy, errors, **kwargs)
5846 if len(dtype) > 1 or self.name not in dtype:
5847 raise KeyError(
-> 5848 "Only the Series name can be used for "
5849 "the key in Series dtype mappings."
5850 )
KeyError: 'Only the Series name can be used for the key in Series dtype mappings.'
|
KeyError
|
def _set_inputs(self, inputs):
super()._set_inputs(inputs)
if len(self._inputs) == 2:
self._lhs = self._inputs[0]
self._rhs = self._inputs[1]
else:
if isinstance(self._lhs, (DATAFRAME_TYPE, SERIES_TYPE)):
self._lhs = self._inputs[0]
elif pd.api.types.is_scalar(self._lhs):
self._rhs = self._inputs[0]
|
def _set_inputs(self, inputs):
super()._set_inputs(inputs)
if len(self._inputs) == 2:
self._lhs = self._inputs[0]
self._rhs = self._inputs[1]
else:
if isinstance(self._lhs, (DATAFRAME_TYPE, SERIES_TYPE)):
self._lhs = self._inputs[0]
elif np.isscalar(self._lhs):
self._rhs = self._inputs[0]
|
https://github.com/mars-project/mars/issues/1286
|
In [25]: df = md.DataFrame(mt.random.rand(10, 3), chunk_size=3)
In [26]: df.sort_values(0).reset_index(drop=True).execute()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-26-e0c111d55eb4> in <module>
----> 1 df.sort_values(0).reset_index(drop=True).execute()
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
559
560 def execute(self, session=None, **kw):
--> 561 self._data.execute(session, **kw)
562 return self
563
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
430 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
431 for t in tileables)
--> 432 result = self._sess.run(*tileables, **kw)
433
434 for t in tileables:
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
98 # set number of running cores
99 self.context.set_ncores(kw['n_parallel'])
--> 100 res = self._executor.execute_tileables(tileables, **kw)
101 return res
102
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
471 def inner(*args, **kwargs):
472 with build_mode():
--> 473 return func(*args, **kwargs)
474 return inner
475
~/Documents/mars_dev/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose)
817 # build chunk graph, tile will be done during building
818 chunk_graph = chunk_graph_builder.build(
--> 819 tileables, tileable_graph=tileable_graph)
820 tileable_graph = chunk_graph_builder.prev_tileable_graph
821 temp_result_keys = set(result_keys)
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
471 def inner(*args, **kwargs):
472 with build_mode():
--> 473 return func(*args, **kwargs)
474 return inner
475
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
340
341 chunk_graph = super().build(
--> 342 tileables, tileable_graph=tileable_graph)
343 self._iterative_chunk_graphs.append(chunk_graph)
344 if len(self._interrupted_ops) == 0:
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
471 def inner(*args, **kwargs):
472 with build_mode():
--> 473 return func(*args, **kwargs)
474 return inner
475
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
253 # for further execution
254 partial_tiled_chunks = \
--> 255 self._on_tile_failure(tileable_data.op, exc_info)
256 if partial_tiled_chunks is not None and \
257 len(partial_tiled_chunks) > 0:
~/Documents/mars_dev/mars/mars/tiles.py in inner(op, exc_info)
292 on_tile_failure(op, exc_info)
293 else:
--> 294 raise exc_info[1].with_traceback(exc_info[2]) from None
295 return inner
296
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
233 continue
234 try:
--> 235 tiled = self._tile(tileable_data, tileable_graph)
236 tiled_op.add(tileable_data.op)
237 for t, td in zip(tileable_data.op.outputs, tiled):
~/Documents/mars_dev/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
328 if any(inp.op in self._interrupted_ops for inp in tileable_data.inputs):
329 raise TilesError('Tile fail due to failure of inputs')
--> 330 return super()._tile(tileable_data, tileable_graph)
331
332 @kernel_mode
~/Documents/mars_dev/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
191 t._nsplits = o.nsplits
192 elif on_tile is None:
--> 193 tds[0]._inplace_tile()
194 else:
195 tds = on_tile(tileable_data.op.outputs, tds)
~/Documents/mars_dev/mars/mars/core.py in _inplace_tile(self)
160
161 def _inplace_tile(self):
--> 162 return handler.inplace_tile(self)
163
164 def __getattr__(self, attr):
~/Documents/mars_dev/mars/mars/tiles.py in inplace_tile(self, to_tile)
126 if not to_tile.is_coarse():
127 return to_tile
--> 128 dispatched = self.dispatch(to_tile.op)
129 self._assign_to([d.data for d in dispatched], to_tile.op.outputs)
130 return to_tile
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/tiles.py in dispatch(self, op)
113 return self._handlers[op_cls](op)
114 try:
--> 115 return op_cls.tile(op)
116 except NotImplementedError as ex:
117 cause = ex
~/Documents/mars_dev/mars/mars/dataframe/base/reset_index.py in tile(cls, op)
130 def tile(cls, op):
131 if isinstance(op.inputs[0], DATAFRAME_TYPE):
--> 132 return cls._tile_dataframe(op)
133 else:
134 return cls._tile_series(op)
~/Documents/mars_dev/mars/mars/dataframe/base/reset_index.py in _tile_dataframe(cls, op)
101 for c in in_df.chunks:
102 if is_range_index:
--> 103 index_value = parse_index(pd.RangeIndex(cum_range[c.index[0]], cum_range[c.index[0] + 1]))
104 else:
105 index_value = out_df.index_value
~/miniconda3/lib/python3.7/site-packages/pandas/core/indexes/range.py in __new__(cls, start, stop, step, dtype, copy, name)
102 start, stop = 0, start
103 else:
--> 104 stop = ensure_python_int(stop)
105
106 step = ensure_python_int(step) if step is not None else 1
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/common.py in ensure_python_int(value)
200 assert new_value == value
201 except (TypeError, ValueError, AssertionError):
--> 202 raise TypeError(msg.format(type(value), value))
203 return new_value
204
TypeError: Wrong type <class 'numpy.float64'> for value nan
|
TypeError
|
def _tile_scalar(cls, op):
tileable = op.rhs if pd.api.types.is_scalar(op.lhs) else op.lhs
df = op.outputs[0]
out_chunks = []
for chunk in tileable.chunks:
out_op = op.copy().reset_key()
if isinstance(chunk, DATAFRAME_CHUNK_TYPE):
out_chunk = out_op.new_chunk(
[chunk],
shape=chunk.shape,
index=chunk.index,
dtypes=chunk.dtypes,
index_value=chunk.index_value,
columns_value=getattr(chunk, "columns_value"),
)
else:
out_chunk = out_op.new_chunk(
[chunk],
shape=chunk.shape,
index=chunk.index,
dtype=chunk.dtype,
index_value=chunk.index_value,
name=getattr(chunk, "name"),
)
out_chunks.append(out_chunk)
new_op = op.copy()
out = op.outputs[0]
if isinstance(df, SERIES_TYPE):
return new_op.new_seriess(
op.inputs,
df.shape,
nsplits=tileable.nsplits,
dtype=out.dtype,
index_value=df.index_value,
name=df.name,
chunks=out_chunks,
)
else:
return new_op.new_dataframes(
op.inputs,
df.shape,
nsplits=tileable.nsplits,
dtypes=out.dtypes,
index_value=df.index_value,
columns_value=df.columns_value,
chunks=out_chunks,
)
|
def _tile_scalar(cls, op):
tileable = op.rhs if np.isscalar(op.lhs) else op.lhs
df = op.outputs[0]
out_chunks = []
for chunk in tileable.chunks:
out_op = op.copy().reset_key()
if isinstance(chunk, DATAFRAME_CHUNK_TYPE):
out_chunk = out_op.new_chunk(
[chunk],
shape=chunk.shape,
index=chunk.index,
dtypes=chunk.dtypes,
index_value=chunk.index_value,
columns_value=getattr(chunk, "columns_value"),
)
else:
out_chunk = out_op.new_chunk(
[chunk],
shape=chunk.shape,
index=chunk.index,
dtype=chunk.dtype,
index_value=chunk.index_value,
name=getattr(chunk, "name"),
)
out_chunks.append(out_chunk)
new_op = op.copy()
out = op.outputs[0]
if isinstance(df, SERIES_TYPE):
return new_op.new_seriess(
op.inputs,
df.shape,
nsplits=tileable.nsplits,
dtype=out.dtype,
index_value=df.index_value,
name=df.name,
chunks=out_chunks,
)
else:
return new_op.new_dataframes(
op.inputs,
df.shape,
nsplits=tileable.nsplits,
dtypes=out.dtypes,
index_value=df.index_value,
columns_value=df.columns_value,
chunks=out_chunks,
)
|
https://github.com/mars-project/mars/issues/1286
|
In [25]: df = md.DataFrame(mt.random.rand(10, 3), chunk_size=3)
In [26]: df.sort_values(0).reset_index(drop=True).execute()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-26-e0c111d55eb4> in <module>
----> 1 df.sort_values(0).reset_index(drop=True).execute()
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
559
560 def execute(self, session=None, **kw):
--> 561 self._data.execute(session, **kw)
562 return self
563
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
430 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
431 for t in tileables)
--> 432 result = self._sess.run(*tileables, **kw)
433
434 for t in tileables:
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
98 # set number of running cores
99 self.context.set_ncores(kw['n_parallel'])
--> 100 res = self._executor.execute_tileables(tileables, **kw)
101 return res
102
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
471 def inner(*args, **kwargs):
472 with build_mode():
--> 473 return func(*args, **kwargs)
474 return inner
475
~/Documents/mars_dev/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose)
817 # build chunk graph, tile will be done during building
818 chunk_graph = chunk_graph_builder.build(
--> 819 tileables, tileable_graph=tileable_graph)
820 tileable_graph = chunk_graph_builder.prev_tileable_graph
821 temp_result_keys = set(result_keys)
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
471 def inner(*args, **kwargs):
472 with build_mode():
--> 473 return func(*args, **kwargs)
474 return inner
475
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
340
341 chunk_graph = super().build(
--> 342 tileables, tileable_graph=tileable_graph)
343 self._iterative_chunk_graphs.append(chunk_graph)
344 if len(self._interrupted_ops) == 0:
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
471 def inner(*args, **kwargs):
472 with build_mode():
--> 473 return func(*args, **kwargs)
474 return inner
475
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
253 # for further execution
254 partial_tiled_chunks = \
--> 255 self._on_tile_failure(tileable_data.op, exc_info)
256 if partial_tiled_chunks is not None and \
257 len(partial_tiled_chunks) > 0:
~/Documents/mars_dev/mars/mars/tiles.py in inner(op, exc_info)
292 on_tile_failure(op, exc_info)
293 else:
--> 294 raise exc_info[1].with_traceback(exc_info[2]) from None
295 return inner
296
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
233 continue
234 try:
--> 235 tiled = self._tile(tileable_data, tileable_graph)
236 tiled_op.add(tileable_data.op)
237 for t, td in zip(tileable_data.op.outputs, tiled):
~/Documents/mars_dev/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
328 if any(inp.op in self._interrupted_ops for inp in tileable_data.inputs):
329 raise TilesError('Tile fail due to failure of inputs')
--> 330 return super()._tile(tileable_data, tileable_graph)
331
332 @kernel_mode
~/Documents/mars_dev/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
191 t._nsplits = o.nsplits
192 elif on_tile is None:
--> 193 tds[0]._inplace_tile()
194 else:
195 tds = on_tile(tileable_data.op.outputs, tds)
~/Documents/mars_dev/mars/mars/core.py in _inplace_tile(self)
160
161 def _inplace_tile(self):
--> 162 return handler.inplace_tile(self)
163
164 def __getattr__(self, attr):
~/Documents/mars_dev/mars/mars/tiles.py in inplace_tile(self, to_tile)
126 if not to_tile.is_coarse():
127 return to_tile
--> 128 dispatched = self.dispatch(to_tile.op)
129 self._assign_to([d.data for d in dispatched], to_tile.op.outputs)
130 return to_tile
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/tiles.py in dispatch(self, op)
113 return self._handlers[op_cls](op)
114 try:
--> 115 return op_cls.tile(op)
116 except NotImplementedError as ex:
117 cause = ex
~/Documents/mars_dev/mars/mars/dataframe/base/reset_index.py in tile(cls, op)
130 def tile(cls, op):
131 if isinstance(op.inputs[0], DATAFRAME_TYPE):
--> 132 return cls._tile_dataframe(op)
133 else:
134 return cls._tile_series(op)
~/Documents/mars_dev/mars/mars/dataframe/base/reset_index.py in _tile_dataframe(cls, op)
101 for c in in_df.chunks:
102 if is_range_index:
--> 103 index_value = parse_index(pd.RangeIndex(cum_range[c.index[0]], cum_range[c.index[0] + 1]))
104 else:
105 index_value = out_df.index_value
~/miniconda3/lib/python3.7/site-packages/pandas/core/indexes/range.py in __new__(cls, start, stop, step, dtype, copy, name)
102 start, stop = 0, start
103 else:
--> 104 stop = ensure_python_int(stop)
105
106 step = ensure_python_int(step) if step is not None else 1
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/common.py in ensure_python_int(value)
200 assert new_value == value
201 except (TypeError, ValueError, AssertionError):
--> 202 raise TypeError(msg.format(type(value), value))
203 return new_value
204
TypeError: Wrong type <class 'numpy.float64'> for value nan
|
TypeError
|
def execute(cls, ctx, op):
if len(op.inputs) == 2:
df, other = ctx[op.inputs[0].key], ctx[op.inputs[1].key]
if isinstance(op.inputs[0], SERIES_CHUNK_TYPE) and isinstance(
op.inputs[1], DATAFRAME_CHUNK_TYPE
):
df, other = other, df
func_name = getattr(cls, "_rfunc_name")
else:
func_name = getattr(cls, "_func_name")
elif pd.api.types.is_scalar(op.lhs) or isinstance(op.lhs, np.ndarray):
df = ctx[op.rhs.key]
other = op.lhs
func_name = getattr(cls, "_rfunc_name")
else:
df = ctx[op.lhs.key]
other = op.rhs
func_name = getattr(cls, "_func_name")
if op.object_type == ObjectType.dataframe:
kw = dict({"axis": op.axis})
else:
kw = dict()
if op.fill_value is not None:
# comparison function like eq does not have `fill_value`
kw["fill_value"] = op.fill_value
if op.level is not None:
# logical function like and may don't have `level` (for Series type)
kw["level"] = op.level
ctx[op.outputs[0].key] = getattr(df, func_name)(other, **kw)
|
def execute(cls, ctx, op):
if len(op.inputs) == 2:
df, other = ctx[op.inputs[0].key], ctx[op.inputs[1].key]
if isinstance(op.inputs[0], SERIES_CHUNK_TYPE) and isinstance(
op.inputs[1], DATAFRAME_CHUNK_TYPE
):
df, other = other, df
func_name = getattr(cls, "_rfunc_name")
else:
func_name = getattr(cls, "_func_name")
elif np.isscalar(op.lhs) or isinstance(op.lhs, np.ndarray):
df = ctx[op.rhs.key]
other = op.lhs
func_name = getattr(cls, "_rfunc_name")
else:
df = ctx[op.lhs.key]
other = op.rhs
func_name = getattr(cls, "_func_name")
if op.object_type == ObjectType.dataframe:
kw = dict({"axis": op.axis})
else:
kw = dict()
if op.fill_value is not None:
# comparison function like eq does not have `fill_value`
kw["fill_value"] = op.fill_value
if op.level is not None:
# logical function like and may don't have `level` (for Series type)
kw["level"] = op.level
ctx[op.outputs[0].key] = getattr(df, func_name)(other, **kw)
|
https://github.com/mars-project/mars/issues/1286
|
In [25]: df = md.DataFrame(mt.random.rand(10, 3), chunk_size=3)
In [26]: df.sort_values(0).reset_index(drop=True).execute()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-26-e0c111d55eb4> in <module>
----> 1 df.sort_values(0).reset_index(drop=True).execute()
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
559
560 def execute(self, session=None, **kw):
--> 561 self._data.execute(session, **kw)
562 return self
563
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
430 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
431 for t in tileables)
--> 432 result = self._sess.run(*tileables, **kw)
433
434 for t in tileables:
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
98 # set number of running cores
99 self.context.set_ncores(kw['n_parallel'])
--> 100 res = self._executor.execute_tileables(tileables, **kw)
101 return res
102
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
471 def inner(*args, **kwargs):
472 with build_mode():
--> 473 return func(*args, **kwargs)
474 return inner
475
~/Documents/mars_dev/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose)
817 # build chunk graph, tile will be done during building
818 chunk_graph = chunk_graph_builder.build(
--> 819 tileables, tileable_graph=tileable_graph)
820 tileable_graph = chunk_graph_builder.prev_tileable_graph
821 temp_result_keys = set(result_keys)
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
471 def inner(*args, **kwargs):
472 with build_mode():
--> 473 return func(*args, **kwargs)
474 return inner
475
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
340
341 chunk_graph = super().build(
--> 342 tileables, tileable_graph=tileable_graph)
343 self._iterative_chunk_graphs.append(chunk_graph)
344 if len(self._interrupted_ops) == 0:
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
471 def inner(*args, **kwargs):
472 with build_mode():
--> 473 return func(*args, **kwargs)
474 return inner
475
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
253 # for further execution
254 partial_tiled_chunks = \
--> 255 self._on_tile_failure(tileable_data.op, exc_info)
256 if partial_tiled_chunks is not None and \
257 len(partial_tiled_chunks) > 0:
~/Documents/mars_dev/mars/mars/tiles.py in inner(op, exc_info)
292 on_tile_failure(op, exc_info)
293 else:
--> 294 raise exc_info[1].with_traceback(exc_info[2]) from None
295 return inner
296
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
233 continue
234 try:
--> 235 tiled = self._tile(tileable_data, tileable_graph)
236 tiled_op.add(tileable_data.op)
237 for t, td in zip(tileable_data.op.outputs, tiled):
~/Documents/mars_dev/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
328 if any(inp.op in self._interrupted_ops for inp in tileable_data.inputs):
329 raise TilesError('Tile fail due to failure of inputs')
--> 330 return super()._tile(tileable_data, tileable_graph)
331
332 @kernel_mode
~/Documents/mars_dev/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
191 t._nsplits = o.nsplits
192 elif on_tile is None:
--> 193 tds[0]._inplace_tile()
194 else:
195 tds = on_tile(tileable_data.op.outputs, tds)
~/Documents/mars_dev/mars/mars/core.py in _inplace_tile(self)
160
161 def _inplace_tile(self):
--> 162 return handler.inplace_tile(self)
163
164 def __getattr__(self, attr):
~/Documents/mars_dev/mars/mars/tiles.py in inplace_tile(self, to_tile)
126 if not to_tile.is_coarse():
127 return to_tile
--> 128 dispatched = self.dispatch(to_tile.op)
129 self._assign_to([d.data for d in dispatched], to_tile.op.outputs)
130 return to_tile
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/tiles.py in dispatch(self, op)
113 return self._handlers[op_cls](op)
114 try:
--> 115 return op_cls.tile(op)
116 except NotImplementedError as ex:
117 cause = ex
~/Documents/mars_dev/mars/mars/dataframe/base/reset_index.py in tile(cls, op)
130 def tile(cls, op):
131 if isinstance(op.inputs[0], DATAFRAME_TYPE):
--> 132 return cls._tile_dataframe(op)
133 else:
134 return cls._tile_series(op)
~/Documents/mars_dev/mars/mars/dataframe/base/reset_index.py in _tile_dataframe(cls, op)
101 for c in in_df.chunks:
102 if is_range_index:
--> 103 index_value = parse_index(pd.RangeIndex(cum_range[c.index[0]], cum_range[c.index[0] + 1]))
104 else:
105 index_value = out_df.index_value
~/miniconda3/lib/python3.7/site-packages/pandas/core/indexes/range.py in __new__(cls, start, stop, step, dtype, copy, name)
102 start, stop = 0, start
103 else:
--> 104 stop = ensure_python_int(stop)
105
106 step = ensure_python_int(step) if step is not None else 1
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/common.py in ensure_python_int(value)
200 assert new_value == value
201 except (TypeError, ValueError, AssertionError):
--> 202 raise TypeError(msg.format(type(value), value))
203 return new_value
204
TypeError: Wrong type <class 'numpy.float64'> for value nan
|
TypeError
|
def _calc_properties(cls, x1, x2=None, axis="columns"):
if isinstance(x1, (DATAFRAME_TYPE, DATAFRAME_CHUNK_TYPE)) and (
x2 is None or pd.api.types.is_scalar(x2) or isinstance(x2, TENSOR_TYPE)
):
if x2 is None:
dtypes = x1.dtypes
elif pd.api.types.is_scalar(x2):
dtypes = infer_dtypes(
x1.dtypes, pd.Series(np.array(x2).dtype), cls._operator
)
elif x1.dtypes is not None and isinstance(x2, TENSOR_TYPE):
dtypes = pd.Series(
[infer_dtype(dt, x2.dtype, cls._operator) for dt in x1.dtypes],
index=x1.dtypes.index,
)
else:
dtypes = x1.dtypes
return {
"shape": x1.shape,
"dtypes": dtypes,
"columns_value": x1.columns_value,
"index_value": x1.index_value,
}
if isinstance(x1, (SERIES_TYPE, SERIES_CHUNK_TYPE)) and (
x2 is None or pd.api.types.is_scalar(x2) or isinstance(x2, TENSOR_TYPE)
):
x2_dtype = x2.dtype if hasattr(x2, "dtype") else type(x2)
dtype = infer_dtype(x1.dtype, np.dtype(x2_dtype), cls._operator)
return {"shape": x1.shape, "dtype": dtype, "index_value": x1.index_value}
if isinstance(x1, (DATAFRAME_TYPE, DATAFRAME_CHUNK_TYPE)) and isinstance(
x2, (DATAFRAME_TYPE, DATAFRAME_CHUNK_TYPE)
):
index_shape, column_shape, dtypes, columns, index = (
np.nan,
np.nan,
None,
None,
None,
)
if (
x1.columns_value is not None
and x2.columns_value is not None
and x1.columns_value.key == x2.columns_value.key
):
dtypes = pd.Series(
[
infer_dtype(dt1, dt2, cls._operator)
for dt1, dt2 in zip(x1.dtypes, x2.dtypes)
],
index=x1.dtypes.index,
)
columns = copy.copy(x1.columns_value)
columns.value.should_be_monotonic = False
column_shape = len(dtypes)
elif x1.dtypes is not None and x2.dtypes is not None:
dtypes = infer_dtypes(x1.dtypes, x2.dtypes, cls._operator)
columns = parse_index(dtypes.index, store_data=True)
columns.value.should_be_monotonic = True
column_shape = len(dtypes)
if x1.index_value is not None and x2.index_value is not None:
if x1.index_value.key == x2.index_value.key:
index = copy.copy(x1.index_value)
index.value.should_be_monotonic = False
index_shape = x1.shape[0]
else:
index = infer_index_value(x1.index_value, x2.index_value)
index.value.should_be_monotonic = True
if index.key == x1.index_value.key == x2.index_value.key and (
not np.isnan(x1.shape[0]) or not np.isnan(x2.shape[0])
):
index_shape = (
x1.shape[0] if not np.isnan(x1.shape[0]) else x2.shape[0]
)
return {
"shape": (index_shape, column_shape),
"dtypes": dtypes,
"columns_value": columns,
"index_value": index,
}
if isinstance(x1, (DATAFRAME_TYPE, DATAFRAME_CHUNK_TYPE)) and isinstance(
x2, (SERIES_TYPE, SERIES_CHUNK_TYPE)
):
if axis == "columns" or axis == 1:
index_shape = x1.shape[0]
index = x1.index_value
column_shape, dtypes, columns = np.nan, None, None
if x1.columns_value is not None and x1.index_value is not None:
if x1.columns_value.key == x2.index_value.key:
dtypes = pd.Series(
[infer_dtype(dt, x2.dtype, cls._operator) for dt in x1.dtypes],
index=x1.dtypes.index,
)
columns = copy.copy(x1.columns_value)
columns.value.should_be_monotonic = False
column_shape = len(dtypes)
else: # pragma: no cover
dtypes = x1.dtypes # FIXME
columns = infer_index_value(x1.columns_value, x2.index_value)
columns.value.should_be_monotonic = True
column_shape = np.nan
else:
assert axis == "index" or axis == 0
column_shape = x1.shape[1]
columns = x1.columns_value
dtypes = x1.dtypes
index_shape, index = np.nan, None
if x1.index_value is not None and x1.index_value is not None:
if x1.index_value.key == x2.index_value.key:
dtypes = pd.Series(
[infer_dtype(dt, x2.dtype, cls._operator) for dt in x1.dtypes],
index=x1.dtypes.index,
)
index = copy.copy(x1.index_value)
index.value.should_be_monotonic = False
index_shape = x1.shape[0]
else:
if x1.dtypes is not None:
dtypes = pd.Series(
[
infer_dtype(dt, x2.dtype, cls._operator)
for dt in x1.dtypes
],
index=x1.dtypes.index,
)
index = infer_index_value(x1.index_value, x2.index_value)
index.value.should_be_monotonic = True
index_shape = np.nan
return {
"shape": (index_shape, column_shape),
"dtypes": dtypes,
"columns_value": columns,
"index_value": index,
}
if isinstance(x1, (SERIES_TYPE, SERIES_CHUNK_TYPE)) and isinstance(
x2, (SERIES_TYPE, SERIES_CHUNK_TYPE)
):
index_shape, dtype, index = np.nan, None, None
dtype = infer_dtype(x1.dtype, x2.dtype, cls._operator)
if x1.index_value is not None and x2.index_value is not None:
if x1.index_value.key == x2.index_value.key:
index = copy.copy(x1.index_value)
index.value.should_be_monotonic = False
index_shape = x1.shape[0]
else:
index = infer_index_value(x1.index_value, x2.index_value)
index.value.should_be_monotonic = True
if index.key == x1.index_value.key == x2.index_value.key and (
not np.isnan(x1.shape[0]) or not np.isnan(x2.shape[0])
):
index_shape = (
x1.shape[0] if not np.isnan(x1.shape[0]) else x2.shape[0]
)
return {"shape": (index_shape,), "dtype": dtype, "index_value": index}
raise NotImplementedError("Unknown combination of parameters")
|
def _calc_properties(cls, x1, x2=None, axis="columns"):
if isinstance(x1, (DATAFRAME_TYPE, DATAFRAME_CHUNK_TYPE)) and (
x2 is None or np.isscalar(x2) or isinstance(x2, TENSOR_TYPE)
):
if x2 is None:
dtypes = x1.dtypes
elif np.isscalar(x2):
dtypes = infer_dtypes(
x1.dtypes, pd.Series(np.array(x2).dtype), cls._operator
)
elif x1.dtypes is not None and isinstance(x2, TENSOR_TYPE):
dtypes = pd.Series(
[infer_dtype(dt, x2.dtype, cls._operator) for dt in x1.dtypes],
index=x1.dtypes.index,
)
else:
dtypes = x1.dtypes
return {
"shape": x1.shape,
"dtypes": dtypes,
"columns_value": x1.columns_value,
"index_value": x1.index_value,
}
if isinstance(x1, (SERIES_TYPE, SERIES_CHUNK_TYPE)) and (
x2 is None or np.isscalar(x2) or isinstance(x2, TENSOR_TYPE)
):
x2_dtype = x2.dtype if hasattr(x2, "dtype") else type(x2)
dtype = infer_dtype(x1.dtype, np.dtype(x2_dtype), cls._operator)
return {"shape": x1.shape, "dtype": dtype, "index_value": x1.index_value}
if isinstance(x1, (DATAFRAME_TYPE, DATAFRAME_CHUNK_TYPE)) and isinstance(
x2, (DATAFRAME_TYPE, DATAFRAME_CHUNK_TYPE)
):
index_shape, column_shape, dtypes, columns, index = (
np.nan,
np.nan,
None,
None,
None,
)
if (
x1.columns_value is not None
and x2.columns_value is not None
and x1.columns_value.key == x2.columns_value.key
):
dtypes = pd.Series(
[
infer_dtype(dt1, dt2, cls._operator)
for dt1, dt2 in zip(x1.dtypes, x2.dtypes)
],
index=x1.dtypes.index,
)
columns = copy.copy(x1.columns_value)
columns.value.should_be_monotonic = False
column_shape = len(dtypes)
elif x1.dtypes is not None and x2.dtypes is not None:
dtypes = infer_dtypes(x1.dtypes, x2.dtypes, cls._operator)
columns = parse_index(dtypes.index, store_data=True)
columns.value.should_be_monotonic = True
column_shape = len(dtypes)
if x1.index_value is not None and x2.index_value is not None:
if x1.index_value.key == x2.index_value.key:
index = copy.copy(x1.index_value)
index.value.should_be_monotonic = False
index_shape = x1.shape[0]
else:
index = infer_index_value(x1.index_value, x2.index_value)
index.value.should_be_monotonic = True
if index.key == x1.index_value.key == x2.index_value.key and (
not np.isnan(x1.shape[0]) or not np.isnan(x2.shape[0])
):
index_shape = (
x1.shape[0] if not np.isnan(x1.shape[0]) else x2.shape[0]
)
return {
"shape": (index_shape, column_shape),
"dtypes": dtypes,
"columns_value": columns,
"index_value": index,
}
if isinstance(x1, (DATAFRAME_TYPE, DATAFRAME_CHUNK_TYPE)) and isinstance(
x2, (SERIES_TYPE, SERIES_CHUNK_TYPE)
):
if axis == "columns" or axis == 1:
index_shape = x1.shape[0]
index = x1.index_value
column_shape, dtypes, columns = np.nan, None, None
if x1.columns_value is not None and x1.index_value is not None:
if x1.columns_value.key == x2.index_value.key:
dtypes = pd.Series(
[infer_dtype(dt, x2.dtype, cls._operator) for dt in x1.dtypes],
index=x1.dtypes.index,
)
columns = copy.copy(x1.columns_value)
columns.value.should_be_monotonic = False
column_shape = len(dtypes)
else: # pragma: no cover
dtypes = x1.dtypes # FIXME
columns = infer_index_value(x1.columns_value, x2.index_value)
columns.value.should_be_monotonic = True
column_shape = np.nan
else:
assert axis == "index" or axis == 0
column_shape = x1.shape[1]
columns = x1.columns_value
dtypes = x1.dtypes
index_shape, index = np.nan, None
if x1.index_value is not None and x1.index_value is not None:
if x1.index_value.key == x2.index_value.key:
dtypes = pd.Series(
[infer_dtype(dt, x2.dtype, cls._operator) for dt in x1.dtypes],
index=x1.dtypes.index,
)
index = copy.copy(x1.index_value)
index.value.should_be_monotonic = False
index_shape = x1.shape[0]
else:
if x1.dtypes is not None:
dtypes = pd.Series(
[
infer_dtype(dt, x2.dtype, cls._operator)
for dt in x1.dtypes
],
index=x1.dtypes.index,
)
index = infer_index_value(x1.index_value, x2.index_value)
index.value.should_be_monotonic = True
index_shape = np.nan
return {
"shape": (index_shape, column_shape),
"dtypes": dtypes,
"columns_value": columns,
"index_value": index,
}
if isinstance(x1, (SERIES_TYPE, SERIES_CHUNK_TYPE)) and isinstance(
x2, (SERIES_TYPE, SERIES_CHUNK_TYPE)
):
index_shape, dtype, index = np.nan, None, None
dtype = infer_dtype(x1.dtype, x2.dtype, cls._operator)
if x1.index_value is not None and x2.index_value is not None:
if x1.index_value.key == x2.index_value.key:
index = copy.copy(x1.index_value)
index.value.should_be_monotonic = False
index_shape = x1.shape[0]
else:
index = infer_index_value(x1.index_value, x2.index_value)
index.value.should_be_monotonic = True
if index.key == x1.index_value.key == x2.index_value.key and (
not np.isnan(x1.shape[0]) or not np.isnan(x2.shape[0])
):
index_shape = (
x1.shape[0] if not np.isnan(x1.shape[0]) else x2.shape[0]
)
return {"shape": (index_shape,), "dtype": dtype, "index_value": index}
raise NotImplementedError("Unknown combination of parameters")
|
https://github.com/mars-project/mars/issues/1286
|
In [25]: df = md.DataFrame(mt.random.rand(10, 3), chunk_size=3)
In [26]: df.sort_values(0).reset_index(drop=True).execute()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-26-e0c111d55eb4> in <module>
----> 1 df.sort_values(0).reset_index(drop=True).execute()
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
559
560 def execute(self, session=None, **kw):
--> 561 self._data.execute(session, **kw)
562 return self
563
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
430 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
431 for t in tileables)
--> 432 result = self._sess.run(*tileables, **kw)
433
434 for t in tileables:
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
98 # set number of running cores
99 self.context.set_ncores(kw['n_parallel'])
--> 100 res = self._executor.execute_tileables(tileables, **kw)
101 return res
102
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
471 def inner(*args, **kwargs):
472 with build_mode():
--> 473 return func(*args, **kwargs)
474 return inner
475
~/Documents/mars_dev/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose)
817 # build chunk graph, tile will be done during building
818 chunk_graph = chunk_graph_builder.build(
--> 819 tileables, tileable_graph=tileable_graph)
820 tileable_graph = chunk_graph_builder.prev_tileable_graph
821 temp_result_keys = set(result_keys)
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
471 def inner(*args, **kwargs):
472 with build_mode():
--> 473 return func(*args, **kwargs)
474 return inner
475
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
340
341 chunk_graph = super().build(
--> 342 tileables, tileable_graph=tileable_graph)
343 self._iterative_chunk_graphs.append(chunk_graph)
344 if len(self._interrupted_ops) == 0:
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
471 def inner(*args, **kwargs):
472 with build_mode():
--> 473 return func(*args, **kwargs)
474 return inner
475
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
253 # for further execution
254 partial_tiled_chunks = \
--> 255 self._on_tile_failure(tileable_data.op, exc_info)
256 if partial_tiled_chunks is not None and \
257 len(partial_tiled_chunks) > 0:
~/Documents/mars_dev/mars/mars/tiles.py in inner(op, exc_info)
292 on_tile_failure(op, exc_info)
293 else:
--> 294 raise exc_info[1].with_traceback(exc_info[2]) from None
295 return inner
296
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
233 continue
234 try:
--> 235 tiled = self._tile(tileable_data, tileable_graph)
236 tiled_op.add(tileable_data.op)
237 for t, td in zip(tileable_data.op.outputs, tiled):
~/Documents/mars_dev/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
328 if any(inp.op in self._interrupted_ops for inp in tileable_data.inputs):
329 raise TilesError('Tile fail due to failure of inputs')
--> 330 return super()._tile(tileable_data, tileable_graph)
331
332 @kernel_mode
~/Documents/mars_dev/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
191 t._nsplits = o.nsplits
192 elif on_tile is None:
--> 193 tds[0]._inplace_tile()
194 else:
195 tds = on_tile(tileable_data.op.outputs, tds)
~/Documents/mars_dev/mars/mars/core.py in _inplace_tile(self)
160
161 def _inplace_tile(self):
--> 162 return handler.inplace_tile(self)
163
164 def __getattr__(self, attr):
~/Documents/mars_dev/mars/mars/tiles.py in inplace_tile(self, to_tile)
126 if not to_tile.is_coarse():
127 return to_tile
--> 128 dispatched = self.dispatch(to_tile.op)
129 self._assign_to([d.data for d in dispatched], to_tile.op.outputs)
130 return to_tile
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/tiles.py in dispatch(self, op)
113 return self._handlers[op_cls](op)
114 try:
--> 115 return op_cls.tile(op)
116 except NotImplementedError as ex:
117 cause = ex
~/Documents/mars_dev/mars/mars/dataframe/base/reset_index.py in tile(cls, op)
130 def tile(cls, op):
131 if isinstance(op.inputs[0], DATAFRAME_TYPE):
--> 132 return cls._tile_dataframe(op)
133 else:
134 return cls._tile_series(op)
~/Documents/mars_dev/mars/mars/dataframe/base/reset_index.py in _tile_dataframe(cls, op)
101 for c in in_df.chunks:
102 if is_range_index:
--> 103 index_value = parse_index(pd.RangeIndex(cum_range[c.index[0]], cum_range[c.index[0] + 1]))
104 else:
105 index_value = out_df.index_value
~/miniconda3/lib/python3.7/site-packages/pandas/core/indexes/range.py in __new__(cls, start, stop, step, dtype, copy, name)
102 start, stop = 0, start
103 else:
--> 104 stop = ensure_python_int(stop)
105
106 step = ensure_python_int(step) if step is not None else 1
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/common.py in ensure_python_int(value)
200 assert new_value == value
201 except (TypeError, ValueError, AssertionError):
--> 202 raise TypeError(msg.format(type(value), value))
203 return new_value
204
TypeError: Wrong type <class 'numpy.float64'> for value nan
|
TypeError
|
def _process_input(x):
if isinstance(x, (DATAFRAME_TYPE, SERIES_TYPE)) or pd.api.types.is_scalar(x):
return x
elif isinstance(x, pd.Series):
return Series(x)
elif isinstance(x, pd.DataFrame):
return DataFrame(x)
elif isinstance(x, (list, tuple, np.ndarray, TENSOR_TYPE)):
return astensor(x)
raise NotImplementedError
|
def _process_input(x):
if isinstance(x, (DATAFRAME_TYPE, SERIES_TYPE)) or np.isscalar(x):
return x
elif isinstance(x, pd.Series):
return Series(x)
elif isinstance(x, pd.DataFrame):
return DataFrame(x)
elif isinstance(x, (list, tuple, np.ndarray, TENSOR_TYPE)):
return astensor(x)
raise NotImplementedError
|
https://github.com/mars-project/mars/issues/1286
|
In [25]: df = md.DataFrame(mt.random.rand(10, 3), chunk_size=3)
In [26]: df.sort_values(0).reset_index(drop=True).execute()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-26-e0c111d55eb4> in <module>
----> 1 df.sort_values(0).reset_index(drop=True).execute()
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
559
560 def execute(self, session=None, **kw):
--> 561 self._data.execute(session, **kw)
562 return self
563
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
430 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
431 for t in tileables)
--> 432 result = self._sess.run(*tileables, **kw)
433
434 for t in tileables:
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
98 # set number of running cores
99 self.context.set_ncores(kw['n_parallel'])
--> 100 res = self._executor.execute_tileables(tileables, **kw)
101 return res
102
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
471 def inner(*args, **kwargs):
472 with build_mode():
--> 473 return func(*args, **kwargs)
474 return inner
475
~/Documents/mars_dev/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose)
817 # build chunk graph, tile will be done during building
818 chunk_graph = chunk_graph_builder.build(
--> 819 tileables, tileable_graph=tileable_graph)
820 tileable_graph = chunk_graph_builder.prev_tileable_graph
821 temp_result_keys = set(result_keys)
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
471 def inner(*args, **kwargs):
472 with build_mode():
--> 473 return func(*args, **kwargs)
474 return inner
475
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
340
341 chunk_graph = super().build(
--> 342 tileables, tileable_graph=tileable_graph)
343 self._iterative_chunk_graphs.append(chunk_graph)
344 if len(self._interrupted_ops) == 0:
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
471 def inner(*args, **kwargs):
472 with build_mode():
--> 473 return func(*args, **kwargs)
474 return inner
475
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
253 # for further execution
254 partial_tiled_chunks = \
--> 255 self._on_tile_failure(tileable_data.op, exc_info)
256 if partial_tiled_chunks is not None and \
257 len(partial_tiled_chunks) > 0:
~/Documents/mars_dev/mars/mars/tiles.py in inner(op, exc_info)
292 on_tile_failure(op, exc_info)
293 else:
--> 294 raise exc_info[1].with_traceback(exc_info[2]) from None
295 return inner
296
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
233 continue
234 try:
--> 235 tiled = self._tile(tileable_data, tileable_graph)
236 tiled_op.add(tileable_data.op)
237 for t, td in zip(tileable_data.op.outputs, tiled):
~/Documents/mars_dev/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
328 if any(inp.op in self._interrupted_ops for inp in tileable_data.inputs):
329 raise TilesError('Tile fail due to failure of inputs')
--> 330 return super()._tile(tileable_data, tileable_graph)
331
332 @kernel_mode
~/Documents/mars_dev/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
191 t._nsplits = o.nsplits
192 elif on_tile is None:
--> 193 tds[0]._inplace_tile()
194 else:
195 tds = on_tile(tileable_data.op.outputs, tds)
~/Documents/mars_dev/mars/mars/core.py in _inplace_tile(self)
160
161 def _inplace_tile(self):
--> 162 return handler.inplace_tile(self)
163
164 def __getattr__(self, attr):
~/Documents/mars_dev/mars/mars/tiles.py in inplace_tile(self, to_tile)
126 if not to_tile.is_coarse():
127 return to_tile
--> 128 dispatched = self.dispatch(to_tile.op)
129 self._assign_to([d.data for d in dispatched], to_tile.op.outputs)
130 return to_tile
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/tiles.py in dispatch(self, op)
113 return self._handlers[op_cls](op)
114 try:
--> 115 return op_cls.tile(op)
116 except NotImplementedError as ex:
117 cause = ex
~/Documents/mars_dev/mars/mars/dataframe/base/reset_index.py in tile(cls, op)
130 def tile(cls, op):
131 if isinstance(op.inputs[0], DATAFRAME_TYPE):
--> 132 return cls._tile_dataframe(op)
133 else:
134 return cls._tile_series(op)
~/Documents/mars_dev/mars/mars/dataframe/base/reset_index.py in _tile_dataframe(cls, op)
101 for c in in_df.chunks:
102 if is_range_index:
--> 103 index_value = parse_index(pd.RangeIndex(cum_range[c.index[0]], cum_range[c.index[0] + 1]))
104 else:
105 index_value = out_df.index_value
~/miniconda3/lib/python3.7/site-packages/pandas/core/indexes/range.py in __new__(cls, start, stop, step, dtype, copy, name)
102 start, stop = 0, start
103 else:
--> 104 stop = ensure_python_int(stop)
105
106 step = ensure_python_int(step) if step is not None else 1
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/common.py in ensure_python_int(value)
200 assert new_value == value
201 except (TypeError, ValueError, AssertionError):
--> 202 raise TypeError(msg.format(type(value), value))
203 return new_value
204
TypeError: Wrong type <class 'numpy.float64'> for value nan
|
TypeError
|
def _call(self, x1, x2):
self._check_inputs(x1, x2)
if isinstance(x1, DATAFRAME_TYPE) or isinstance(x2, DATAFRAME_TYPE):
df1, df2 = (x1, x2) if isinstance(x1, DATAFRAME_TYPE) else (x2, x1)
setattr(self, "_object_type", ObjectType.dataframe)
kw = self._calc_properties(df1, df2, axis=self.axis)
if not pd.api.types.is_scalar(df2):
return self.new_dataframe([x1, x2], **kw)
else:
return self.new_dataframe([df1], **kw)
if isinstance(x1, SERIES_TYPE) or isinstance(x2, SERIES_TYPE):
s1, s2 = (x1, x2) if isinstance(x1, SERIES_TYPE) else (x2, x1)
setattr(self, "_object_type", ObjectType.series)
kw = self._calc_properties(s1, s2)
if not pd.api.types.is_scalar(s2):
return self.new_series([x1, x2], **kw)
else:
return self.new_series([s1], **kw)
raise NotImplementedError("Only support add dataframe, series or scalar for now")
|
def _call(self, x1, x2):
self._check_inputs(x1, x2)
if isinstance(x1, DATAFRAME_TYPE) or isinstance(x2, DATAFRAME_TYPE):
df1, df2 = (x1, x2) if isinstance(x1, DATAFRAME_TYPE) else (x2, x1)
setattr(self, "_object_type", ObjectType.dataframe)
kw = self._calc_properties(df1, df2, axis=self.axis)
if not np.isscalar(df2):
return self.new_dataframe([x1, x2], **kw)
else:
return self.new_dataframe([df1], **kw)
if isinstance(x1, SERIES_TYPE) or isinstance(x2, SERIES_TYPE):
s1, s2 = (x1, x2) if isinstance(x1, SERIES_TYPE) else (x2, x1)
setattr(self, "_object_type", ObjectType.series)
kw = self._calc_properties(s1, s2)
if not np.isscalar(s2):
return self.new_series([x1, x2], **kw)
else:
return self.new_series([s1], **kw)
raise NotImplementedError("Only support add dataframe, series or scalar for now")
|
https://github.com/mars-project/mars/issues/1286
|
In [25]: df = md.DataFrame(mt.random.rand(10, 3), chunk_size=3)
In [26]: df.sort_values(0).reset_index(drop=True).execute()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-26-e0c111d55eb4> in <module>
----> 1 df.sort_values(0).reset_index(drop=True).execute()
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
559
560 def execute(self, session=None, **kw):
--> 561 self._data.execute(session, **kw)
562 return self
563
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
430 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
431 for t in tileables)
--> 432 result = self._sess.run(*tileables, **kw)
433
434 for t in tileables:
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
98 # set number of running cores
99 self.context.set_ncores(kw['n_parallel'])
--> 100 res = self._executor.execute_tileables(tileables, **kw)
101 return res
102
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
471 def inner(*args, **kwargs):
472 with build_mode():
--> 473 return func(*args, **kwargs)
474 return inner
475
~/Documents/mars_dev/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose)
817 # build chunk graph, tile will be done during building
818 chunk_graph = chunk_graph_builder.build(
--> 819 tileables, tileable_graph=tileable_graph)
820 tileable_graph = chunk_graph_builder.prev_tileable_graph
821 temp_result_keys = set(result_keys)
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
471 def inner(*args, **kwargs):
472 with build_mode():
--> 473 return func(*args, **kwargs)
474 return inner
475
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
340
341 chunk_graph = super().build(
--> 342 tileables, tileable_graph=tileable_graph)
343 self._iterative_chunk_graphs.append(chunk_graph)
344 if len(self._interrupted_ops) == 0:
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
471 def inner(*args, **kwargs):
472 with build_mode():
--> 473 return func(*args, **kwargs)
474 return inner
475
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
253 # for further execution
254 partial_tiled_chunks = \
--> 255 self._on_tile_failure(tileable_data.op, exc_info)
256 if partial_tiled_chunks is not None and \
257 len(partial_tiled_chunks) > 0:
~/Documents/mars_dev/mars/mars/tiles.py in inner(op, exc_info)
292 on_tile_failure(op, exc_info)
293 else:
--> 294 raise exc_info[1].with_traceback(exc_info[2]) from None
295 return inner
296
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
233 continue
234 try:
--> 235 tiled = self._tile(tileable_data, tileable_graph)
236 tiled_op.add(tileable_data.op)
237 for t, td in zip(tileable_data.op.outputs, tiled):
~/Documents/mars_dev/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
328 if any(inp.op in self._interrupted_ops for inp in tileable_data.inputs):
329 raise TilesError('Tile fail due to failure of inputs')
--> 330 return super()._tile(tileable_data, tileable_graph)
331
332 @kernel_mode
~/Documents/mars_dev/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
191 t._nsplits = o.nsplits
192 elif on_tile is None:
--> 193 tds[0]._inplace_tile()
194 else:
195 tds = on_tile(tileable_data.op.outputs, tds)
~/Documents/mars_dev/mars/mars/core.py in _inplace_tile(self)
160
161 def _inplace_tile(self):
--> 162 return handler.inplace_tile(self)
163
164 def __getattr__(self, attr):
~/Documents/mars_dev/mars/mars/tiles.py in inplace_tile(self, to_tile)
126 if not to_tile.is_coarse():
127 return to_tile
--> 128 dispatched = self.dispatch(to_tile.op)
129 self._assign_to([d.data for d in dispatched], to_tile.op.outputs)
130 return to_tile
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/tiles.py in dispatch(self, op)
113 return self._handlers[op_cls](op)
114 try:
--> 115 return op_cls.tile(op)
116 except NotImplementedError as ex:
117 cause = ex
~/Documents/mars_dev/mars/mars/dataframe/base/reset_index.py in tile(cls, op)
130 def tile(cls, op):
131 if isinstance(op.inputs[0], DATAFRAME_TYPE):
--> 132 return cls._tile_dataframe(op)
133 else:
134 return cls._tile_series(op)
~/Documents/mars_dev/mars/mars/dataframe/base/reset_index.py in _tile_dataframe(cls, op)
101 for c in in_df.chunks:
102 if is_range_index:
--> 103 index_value = parse_index(pd.RangeIndex(cum_range[c.index[0]], cum_range[c.index[0] + 1]))
104 else:
105 index_value = out_df.index_value
~/miniconda3/lib/python3.7/site-packages/pandas/core/indexes/range.py in __new__(cls, start, stop, step, dtype, copy, name)
102 start, stop = 0, start
103 else:
--> 104 stop = ensure_python_int(stop)
105
106 step = ensure_python_int(step) if step is not None else 1
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/common.py in ensure_python_int(value)
200 assert new_value == value
201 except (TypeError, ValueError, AssertionError):
--> 202 raise TypeError(msg.format(type(value), value))
203 return new_value
204
TypeError: Wrong type <class 'numpy.float64'> for value nan
|
TypeError
|
def _tile_dataframe(cls, op):
in_df = op.inputs[0]
out_df = op.outputs[0]
added_columns_num = len(out_df.dtypes) - len(in_df.dtypes)
out_chunks = []
index_has_value = out_df.index_value.has_value()
chunk_has_nan = any(np.isnan(s) for s in in_df.nsplits[0])
cum_range = np.cumsum((0,) + in_df.nsplits[0])
for c in in_df.chunks:
if index_has_value:
if chunk_has_nan:
index_value = parse_index(pd.RangeIndex(-1))
else:
index_value = parse_index(
pd.RangeIndex(cum_range[c.index[0]], cum_range[c.index[0] + 1])
)
else:
index_value = out_df.index_value
if c.index[1] == 0:
chunk_op = op.copy().reset_key()
dtypes = out_df.dtypes[: (added_columns_num + len(c.dtypes))]
columns_value = parse_index(dtypes.index)
new_chunk = chunk_op.new_chunk(
[c],
shape=(c.shape[0], c.shape[1] + added_columns_num),
index=c.index,
index_value=index_value,
columns_value=columns_value,
dtypes=dtypes,
)
else:
chunk_op = op.copy().reset_key()
chunk_op._drop = True
new_chunk = chunk_op.new_chunk(
[c],
shape=c.shape,
index_value=index_value,
index=c.index,
columns_value=c.columns_value,
dtypes=c.dtypes,
)
out_chunks.append(new_chunk)
if not index_has_value or chunk_has_nan:
if isinstance(out_df.index_value._index_value, IndexValue.RangeIndex):
out_chunks = standardize_range_index(out_chunks)
new_op = op.copy()
columns_splits = list(in_df.nsplits[1])
columns_splits[0] += added_columns_num
nsplits = (in_df.nsplits[0], tuple(columns_splits))
return new_op.new_dataframes(
op.inputs,
out_df.shape,
nsplits=nsplits,
chunks=out_chunks,
dtypes=out_df.dtypes,
index_value=out_df.index_value,
columns_value=out_df.columns_value,
)
|
def _tile_dataframe(cls, op):
in_df = op.inputs[0]
out_df = op.outputs[0]
added_columns_num = len(out_df.dtypes) - len(in_df.dtypes)
out_chunks = []
is_range_index = out_df.index_value.has_value()
cum_range = np.cumsum((0,) + in_df.nsplits[0])
for c in in_df.chunks:
if is_range_index:
index_value = parse_index(
pd.RangeIndex(cum_range[c.index[0]], cum_range[c.index[0] + 1])
)
else:
index_value = out_df.index_value
if c.index[1] == 0:
chunk_op = op.copy().reset_key()
dtypes = out_df.dtypes[: (added_columns_num + len(c.dtypes))]
columns_value = parse_index(dtypes.index)
new_chunk = chunk_op.new_chunk(
[c],
shape=(c.shape[0], c.shape[1] + added_columns_num),
index=c.index,
index_value=index_value,
columns_value=columns_value,
dtypes=dtypes,
)
else:
chunk_op = op.copy().reset_key()
chunk_op._drop = True
new_chunk = chunk_op.new_chunk(
[c],
shape=c.shape,
index_value=index_value,
index=c.index,
columns_value=c.columns_value,
dtypes=c.dtypes,
)
out_chunks.append(new_chunk)
if not is_range_index and isinstance(
out_df.index_value._index_value, IndexValue.RangeIndex
):
out_chunks = standardize_range_index(out_chunks)
new_op = op.copy()
columns_splits = list(in_df.nsplits[1])
columns_splits[0] += added_columns_num
nsplits = (in_df.nsplits[0], tuple(columns_splits))
return new_op.new_dataframes(
op.inputs,
out_df.shape,
nsplits=nsplits,
chunks=out_chunks,
dtypes=out_df.dtypes,
index_value=out_df.index_value,
columns_value=out_df.columns_value,
)
|
https://github.com/mars-project/mars/issues/1286
|
In [25]: df = md.DataFrame(mt.random.rand(10, 3), chunk_size=3)
In [26]: df.sort_values(0).reset_index(drop=True).execute()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-26-e0c111d55eb4> in <module>
----> 1 df.sort_values(0).reset_index(drop=True).execute()
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
559
560 def execute(self, session=None, **kw):
--> 561 self._data.execute(session, **kw)
562 return self
563
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
430 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
431 for t in tileables)
--> 432 result = self._sess.run(*tileables, **kw)
433
434 for t in tileables:
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
98 # set number of running cores
99 self.context.set_ncores(kw['n_parallel'])
--> 100 res = self._executor.execute_tileables(tileables, **kw)
101 return res
102
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
471 def inner(*args, **kwargs):
472 with build_mode():
--> 473 return func(*args, **kwargs)
474 return inner
475
~/Documents/mars_dev/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose)
817 # build chunk graph, tile will be done during building
818 chunk_graph = chunk_graph_builder.build(
--> 819 tileables, tileable_graph=tileable_graph)
820 tileable_graph = chunk_graph_builder.prev_tileable_graph
821 temp_result_keys = set(result_keys)
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
471 def inner(*args, **kwargs):
472 with build_mode():
--> 473 return func(*args, **kwargs)
474 return inner
475
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
340
341 chunk_graph = super().build(
--> 342 tileables, tileable_graph=tileable_graph)
343 self._iterative_chunk_graphs.append(chunk_graph)
344 if len(self._interrupted_ops) == 0:
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
471 def inner(*args, **kwargs):
472 with build_mode():
--> 473 return func(*args, **kwargs)
474 return inner
475
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
253 # for further execution
254 partial_tiled_chunks = \
--> 255 self._on_tile_failure(tileable_data.op, exc_info)
256 if partial_tiled_chunks is not None and \
257 len(partial_tiled_chunks) > 0:
~/Documents/mars_dev/mars/mars/tiles.py in inner(op, exc_info)
292 on_tile_failure(op, exc_info)
293 else:
--> 294 raise exc_info[1].with_traceback(exc_info[2]) from None
295 return inner
296
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
233 continue
234 try:
--> 235 tiled = self._tile(tileable_data, tileable_graph)
236 tiled_op.add(tileable_data.op)
237 for t, td in zip(tileable_data.op.outputs, tiled):
~/Documents/mars_dev/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
328 if any(inp.op in self._interrupted_ops for inp in tileable_data.inputs):
329 raise TilesError('Tile fail due to failure of inputs')
--> 330 return super()._tile(tileable_data, tileable_graph)
331
332 @kernel_mode
~/Documents/mars_dev/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
191 t._nsplits = o.nsplits
192 elif on_tile is None:
--> 193 tds[0]._inplace_tile()
194 else:
195 tds = on_tile(tileable_data.op.outputs, tds)
~/Documents/mars_dev/mars/mars/core.py in _inplace_tile(self)
160
161 def _inplace_tile(self):
--> 162 return handler.inplace_tile(self)
163
164 def __getattr__(self, attr):
~/Documents/mars_dev/mars/mars/tiles.py in inplace_tile(self, to_tile)
126 if not to_tile.is_coarse():
127 return to_tile
--> 128 dispatched = self.dispatch(to_tile.op)
129 self._assign_to([d.data for d in dispatched], to_tile.op.outputs)
130 return to_tile
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/tiles.py in dispatch(self, op)
113 return self._handlers[op_cls](op)
114 try:
--> 115 return op_cls.tile(op)
116 except NotImplementedError as ex:
117 cause = ex
~/Documents/mars_dev/mars/mars/dataframe/base/reset_index.py in tile(cls, op)
130 def tile(cls, op):
131 if isinstance(op.inputs[0], DATAFRAME_TYPE):
--> 132 return cls._tile_dataframe(op)
133 else:
134 return cls._tile_series(op)
~/Documents/mars_dev/mars/mars/dataframe/base/reset_index.py in _tile_dataframe(cls, op)
101 for c in in_df.chunks:
102 if is_range_index:
--> 103 index_value = parse_index(pd.RangeIndex(cum_range[c.index[0]], cum_range[c.index[0] + 1]))
104 else:
105 index_value = out_df.index_value
~/miniconda3/lib/python3.7/site-packages/pandas/core/indexes/range.py in __new__(cls, start, stop, step, dtype, copy, name)
102 start, stop = 0, start
103 else:
--> 104 stop = ensure_python_int(stop)
105
106 step = ensure_python_int(step) if step is not None else 1
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/common.py in ensure_python_int(value)
200 assert new_value == value
201 except (TypeError, ValueError, AssertionError):
--> 202 raise TypeError(msg.format(type(value), value))
203 return new_value
204
TypeError: Wrong type <class 'numpy.float64'> for value nan
|
TypeError
|
def _call_dataframe(self, a):
if self.drop:
shape = a.shape
columns_value = a.columns_value
dtypes = a.dtypes
range_value = -1 if np.isnan(a.shape[0]) else a.shape[0]
index_value = parse_index(pd.RangeIndex(range_value))
else:
empty_df = build_empty_df(a.dtypes)
empty_df.index = a.index_value.to_pandas()[:0]
empty_df = empty_df.reset_index(
level=self.level, col_level=self.col_level, col_fill=self.col_fill
)
shape = (a.shape[0], len(empty_df.columns))
columns_value = parse_index(empty_df.columns, store_data=True)
dtypes = empty_df.dtypes
index_value = self._get_out_index(empty_df, shape)
return self.new_dataframe(
[a],
shape=shape,
columns_value=columns_value,
index_value=index_value,
dtypes=dtypes,
)
|
def _call_dataframe(self, a):
if self.drop:
shape = a.shape
columns_value = a.columns_value
dtypes = a.dtypes
range_value = -1 if np.isnan(a.shape[0]) else a.shape[0]
index_value = parse_index(pd.RangeIndex(range_value))
else:
empty_df = build_empty_df(a.dtypes)
empty_df.index = a.index_value.to_pandas()[:0]
empty_df = empty_df.reset_index(
level=self.level, col_level=self.col_level, col_fill=self.col_fill
)
shape = (a.shape[0], len(empty_df.columns))
columns_value = parse_index(empty_df.columns)
dtypes = empty_df.dtypes
index_value = self._get_out_index(empty_df, shape)
return self.new_dataframe(
[a],
shape=shape,
columns_value=columns_value,
index_value=index_value,
dtypes=dtypes,
)
|
https://github.com/mars-project/mars/issues/1286
|
In [25]: df = md.DataFrame(mt.random.rand(10, 3), chunk_size=3)
In [26]: df.sort_values(0).reset_index(drop=True).execute()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-26-e0c111d55eb4> in <module>
----> 1 df.sort_values(0).reset_index(drop=True).execute()
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
559
560 def execute(self, session=None, **kw):
--> 561 self._data.execute(session, **kw)
562 return self
563
~/Documents/mars_dev/mars/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
430 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
431 for t in tileables)
--> 432 result = self._sess.run(*tileables, **kw)
433
434 for t in tileables:
~/Documents/mars_dev/mars/mars/session.py in run(self, *tileables, **kw)
98 # set number of running cores
99 self.context.set_ncores(kw['n_parallel'])
--> 100 res = self._executor.execute_tileables(tileables, **kw)
101 return res
102
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
471 def inner(*args, **kwargs):
472 with build_mode():
--> 473 return func(*args, **kwargs)
474 return inner
475
~/Documents/mars_dev/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose)
817 # build chunk graph, tile will be done during building
818 chunk_graph = chunk_graph_builder.build(
--> 819 tileables, tileable_graph=tileable_graph)
820 tileable_graph = chunk_graph_builder.prev_tileable_graph
821 temp_result_keys = set(result_keys)
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
471 def inner(*args, **kwargs):
472 with build_mode():
--> 473 return func(*args, **kwargs)
474 return inner
475
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
340
341 chunk_graph = super().build(
--> 342 tileables, tileable_graph=tileable_graph)
343 self._iterative_chunk_graphs.append(chunk_graph)
344 if len(self._interrupted_ops) == 0:
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/utils.py in inner(*args, **kwargs)
471 def inner(*args, **kwargs):
472 with build_mode():
--> 473 return func(*args, **kwargs)
474 return inner
475
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
253 # for further execution
254 partial_tiled_chunks = \
--> 255 self._on_tile_failure(tileable_data.op, exc_info)
256 if partial_tiled_chunks is not None and \
257 len(partial_tiled_chunks) > 0:
~/Documents/mars_dev/mars/mars/tiles.py in inner(op, exc_info)
292 on_tile_failure(op, exc_info)
293 else:
--> 294 raise exc_info[1].with_traceback(exc_info[2]) from None
295 return inner
296
~/Documents/mars_dev/mars/mars/tiles.py in build(self, tileables, tileable_graph)
233 continue
234 try:
--> 235 tiled = self._tile(tileable_data, tileable_graph)
236 tiled_op.add(tileable_data.op)
237 for t, td in zip(tileable_data.op.outputs, tiled):
~/Documents/mars_dev/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
328 if any(inp.op in self._interrupted_ops for inp in tileable_data.inputs):
329 raise TilesError('Tile fail due to failure of inputs')
--> 330 return super()._tile(tileable_data, tileable_graph)
331
332 @kernel_mode
~/Documents/mars_dev/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
191 t._nsplits = o.nsplits
192 elif on_tile is None:
--> 193 tds[0]._inplace_tile()
194 else:
195 tds = on_tile(tileable_data.op.outputs, tds)
~/Documents/mars_dev/mars/mars/core.py in _inplace_tile(self)
160
161 def _inplace_tile(self):
--> 162 return handler.inplace_tile(self)
163
164 def __getattr__(self, attr):
~/Documents/mars_dev/mars/mars/tiles.py in inplace_tile(self, to_tile)
126 if not to_tile.is_coarse():
127 return to_tile
--> 128 dispatched = self.dispatch(to_tile.op)
129 self._assign_to([d.data for d in dispatched], to_tile.op.outputs)
130 return to_tile
~/Documents/mars_dev/mars/mars/utils.py in _wrapped(*args, **kwargs)
383 _kernel_mode.eager = False
384 _kernel_mode.eager_count = enter_eager_count + 1
--> 385 return func(*args, **kwargs)
386 finally:
387 _kernel_mode.eager_count -= 1
~/Documents/mars_dev/mars/mars/tiles.py in dispatch(self, op)
113 return self._handlers[op_cls](op)
114 try:
--> 115 return op_cls.tile(op)
116 except NotImplementedError as ex:
117 cause = ex
~/Documents/mars_dev/mars/mars/dataframe/base/reset_index.py in tile(cls, op)
130 def tile(cls, op):
131 if isinstance(op.inputs[0], DATAFRAME_TYPE):
--> 132 return cls._tile_dataframe(op)
133 else:
134 return cls._tile_series(op)
~/Documents/mars_dev/mars/mars/dataframe/base/reset_index.py in _tile_dataframe(cls, op)
101 for c in in_df.chunks:
102 if is_range_index:
--> 103 index_value = parse_index(pd.RangeIndex(cum_range[c.index[0]], cum_range[c.index[0] + 1]))
104 else:
105 index_value = out_df.index_value
~/miniconda3/lib/python3.7/site-packages/pandas/core/indexes/range.py in __new__(cls, start, stop, step, dtype, copy, name)
102 start, stop = 0, start
103 else:
--> 104 stop = ensure_python_int(stop)
105
106 step = ensure_python_int(step) if step is not None else 1
~/miniconda3/lib/python3.7/site-packages/pandas/core/dtypes/common.py in ensure_python_int(value)
200 assert new_value == value
201 except (TypeError, ValueError, AssertionError):
--> 202 raise TypeError(msg.format(type(value), value))
203 return new_value
204
TypeError: Wrong type <class 'numpy.float64'> for value nan
|
TypeError
|
def calc_data_size(dt):
if dt is None:
return 0
if isinstance(dt, tuple):
return sum(calc_data_size(c) for c in dt)
if hasattr(dt, "nbytes"):
return max(sys.getsizeof(dt), dt.nbytes)
if hasattr(dt, "shape") and len(dt.shape) == 0:
return 0
if hasattr(dt, "memory_usage") or hasattr(dt, "groupby_obj"):
return sys.getsizeof(dt)
if hasattr(dt, "dtypes") and hasattr(dt, "shape"):
return dt.shape[0] * sum(dtype.itemsize for dtype in dt.dtypes)
if hasattr(dt, "dtype") and hasattr(dt, "shape"):
return dt.shape[0] * dt.dtype.itemsize
# object chunk
return sys.getsizeof(dt)
|
def calc_data_size(dt):
if dt is None:
return 0
if isinstance(dt, tuple):
return sum(calc_data_size(c) for c in dt)
if hasattr(dt, "nbytes"):
return max(sys.getsizeof(dt), dt.nbytes)
if hasattr(dt, "shape") and len(dt.shape) == 0:
return 0
if hasattr(dt, "memory_usage"):
return sys.getsizeof(dt)
if hasattr(dt, "dtypes") and hasattr(dt, "shape"):
return dt.shape[0] * sum(dtype.itemsize for dtype in dt.dtypes)
if hasattr(dt, "dtype") and hasattr(dt, "shape"):
return dt.shape[0] * dt.dtype.itemsize
# object chunk
return sys.getsizeof(dt)
|
https://github.com/mars-project/mars/issues/1306
|
Attempt 1: Unexpected error AttributeError occurred in executing operand b4e4bc5f7b31094fb234d9ea949251a1 in 0.0.0.0:46150
Traceback (most recent call last):
File "/Users/wenjun.swj/Code/mars/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/worker/calc.py", line 271, in <lambda>
.then(lambda context_dict: _start_calc(context_dict)) \
File "/Users/wenjun.swj/Code/mars/mars/worker/calc.py", line 244, in _start_calc
return self._calc_results(session_id, graph_key, graph, context_dict, chunk_targets)
File "/Users/wenjun.swj/Code/mars/mars/worker/calc.py", line 192, in _calc_results
result_sizes.append(calc_data_size(v))
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 302, in calc_data_size
return dt.shape[0] * sum(dtype.itemsize for dtype in dt.dtypes)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 302, in <genexpr>
return dt.shape[0] * sum(dtype.itemsize for dtype in dt.dtypes)
AttributeError: 'str' object has no attribute 'itemsize'
|
AttributeError
|
def estimate_graph_finish_time(
self, session_id, graph_key, calc_fetch=True, base_time=None
):
"""
Calc predictions for given chunk graph
"""
session_graph_key = (session_id, graph_key)
if session_graph_key not in self._graph_records:
return
graph_record = self._graph_records[session_graph_key]
graph = graph_record.graph
ops = set(type(c.op).__name__ for c in graph if not isinstance(c.op, Fetch))
op_calc_key = ("calc_speed." + list(ops)[0]) if len(ops) == 1 else None
stats = defaultdict(lambda: dict(count=0))
if self._status_ref:
stats.update(
self._status_ref.get_stats(
[
"disk_read_speed",
"disk_write_speed",
"net_transfer_speed",
op_calc_key,
]
)
)
if op_calc_key not in stats:
return None
if stats[op_calc_key]["count"] < options.optimize.min_stats_count:
return None
if abs(stats[op_calc_key]["count"]) < 1e-6:
return None
input_size = 0
net_size = 0
disk_size = 0
base_time = base_time or time.time()
if calc_fetch:
for c in graph:
if not isinstance(c.op, Fetch):
break
try:
data_size = calc_data_size(c)
except (AttributeError, TypeError, ValueError):
data_size = 0
input_size += data_size
data_locations = self.storage_client.get_data_locations(
session_id, [c.key]
)[0]
if (0, DataStorageDevice.VINEYARD) in data_locations or (
0,
DataStorageDevice.SHARED_MEMORY,
) in data_locations: # pragma: no cover
continue
elif (0, DataStorageDevice.DISK) in data_locations:
disk_size += data_size
else:
net_size += data_size
if stats["net_transfer_speed"]["count"] >= options.optimize.min_stats_count:
base_time += net_size * 1.0 / stats["net_transfer_speed"]["mean"]
if stats["disk_read_speed"]["count"] >= options.optimize.min_stats_count:
base_time += disk_size * 1.0 / stats["disk_read_speed"]["mean"]
else:
base_time += disk_size * 1.0 / options.optimize.default_disk_io_speed
est_finish_time = base_time + input_size * 1.0 / stats[op_calc_key]["mean"]
graph_record.est_finish_time = est_finish_time
self._status_ref.update_stats(
dict(
min_est_finish_time=min(
rec.est_finish_time for rec in self._graph_records.values()
),
max_est_finish_time=max(
rec.est_finish_time for rec in self._graph_records.values()
),
),
_tell=True,
_wait=False,
)
self.ref().estimate_graph_finish_time(session_id, graph_key, _tell=True, _delay=1)
|
def estimate_graph_finish_time(
self, session_id, graph_key, calc_fetch=True, base_time=None
):
"""
Calc predictions for given chunk graph
"""
session_graph_key = (session_id, graph_key)
if session_graph_key not in self._graph_records:
return
graph_record = self._graph_records[session_graph_key]
graph = graph_record.graph
ops = set(type(c.op).__name__ for c in graph if not isinstance(c.op, Fetch))
op_calc_key = ("calc_speed." + list(ops)[0]) if len(ops) == 1 else None
stats = defaultdict(lambda: dict(count=0))
if self._status_ref:
stats.update(
self._status_ref.get_stats(
[
"disk_read_speed",
"disk_write_speed",
"net_transfer_speed",
op_calc_key,
]
)
)
if op_calc_key not in stats:
return None
if stats[op_calc_key]["count"] < options.optimize.min_stats_count:
return None
if abs(stats[op_calc_key]["count"]) < 1e-6:
return None
input_size = 0
net_size = 0
disk_size = 0
base_time = base_time or time.time()
if calc_fetch:
for c in graph:
if not isinstance(c.op, Fetch):
break
data_size = calc_data_size(c)
input_size += data_size
data_locations = self.storage_client.get_data_locations(
session_id, [c.key]
)[0]
if (0, DataStorageDevice.VINEYARD) in data_locations or (
0,
DataStorageDevice.SHARED_MEMORY,
) in data_locations: # pragma: no cover
continue
elif (0, DataStorageDevice.DISK) in data_locations:
disk_size += data_size
else:
net_size += data_size
if stats["net_transfer_speed"]["count"] >= options.optimize.min_stats_count:
base_time += net_size * 1.0 / stats["net_transfer_speed"]["mean"]
if stats["disk_read_speed"]["count"] >= options.optimize.min_stats_count:
base_time += disk_size * 1.0 / stats["disk_read_speed"]["mean"]
else:
base_time += disk_size * 1.0 / options.optimize.default_disk_io_speed
est_finish_time = base_time + input_size * 1.0 / stats[op_calc_key]["mean"]
graph_record.est_finish_time = est_finish_time
self._status_ref.update_stats(
dict(
min_est_finish_time=min(
rec.est_finish_time for rec in self._graph_records.values()
),
max_est_finish_time=max(
rec.est_finish_time for rec in self._graph_records.values()
),
),
_tell=True,
_wait=False,
)
self.ref().estimate_graph_finish_time(session_id, graph_key, _tell=True, _delay=1)
|
https://github.com/mars-project/mars/issues/1306
|
Attempt 1: Unexpected error AttributeError occurred in executing operand b4e4bc5f7b31094fb234d9ea949251a1 in 0.0.0.0:46150
Traceback (most recent call last):
File "/Users/wenjun.swj/Code/mars/mars/promise.py", line 100, in _wrapped
result = func(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/worker/calc.py", line 271, in <lambda>
.then(lambda context_dict: _start_calc(context_dict)) \
File "/Users/wenjun.swj/Code/mars/mars/worker/calc.py", line 244, in _start_calc
return self._calc_results(session_id, graph_key, graph, context_dict, chunk_targets)
File "/Users/wenjun.swj/Code/mars/mars/worker/calc.py", line 192, in _calc_results
result_sizes.append(calc_data_size(v))
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 302, in calc_data_size
return dt.shape[0] * sum(dtype.itemsize for dtype in dt.dtypes)
File "/Users/wenjun.swj/Code/mars/mars/utils.py", line 302, in <genexpr>
return dt.shape[0] * sum(dtype.itemsize for dtype in dt.dtypes)
AttributeError: 'str' object has no attribute 'itemsize'
|
AttributeError
|
def tile(cls, op):
x = astensor(op.input)
axis = op.axis
ord = op.ord
keepdims = op.keepdims
axis_chunk_shapes = tuple(x.chunk_shape[i] for i in axis)
can_apply_norm = all(s == 1 for s in axis_chunk_shapes)
if can_apply_norm:
axis_set = set(axis)
get_shape = lambda shape: tuple(
s if i not in axis_set else 1
for i, s in enumerate(shape)
if i not in axis_set or keepdims
)
out_chunk_shape = get_shape(x.chunk_shape)
out_chunks = []
for idx in itertools.product(*[range(s) for s in out_chunk_shape]):
idx_iter = iter(idx)
in_idx = tuple(
0 if i in axis_set and not keepdims else next(idx_iter)
for i in range(x.ndim)
)
c = x.cix[in_idx]
chunk_op = op.copy().reset_key()
out_chunk = chunk_op.new_chunk([c], shape=get_shape(c.shape), index=idx)
out_chunks.append(out_chunk)
nsplits = [
tuple(
c.shape[i]
for c in out_chunks
if all(idx == 0 for j, idx in enumerate(c.index) if j != i)
)
for i in range(len(out_chunks[0].shape))
]
new_op = op.copy()
return new_op.new_tensors(
op.inputs, op.outputs[0].shape, chunks=out_chunks, nsplits=nsplits
)
r = cls._norm(x.astype(op.outputs[0].dtype), ord, axis, keepdims)
recursive_tile(r)
new_op = op.copy()
return new_op.new_tensors(
op.inputs, op.outputs[0].shape, chunks=r.chunks, nsplits=r.nsplits
)
|
def tile(cls, op):
x = op.input
axis = op.axis
ord = op.ord
keepdims = op.keepdims
axis_chunk_shapes = tuple(x.chunk_shape[i] for i in axis)
can_apply_norm = all(s == 1 for s in axis_chunk_shapes)
if can_apply_norm:
axis_set = set(axis)
get_shape = lambda shape: tuple(
s if i not in axis_set else 1
for i, s in enumerate(shape)
if i not in axis_set or keepdims
)
out_chunk_shape = get_shape(x.chunk_shape)
out_chunks = []
for idx in itertools.product(*[range(s) for s in out_chunk_shape]):
idx_iter = iter(idx)
in_idx = tuple(
0 if i in axis_set and not keepdims else next(idx_iter)
for i in range(x.ndim)
)
c = x.cix[in_idx]
chunk_op = op.copy().reset_key()
out_chunk = chunk_op.new_chunk([c], shape=get_shape(c.shape), index=idx)
out_chunks.append(out_chunk)
nsplits = [
tuple(
c.shape[i]
for c in out_chunks
if all(idx == 0 for j, idx in enumerate(c.index) if j != i)
)
for i in range(len(out_chunks[0].shape))
]
new_op = op.copy()
return new_op.new_tensors(
op.inputs, op.outputs[0].shape, chunks=out_chunks, nsplits=nsplits
)
r = cls._norm(x.astype(op.outputs[0].dtype), ord, axis, keepdims)
recursive_tile(r)
new_op = op.copy()
return new_op.new_tensors(
op.inputs, op.outputs[0].shape, chunks=r.chunks, nsplits=r.nsplits
)
|
https://github.com/mars-project/mars/issues/1301
|
In [2]: import mars.tensor as mt
In [3]: t = mt.random.rand(10, 10, chunk_size=5)
In [4]: mt.linalg.norm(t).execute()
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-4-900a2d2bec75> in <module>
----> 1 mt.linalg.norm(t).execute()
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
559
560 def execute(self, session=None, **kw):
--> 561 self._data.execute(session, **kw)
562 return self
563
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
368
369 # no more fetch, thus just fire run
--> 370 session.run(self, **kw)
371 # return Tileable or ExecutableTuple itself
372 return self
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
404 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
405 for t in tileables)
--> 406 result = self._sess.run(*tileables, **kw)
407
408 for t in tileables:
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
102 # set number of running cores
103 self.context.set_ncores(kw['n_parallel'])
--> 104 res = self._executor.execute_tileables(tileables, **kw)
105 return res
106
~/Workspace/mars/mars/utils.py in _wrapped(*args, **kwargs)
387 _kernel_mode.eager = False
388 _kernel_mode.eager_count = enter_eager_count + 1
--> 389 return func(*args, **kwargs)
390 finally:
391 _kernel_mode.eager_count -= 1
~/Workspace/mars/mars/utils.py in inner(*args, **kwargs)
481 def inner(*args, **kwargs):
482 with build_mode():
--> 483 return func(*args, **kwargs)
484 return inner
485
~/Workspace/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose)
825 # build chunk graph, tile will be done during building
826 chunk_graph = chunk_graph_builder.build(
--> 827 tileables, tileable_graph=tileable_graph)
828 tileable_graph = chunk_graph_builder.prev_tileable_graph
829 temp_result_keys = set(result_keys)
~/Workspace/mars/mars/utils.py in _wrapped(*args, **kwargs)
387 _kernel_mode.eager = False
388 _kernel_mode.eager_count = enter_eager_count + 1
--> 389 return func(*args, **kwargs)
390 finally:
391 _kernel_mode.eager_count -= 1
~/Workspace/mars/mars/utils.py in inner(*args, **kwargs)
481 def inner(*args, **kwargs):
482 with build_mode():
--> 483 return func(*args, **kwargs)
484 return inner
485
~/Workspace/mars/mars/tiles.py in build(self, tileables, tileable_graph)
340
341 chunk_graph = super().build(
--> 342 tileables, tileable_graph=tileable_graph)
343 self._iterative_chunk_graphs.append(chunk_graph)
344 if len(self._interrupted_ops) == 0:
~/Workspace/mars/mars/utils.py in _wrapped(*args, **kwargs)
387 _kernel_mode.eager = False
388 _kernel_mode.eager_count = enter_eager_count + 1
--> 389 return func(*args, **kwargs)
390 finally:
391 _kernel_mode.eager_count -= 1
~/Workspace/mars/mars/utils.py in inner(*args, **kwargs)
481 def inner(*args, **kwargs):
482 with build_mode():
--> 483 return func(*args, **kwargs)
484 return inner
485
~/Workspace/mars/mars/tiles.py in build(self, tileables, tileable_graph)
253 # for further execution
254 partial_tiled_chunks = \
--> 255 self._on_tile_failure(tileable_data.op, exc_info)
256 if partial_tiled_chunks is not None and \
257 len(partial_tiled_chunks) > 0:
~/Workspace/mars/mars/tiles.py in inner(op, exc_info)
292 on_tile_failure(op, exc_info)
293 else:
--> 294 raise exc_info[1].with_traceback(exc_info[2]) from None
295 return inner
296
~/Workspace/mars/mars/tiles.py in build(self, tileables, tileable_graph)
233 continue
234 try:
--> 235 tiled = self._tile(tileable_data, tileable_graph)
236 tiled_op.add(tileable_data.op)
237 for t, td in zip(tileable_data.op.outputs, tiled):
~/Workspace/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
328 if any(inp.op in self._interrupted_ops for inp in tileable_data.inputs):
329 raise TilesError('Tile fail due to failure of inputs')
--> 330 return super()._tile(tileable_data, tileable_graph)
331
332 @kernel_mode
~/Workspace/mars/mars/tiles.py in _tile(self, tileable_data, tileable_graph)
191 t._nsplits = o.nsplits
192 elif on_tile is None:
--> 193 tds[0]._inplace_tile()
194 else:
195 tds = on_tile(tileable_data.op.outputs, tds)
~/Workspace/mars/mars/core.py in _inplace_tile(self)
160
161 def _inplace_tile(self):
--> 162 return handler.inplace_tile(self)
163
164 def __getattr__(self, attr):
~/Workspace/mars/mars/tiles.py in inplace_tile(self, to_tile)
126 if not to_tile.is_coarse():
127 return to_tile
--> 128 dispatched = self.dispatch(to_tile.op)
129 self._assign_to([d.data for d in dispatched], to_tile.op.outputs)
130 return to_tile
~/Workspace/mars/mars/utils.py in _wrapped(*args, **kwargs)
387 _kernel_mode.eager = False
388 _kernel_mode.eager_count = enter_eager_count + 1
--> 389 return func(*args, **kwargs)
390 finally:
391 _kernel_mode.eager_count -= 1
~/Workspace/mars/mars/tiles.py in dispatch(self, op)
113 return self._handlers[op_cls](op)
114 try:
--> 115 return op_cls.tile(op)
116 except NotImplementedError as ex:
117 cause = ex
~/Workspace/mars/mars/tensor/linalg/norm.py in tile(cls, op)
96 return new_op.new_tensors(op.inputs, op.outputs[0].shape, chunks=out_chunks, nsplits=nsplits)
97
---> 98 r = cls._norm(x.astype(op.outputs[0].dtype), ord, axis, keepdims)
99 recursive_tile(r)
100 new_op = op.copy()
~/Workspace/mars/mars/tensor/base/astype.py in _astype(tensor, dtype, order, casting, copy)
154
155 if tensor.dtype == dtype and tensor.order == tensor_order:
--> 156 return tensor if not copy else tensor.copy(order=order)
157 elif not np.can_cast(tensor.dtype, dtype, casting=casting):
158 raise TypeError('Cannot cast array from {0!r} to {1!r} '
TypeError: copy() got an unexpected keyword argument 'order'
|
TypeError
|
def start(self, event=None, block=False):
self._configure_loop()
self._try_start_web_server()
if not block:
self._server_thread = threading.Thread(target=self._server.io_loop.start)
self._server_thread.daemon = True
self._server_thread.start()
if event:
event.set()
else:
if event:
event.set()
try:
self._server.io_loop.start()
except KeyboardInterrupt:
pass
|
def start(self, event=None, block=False):
self._configure_loop()
self._try_start_web_server()
if not block:
self._server_thread = threading.Thread(target=self._server.io_loop.start)
self._server_thread.daemon = True
self._server_thread.start()
if event:
event.set()
else:
if event:
event.set()
self._server.io_loop.start()
|
https://github.com/mars-project/mars/issues/1270
|
In [1]: import mars.remote as mr
In [2]: from mars.deploy.local import new_cluster
In [3]: c = new_cluster()
WARNING: Logging before InitGoogleLogging() is written to STDERR
I0604 14:30:01.529353 132840896 store.cc:1149] Allowing the Plasma store to use up to 3.43597GB of memory.
I0604 14:30:01.534931 132840896 store.cc:1176] Starting object store with directory /tmp and huge page support disabled
In [4]: def f(x):
...: return
...:
In [5]: mr.spawn(f, 3).execute()
Unhandled exception in promise
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/worker/storage/client.py", line 299, in copy_to
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
KeyError: 'Data key (6f8edc26-0267-44a0-a64d-e939bac89fbe, d9f9097c4ccb8ca4716366cf6680fff2) not exist, proc_id=9'
Unhandled exception in promise
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/worker/storage/client.py", line 299, in copy_to
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
KeyError: 'Data key (6f8edc26-0267-44a0-a64d-e939bac89fbe, d9f9097c4ccb8ca4716366cf6680fff2) not exist, proc_id=9'
Unexpected error occurred in executing graph e0879bcab1971118787b182578bff091
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/worker/storage/client.py", line 299, in copy_to
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
KeyError: 'Data key (6f8edc26-0267-44a0-a64d-e939bac89fbe, d9f9097c4ccb8ca4716366cf6680fff2) not exist, proc_id=9'
Attempt 1: Unexpected error KeyError occurred in executing operand e0879bcab1971118787b182578bff091 in 0.0.0.0:37928
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/worker/storage/client.py", line 299, in copy_to
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
KeyError: 'Data key (6f8edc26-0267-44a0-a64d-e939bac89fbe, d9f9097c4ccb8ca4716366cf6680fff2) not exist, proc_id=9'
Unhandled exception in promise
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/worker/storage/client.py", line 299, in copy_to
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
KeyError: 'Data key (6f8edc26-0267-44a0-a64d-e939bac89fbe, d9f9097c4ccb8ca4716366cf6680fff2) not exist, proc_id=9'
Unhandled exception in promise
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/worker/storage/client.py", line 299, in copy_to
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
KeyError: 'Data key (6f8edc26-0267-44a0-a64d-e939bac89fbe, d9f9097c4ccb8ca4716366cf6680fff2) not exist, proc_id=9'
Unexpected error occurred in executing graph e0879bcab1971118787b182578bff091
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/worker/storage/client.py", line 299, in copy_to
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
KeyError: 'Data key (6f8edc26-0267-44a0-a64d-e939bac89fbe, d9f9097c4ccb8ca4716366cf6680fff2) not exist, proc_id=9'
Attempt 2: Unexpected error KeyError occurred in executing operand e0879bcab1971118787b182578bff091 in 0.0.0.0:37928
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/worker/storage/client.py", line 299, in copy_to
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
KeyError: 'Data key (6f8edc26-0267-44a0-a64d-e939bac89fbe, d9f9097c4ccb8ca4716366cf6680fff2) not exist, proc_id=9'
|
KeyError
|
def stop(self):
try:
destroy_futures = []
for actor in (
self._cpu_calc_actors
+ self._sender_actors
+ self._inproc_holder_actors
+ self._inproc_io_runner_actors
+ self._cuda_calc_actors
+ self._cuda_holder_actors
+ self._receiver_actors
+ self._spill_actors
+ self._process_helper_actors
):
if actor and actor.ctx:
destroy_futures.append(actor.destroy(wait=False))
if self._result_sender_ref:
destroy_futures.append(self._result_sender_ref.destroy(wait=False))
if self._status_ref:
destroy_futures.append(self._status_ref.destroy(wait=False))
if self._shared_holder_ref:
destroy_futures.append(self._shared_holder_ref.destroy(wait=False))
if self._storage_manager_ref:
destroy_futures.append(self._storage_manager_ref.destroy(wait=False))
if self._events_ref:
destroy_futures.append(self._events_ref.destroy(wait=False))
if self._dispatch_ref:
destroy_futures.append(self._dispatch_ref.destroy(wait=False))
if self._execution_ref:
destroy_futures.append(self._execution_ref.destroy(wait=False))
[f.result(5) for f in destroy_futures]
finally:
self._plasma_store.__exit__(None, None, None)
|
def stop(self):
try:
for actor in (
self._cpu_calc_actors
+ self._sender_actors
+ self._inproc_holder_actors
+ self._inproc_io_runner_actors
+ self._cuda_calc_actors
+ self._cuda_holder_actors
+ self._receiver_actors
+ self._spill_actors
+ self._process_helper_actors
):
if actor and actor.ctx:
actor.destroy(wait=False)
if self._result_sender_ref:
self._result_sender_ref.destroy(wait=False)
if self._status_ref:
self._status_ref.destroy(wait=False)
if self._shared_holder_ref:
self._shared_holder_ref.destroy(wait=False)
if self._storage_manager_ref:
self._storage_manager_ref.destroy(wait=False)
if self._events_ref:
self._events_ref.destroy(wait=False)
if self._dispatch_ref:
self._dispatch_ref.destroy(wait=False)
if self._execution_ref:
self._execution_ref.destroy(wait=False)
finally:
self._plasma_store.__exit__(None, None, None)
|
https://github.com/mars-project/mars/issues/1270
|
In [1]: import mars.remote as mr
In [2]: from mars.deploy.local import new_cluster
In [3]: c = new_cluster()
WARNING: Logging before InitGoogleLogging() is written to STDERR
I0604 14:30:01.529353 132840896 store.cc:1149] Allowing the Plasma store to use up to 3.43597GB of memory.
I0604 14:30:01.534931 132840896 store.cc:1176] Starting object store with directory /tmp and huge page support disabled
In [4]: def f(x):
...: return
...:
In [5]: mr.spawn(f, 3).execute()
Unhandled exception in promise
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/worker/storage/client.py", line 299, in copy_to
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
KeyError: 'Data key (6f8edc26-0267-44a0-a64d-e939bac89fbe, d9f9097c4ccb8ca4716366cf6680fff2) not exist, proc_id=9'
Unhandled exception in promise
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/worker/storage/client.py", line 299, in copy_to
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
KeyError: 'Data key (6f8edc26-0267-44a0-a64d-e939bac89fbe, d9f9097c4ccb8ca4716366cf6680fff2) not exist, proc_id=9'
Unexpected error occurred in executing graph e0879bcab1971118787b182578bff091
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/worker/storage/client.py", line 299, in copy_to
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
KeyError: 'Data key (6f8edc26-0267-44a0-a64d-e939bac89fbe, d9f9097c4ccb8ca4716366cf6680fff2) not exist, proc_id=9'
Attempt 1: Unexpected error KeyError occurred in executing operand e0879bcab1971118787b182578bff091 in 0.0.0.0:37928
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/worker/storage/client.py", line 299, in copy_to
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
KeyError: 'Data key (6f8edc26-0267-44a0-a64d-e939bac89fbe, d9f9097c4ccb8ca4716366cf6680fff2) not exist, proc_id=9'
Unhandled exception in promise
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/worker/storage/client.py", line 299, in copy_to
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
KeyError: 'Data key (6f8edc26-0267-44a0-a64d-e939bac89fbe, d9f9097c4ccb8ca4716366cf6680fff2) not exist, proc_id=9'
Unhandled exception in promise
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/worker/storage/client.py", line 299, in copy_to
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
KeyError: 'Data key (6f8edc26-0267-44a0-a64d-e939bac89fbe, d9f9097c4ccb8ca4716366cf6680fff2) not exist, proc_id=9'
Unexpected error occurred in executing graph e0879bcab1971118787b182578bff091
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/worker/storage/client.py", line 299, in copy_to
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
KeyError: 'Data key (6f8edc26-0267-44a0-a64d-e939bac89fbe, d9f9097c4ccb8ca4716366cf6680fff2) not exist, proc_id=9'
Attempt 2: Unexpected error KeyError occurred in executing operand e0879bcab1971118787b182578bff091 in 0.0.0.0:37928
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/worker/storage/client.py", line 299, in copy_to
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
KeyError: 'Data key (6f8edc26-0267-44a0-a64d-e939bac89fbe, d9f9097c4ccb8ca4716366cf6680fff2) not exist, proc_id=9'
|
KeyError
|
def copy_to(self, session_id, data_keys, device_order, ensure=True, pin_token=None):
device_order = self._normalize_devices(device_order)
existing_devs = self._manager_ref.get_data_locations(session_id, data_keys)
data_sizes = self._manager_ref.get_data_sizes(session_id, data_keys)
device_to_keys = defaultdict(list)
device_total_size = defaultdict(lambda: 0)
lift_reqs = defaultdict(list)
for k, devices, size in zip(data_keys, existing_devs, data_sizes):
if not devices or size is None:
err_msg = "Data key (%s, %s) not exist, proc_id=%s" % (
session_id,
k,
self.proc_id,
)
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
target = next((d for d in device_order if d in devices), None)
if target is not None:
lift_reqs[target].append(k)
else:
max_device = max(devices)
device_to_keys[max_device].append(k)
device_total_size[max_device] += size
for target, data_keys in lift_reqs.items():
handler = self.get_storage_handler(target)
if getattr(handler, "_spillable", False):
handler.lift_data_keys(session_id, data_keys)
if not device_to_keys:
return promise.finished()
def _action(src_handler, h, keys):
return h.load_from(session_id, keys, src_handler, pin_token=pin_token)
def _handle_exc(keys, *exc):
existing = self._manager_ref.get_data_locations(session_id, keys)
for devices in existing:
if not any(d for d in device_order if d in devices):
raise exc[1].with_traceback(exc[2]) from None
promises = []
for d in device_to_keys.keys():
action = functools.partial(_action, self.get_storage_handler(d))
keys = device_to_keys[d]
total_size = device_total_size[d]
promises.append(
self._do_with_spill(
action, keys, total_size, device_order, ensure=ensure
).catch(functools.partial(_handle_exc, keys))
)
return promise.all_(promises)
|
def copy_to(self, session_id, data_keys, device_order, ensure=True, pin_token=None):
device_order = self._normalize_devices(device_order)
existing_devs = self._manager_ref.get_data_locations(session_id, data_keys)
data_sizes = self._manager_ref.get_data_sizes(session_id, data_keys)
device_to_keys = defaultdict(list)
device_total_size = defaultdict(lambda: 0)
lift_reqs = defaultdict(list)
for k, devices, size in zip(data_keys, existing_devs, data_sizes):
if not devices or not size:
err_msg = "Data key (%s, %s) not exist, proc_id=%s" % (
session_id,
k,
self.proc_id,
)
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
target = next((d for d in device_order if d in devices), None)
if target is not None:
lift_reqs[target].append(k)
else:
max_device = max(devices)
device_to_keys[max_device].append(k)
device_total_size[max_device] += size
for target, data_keys in lift_reqs.items():
handler = self.get_storage_handler(target)
if getattr(handler, "_spillable", False):
handler.lift_data_keys(session_id, data_keys)
if not device_to_keys:
return promise.finished()
def _action(src_handler, h, keys):
return h.load_from(session_id, keys, src_handler, pin_token=pin_token)
def _handle_exc(keys, *exc):
existing = self._manager_ref.get_data_locations(session_id, keys)
for devices in existing:
if not any(d for d in device_order if d in devices):
raise exc[1].with_traceback(exc[2]) from None
promises = []
for d in device_to_keys.keys():
action = functools.partial(_action, self.get_storage_handler(d))
keys = device_to_keys[d]
total_size = device_total_size[d]
promises.append(
self._do_with_spill(
action, keys, total_size, device_order, ensure=ensure
).catch(functools.partial(_handle_exc, keys))
)
return promise.all_(promises)
|
https://github.com/mars-project/mars/issues/1270
|
In [1]: import mars.remote as mr
In [2]: from mars.deploy.local import new_cluster
In [3]: c = new_cluster()
WARNING: Logging before InitGoogleLogging() is written to STDERR
I0604 14:30:01.529353 132840896 store.cc:1149] Allowing the Plasma store to use up to 3.43597GB of memory.
I0604 14:30:01.534931 132840896 store.cc:1176] Starting object store with directory /tmp and huge page support disabled
In [4]: def f(x):
...: return
...:
In [5]: mr.spawn(f, 3).execute()
Unhandled exception in promise
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/worker/storage/client.py", line 299, in copy_to
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
KeyError: 'Data key (6f8edc26-0267-44a0-a64d-e939bac89fbe, d9f9097c4ccb8ca4716366cf6680fff2) not exist, proc_id=9'
Unhandled exception in promise
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/worker/storage/client.py", line 299, in copy_to
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
KeyError: 'Data key (6f8edc26-0267-44a0-a64d-e939bac89fbe, d9f9097c4ccb8ca4716366cf6680fff2) not exist, proc_id=9'
Unexpected error occurred in executing graph e0879bcab1971118787b182578bff091
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/worker/storage/client.py", line 299, in copy_to
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
KeyError: 'Data key (6f8edc26-0267-44a0-a64d-e939bac89fbe, d9f9097c4ccb8ca4716366cf6680fff2) not exist, proc_id=9'
Attempt 1: Unexpected error KeyError occurred in executing operand e0879bcab1971118787b182578bff091 in 0.0.0.0:37928
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/worker/storage/client.py", line 299, in copy_to
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
KeyError: 'Data key (6f8edc26-0267-44a0-a64d-e939bac89fbe, d9f9097c4ccb8ca4716366cf6680fff2) not exist, proc_id=9'
Unhandled exception in promise
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/worker/storage/client.py", line 299, in copy_to
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
KeyError: 'Data key (6f8edc26-0267-44a0-a64d-e939bac89fbe, d9f9097c4ccb8ca4716366cf6680fff2) not exist, proc_id=9'
Unhandled exception in promise
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/worker/storage/client.py", line 299, in copy_to
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
KeyError: 'Data key (6f8edc26-0267-44a0-a64d-e939bac89fbe, d9f9097c4ccb8ca4716366cf6680fff2) not exist, proc_id=9'
Unexpected error occurred in executing graph e0879bcab1971118787b182578bff091
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/worker/storage/client.py", line 299, in copy_to
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
KeyError: 'Data key (6f8edc26-0267-44a0-a64d-e939bac89fbe, d9f9097c4ccb8ca4716366cf6680fff2) not exist, proc_id=9'
Attempt 2: Unexpected error KeyError occurred in executing operand e0879bcab1971118787b182578bff091 in 0.0.0.0:37928
Traceback (most recent call last):
File "/Users/qinxuye/Workspace/mars/mars/worker/storage/client.py", line 299, in copy_to
return promise.finished(*build_exc_info(KeyError, err_msg), _accept=False)
KeyError: 'Data key (6f8edc26-0267-44a0-a64d-e939bac89fbe, d9f9097c4ccb8ca4716366cf6680fff2) not exist, proc_id=9'
|
KeyError
|
def build_fetch_graph(self, tileable_key):
"""
Convert single tileable node to tiled fetch tileable node and
put into a graph which only contains one tileable node
:param tileable_key: the key of tileable node
"""
tileable = self._get_tileable_by_key(tileable_key)
graph = DAG()
new_tileable = build_fetch_tileable(tileable).data
graph.add_node(new_tileable)
return serialize_graph(graph)
|
def build_fetch_graph(self, tileable_key):
"""
Convert single tileable node to tiled fetch tileable node and
put into a graph which only contains one tileable node
:param tileable_key: the key of tileable node
"""
tileable = self._get_tileable_by_key(tileable_key)
graph = DAG()
new_tileable = build_fetch_tileable(tileable)
graph.add_node(new_tileable)
return serialize_graph(graph)
|
https://github.com/mars-project/mars/issues/1260
|
import numpy as np
import mars.tensor as mt
from mars.learn.neighbors import NearestNeighbors
from mars.deploy.local import new_cluster
with new_cluster(scheduler_n_process=2, worker_n_process=2, shared_memory='20M', web=False) as cluster:
rs = np.random.RandomState(0)
raw_X = rs.rand(10, 5)
raw_Y = rs.rand(8, 5)
X = mt.tensor(raw_X, chunk_size=7)
Y = mt.tensor(raw_Y, chunk_size=(5, 3))
nn = NearestNeighbors(n_neighbors=3)
nn.fit(X)
-----------------------------------------------------------------------------------------
Traceback (most recent call last):
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 339, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 436, in get_chunk_graph
return self._chunk_graph_builder.iterative_chunk_graphs[-1]
IndexError: list index out of range
Unexpected exception occurred in GraphActor.stop_graph.
Traceback (most recent call last):
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 371, in _execute_graph
self.prepare_graph(compose=compose)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 339, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 473, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 600, in prepare_graph
self._target_tileable_datas + fetch_tileables, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 385, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 473, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 342, in build
tileables, tileable_graph=tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 385, in _wrapped
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 473, in inner
return func(*args, **kwargs)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 255, in build
self._on_tile_failure(tileable_data.op, exc_info)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 294, in inner
raise exc_info[1].with_traceback(exc_info[2]) from None
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 235, in build
tiled = self._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 330, in _tile
return super()._tile(tileable_data, tileable_graph)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/tiles.py", line 195, in _tile
tds = on_tile(tileable_data.op.outputs, tds)
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 581, in on_tile
return [self.tile_fetch_tileable(first)]
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/scheduler/graph.py", line 1139, in tile_fetch_tileable
fetch_graph = deserialize_graph(graph_ref.build_fetch_graph(tileable_key))
File "/Users/hekaisheng/Documents/mars_dev/mars/mars/utils.py", line 272, in deserialize_graph
return graph_cls.from_pb(g)
File "mars/graph.pyx", line 440, in mars.graph.DirectedGraph.from_pb
File "mars/graph.pyx", line 437, in mars.graph.DirectedGraph.from_pb
File "mars/serialize/core.pyx", line 683, in mars.serialize.core.Serializable.from_pb
File "mars/serialize/core.pyx", line 669, in mars.serialize.core.Serializable.deserialize
File "mars/serialize/pbserializer.pyx", line 876, in mars.serialize.pbserializer.ProtobufSerializeProvider.deserialize_field.cb
KeyError: ('9a6fa72cf124c991c7f75a489b4c1b13', '5299152656')
|
IndexError
|
def _start_cluster(endpoint, event, n_process=None, shared_memory=None, **kw):
modules = kw.pop("modules", None) or []
for m in modules:
__import__(m, globals(), locals(), [])
cluster = LocalDistributedCluster(
endpoint, n_process=n_process, shared_memory=shared_memory, **kw
)
cluster.start_service()
event.set()
try:
cluster.serve_forever()
finally:
cluster.stop_service()
|
def _start_cluster(endpoint, event, n_process=None, shared_memory=None, **kw):
cluster = LocalDistributedCluster(
endpoint, n_process=n_process, shared_memory=shared_memory, **kw
)
cluster.start_service()
event.set()
try:
cluster.serve_forever()
finally:
cluster.stop_service()
|
https://github.com/mars-project/mars/issues/1231
|
Traceback (most recent call last):
File "/Users/wenjun.swj/miniconda3/lib/python3.7/multiprocessing/process.py", line 297, in _bootstrap
self.run()
File "/Users/wenjun.swj/miniconda3/lib/python3.7/multiprocessing/process.py", line 99, in run
self._target(*self._args, **self._kwargs)
File "mars/lib/gipc.pyx", line 419, in mars.lib.gipc._child
target(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/deploy/local/core.py", line 211, in _start_web
web.start(event=event, block=True)
File "/Users/wenjun.swj/Code/mars/mars/web/server.py", line 234, in start
self._try_start_web_server()
File "/Users/wenjun.swj/Code/mars/mars/web/server.py", line 219, in _try_start_web_server
http_server_kwargs={'max_buffer_size': 2 ** 32},
File "/Users/wenjun.swj/miniconda3/lib/python3.7/site-packages/bokeh/server/server.py", line 400, in __init__
http_server.add_sockets(sockets)
File "/Users/wenjun.swj/miniconda3/lib/python3.7/site-packages/tornado/tcpserver.py", line 166, in add_sockets
sock, self._handle_connection
File "/Users/wenjun.swj/miniconda3/lib/python3.7/site-packages/tornado/netutil.py", line 279, in add_accept_handler
io_loop.add_handler(sock, accept_handler, IOLoop.READ)
File "/Users/wenjun.swj/miniconda3/lib/python3.7/site-packages/tornado/platform/asyncio.py", line 100, in add_handler
self.asyncio_loop.add_reader(fd, self._handle_events, fd, IOLoop.READ)
File "/Users/wenjun.swj/miniconda3/lib/python3.7/asyncio/selector_events.py", line 329, in add_reader
return self._add_reader(fd, callback, *args)
File "/Users/wenjun.swj/miniconda3/lib/python3.7/asyncio/selector_events.py", line 259, in _add_reader
(handle, None))
File "/Users/wenjun.swj/miniconda3/lib/python3.7/selectors.py", line 520, in register
kev = select.kevent(key.fd, select.KQ_FILTER_READ,
AttributeError: module 'select' has no attribute 'kevent'
|
AttributeError
|
def _start_cluster_process(endpoint, n_process, shared_memory, **kw):
event = _mp_spawn_context.Event()
kw = kw.copy()
kw["n_process"] = n_process
kw["shared_memory"] = shared_memory or "20%"
process = _mp_spawn_context.Process(
target=_start_cluster, args=(endpoint, event), kwargs=kw
)
process.start()
while True:
event.wait(5)
if not event.is_set():
# service not started yet
continue
if not process.is_alive():
raise SystemError("New local cluster failed")
else:
break
return process
|
def _start_cluster_process(endpoint, n_process, shared_memory, **kw):
event = multiprocessing.Event()
kw = kw.copy()
kw["n_process"] = n_process
kw["shared_memory"] = shared_memory or "20%"
process = gipc.start_process(_start_cluster, args=(endpoint, event), kwargs=kw)
while True:
event.wait(5)
if not event.is_set():
# service not started yet
continue
if not process.is_alive():
raise SystemError("New local cluster failed")
else:
break
return process
|
https://github.com/mars-project/mars/issues/1231
|
Traceback (most recent call last):
File "/Users/wenjun.swj/miniconda3/lib/python3.7/multiprocessing/process.py", line 297, in _bootstrap
self.run()
File "/Users/wenjun.swj/miniconda3/lib/python3.7/multiprocessing/process.py", line 99, in run
self._target(*self._args, **self._kwargs)
File "mars/lib/gipc.pyx", line 419, in mars.lib.gipc._child
target(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/deploy/local/core.py", line 211, in _start_web
web.start(event=event, block=True)
File "/Users/wenjun.swj/Code/mars/mars/web/server.py", line 234, in start
self._try_start_web_server()
File "/Users/wenjun.swj/Code/mars/mars/web/server.py", line 219, in _try_start_web_server
http_server_kwargs={'max_buffer_size': 2 ** 32},
File "/Users/wenjun.swj/miniconda3/lib/python3.7/site-packages/bokeh/server/server.py", line 400, in __init__
http_server.add_sockets(sockets)
File "/Users/wenjun.swj/miniconda3/lib/python3.7/site-packages/tornado/tcpserver.py", line 166, in add_sockets
sock, self._handle_connection
File "/Users/wenjun.swj/miniconda3/lib/python3.7/site-packages/tornado/netutil.py", line 279, in add_accept_handler
io_loop.add_handler(sock, accept_handler, IOLoop.READ)
File "/Users/wenjun.swj/miniconda3/lib/python3.7/site-packages/tornado/platform/asyncio.py", line 100, in add_handler
self.asyncio_loop.add_reader(fd, self._handle_events, fd, IOLoop.READ)
File "/Users/wenjun.swj/miniconda3/lib/python3.7/asyncio/selector_events.py", line 329, in add_reader
return self._add_reader(fd, callback, *args)
File "/Users/wenjun.swj/miniconda3/lib/python3.7/asyncio/selector_events.py", line 259, in _add_reader
(handle, None))
File "/Users/wenjun.swj/miniconda3/lib/python3.7/selectors.py", line 520, in register
kev = select.kevent(key.fd, select.KQ_FILTER_READ,
AttributeError: module 'select' has no attribute 'kevent'
|
AttributeError
|
def _start_web_process(scheduler_endpoint, web_endpoint):
ui_port = int(web_endpoint.rsplit(":", 1)[1])
web_event = _mp_spawn_context.Event()
web_process = _mp_spawn_context.Process(
target=_start_web, args=(scheduler_endpoint, ui_port, web_event), daemon=True
)
web_process.start()
while True:
web_event.wait(5)
if not web_event.is_set():
# web not started yet
continue
if not web_process.is_alive():
raise SystemError("New web interface failed")
else:
break
return web_process
|
def _start_web_process(scheduler_endpoint, web_endpoint):
web_event = multiprocessing.Event()
ui_port = int(web_endpoint.rsplit(":", 1)[1])
web_process = gipc.start_process(
_start_web, args=(scheduler_endpoint, ui_port, web_event), daemon=True
)
while True:
web_event.wait(5)
if not web_event.is_set():
# web not started yet
continue
if not web_process.is_alive():
raise SystemError("New web interface failed")
else:
break
return web_process
|
https://github.com/mars-project/mars/issues/1231
|
Traceback (most recent call last):
File "/Users/wenjun.swj/miniconda3/lib/python3.7/multiprocessing/process.py", line 297, in _bootstrap
self.run()
File "/Users/wenjun.swj/miniconda3/lib/python3.7/multiprocessing/process.py", line 99, in run
self._target(*self._args, **self._kwargs)
File "mars/lib/gipc.pyx", line 419, in mars.lib.gipc._child
target(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/deploy/local/core.py", line 211, in _start_web
web.start(event=event, block=True)
File "/Users/wenjun.swj/Code/mars/mars/web/server.py", line 234, in start
self._try_start_web_server()
File "/Users/wenjun.swj/Code/mars/mars/web/server.py", line 219, in _try_start_web_server
http_server_kwargs={'max_buffer_size': 2 ** 32},
File "/Users/wenjun.swj/miniconda3/lib/python3.7/site-packages/bokeh/server/server.py", line 400, in __init__
http_server.add_sockets(sockets)
File "/Users/wenjun.swj/miniconda3/lib/python3.7/site-packages/tornado/tcpserver.py", line 166, in add_sockets
sock, self._handle_connection
File "/Users/wenjun.swj/miniconda3/lib/python3.7/site-packages/tornado/netutil.py", line 279, in add_accept_handler
io_loop.add_handler(sock, accept_handler, IOLoop.READ)
File "/Users/wenjun.swj/miniconda3/lib/python3.7/site-packages/tornado/platform/asyncio.py", line 100, in add_handler
self.asyncio_loop.add_reader(fd, self._handle_events, fd, IOLoop.READ)
File "/Users/wenjun.swj/miniconda3/lib/python3.7/asyncio/selector_events.py", line 329, in add_reader
return self._add_reader(fd, callback, *args)
File "/Users/wenjun.swj/miniconda3/lib/python3.7/asyncio/selector_events.py", line 259, in _add_reader
(handle, None))
File "/Users/wenjun.swj/miniconda3/lib/python3.7/selectors.py", line 520, in register
kev = select.kevent(key.fd, select.KQ_FILTER_READ,
AttributeError: module 'select' has no attribute 'kevent'
|
AttributeError
|
def deserialize_graph(ser_graph, graph_cls=None):
from google.protobuf.message import DecodeError
from .serialize.protos.graph_pb2 import GraphDef
from .graph import DirectedGraph
graph_cls = graph_cls or DirectedGraph
ser_graph_bin = to_binary(ser_graph)
g = GraphDef()
try:
ser_graph = ser_graph
g.ParseFromString(ser_graph_bin)
return graph_cls.from_pb(g)
except DecodeError:
pass
try:
ser_graph_bin = zlib.decompress(ser_graph_bin)
g.ParseFromString(ser_graph_bin)
return graph_cls.from_pb(g)
except (zlib.error, DecodeError):
pass
json_obj = json.loads(to_str(ser_graph))
return graph_cls.from_json(json_obj)
|
def deserialize_graph(ser_graph, graph_cls=None):
from google.protobuf.message import DecodeError
from .serialize.protos.graph_pb2 import GraphDef
from .graph import DirectedGraph
graph_cls = graph_cls or DirectedGraph
ser_graph_bin = to_binary(ser_graph)
g = GraphDef()
try:
ser_graph = ser_graph
g.ParseFromString(ser_graph_bin)
return graph_cls.from_pb(g)
except DecodeError:
pass
try:
ser_graph_bin = zlib.decompress(ser_graph_bin)
g.ParseFromString(ser_graph_bin)
return graph_cls.from_pb(g)
except (zlib.error, DecodeError):
json_obj = json.loads(to_str(ser_graph))
return graph_cls.from_json(json_obj)
|
https://github.com/mars-project/mars/issues/1231
|
Traceback (most recent call last):
File "/Users/wenjun.swj/miniconda3/lib/python3.7/multiprocessing/process.py", line 297, in _bootstrap
self.run()
File "/Users/wenjun.swj/miniconda3/lib/python3.7/multiprocessing/process.py", line 99, in run
self._target(*self._args, **self._kwargs)
File "mars/lib/gipc.pyx", line 419, in mars.lib.gipc._child
target(*args, **kwargs)
File "/Users/wenjun.swj/Code/mars/mars/deploy/local/core.py", line 211, in _start_web
web.start(event=event, block=True)
File "/Users/wenjun.swj/Code/mars/mars/web/server.py", line 234, in start
self._try_start_web_server()
File "/Users/wenjun.swj/Code/mars/mars/web/server.py", line 219, in _try_start_web_server
http_server_kwargs={'max_buffer_size': 2 ** 32},
File "/Users/wenjun.swj/miniconda3/lib/python3.7/site-packages/bokeh/server/server.py", line 400, in __init__
http_server.add_sockets(sockets)
File "/Users/wenjun.swj/miniconda3/lib/python3.7/site-packages/tornado/tcpserver.py", line 166, in add_sockets
sock, self._handle_connection
File "/Users/wenjun.swj/miniconda3/lib/python3.7/site-packages/tornado/netutil.py", line 279, in add_accept_handler
io_loop.add_handler(sock, accept_handler, IOLoop.READ)
File "/Users/wenjun.swj/miniconda3/lib/python3.7/site-packages/tornado/platform/asyncio.py", line 100, in add_handler
self.asyncio_loop.add_reader(fd, self._handle_events, fd, IOLoop.READ)
File "/Users/wenjun.swj/miniconda3/lib/python3.7/asyncio/selector_events.py", line 329, in add_reader
return self._add_reader(fd, callback, *args)
File "/Users/wenjun.swj/miniconda3/lib/python3.7/asyncio/selector_events.py", line 259, in _add_reader
(handle, None))
File "/Users/wenjun.swj/miniconda3/lib/python3.7/selectors.py", line 520, in register
kev = select.kevent(key.fd, select.KQ_FILTER_READ,
AttributeError: module 'select' has no attribute 'kevent'
|
AttributeError
|
def _calc_chunk_params(
cls, in_chunk, axes, output, output_type, chunk_op, no_shuffle: bool
):
params = {"index": in_chunk.index}
if output_type == OutputType.tensor:
chunk_shape = list(in_chunk.shape)
for ax in axes:
if not no_shuffle:
chunk_shape[ax] = np.nan
params["shape"] = tuple(chunk_shape)
params["dtype"] = in_chunk.dtype
params["order"] = output.order
elif output_type == OutputType.dataframe:
chunk_shape = list(in_chunk.shape)
if 0 in axes:
if not no_shuffle:
chunk_shape[0] = np.nan
params["shape"] = tuple(chunk_shape)
params["dtypes"] = output.dtypes
params["columns_value"] = output.columns_value
params["index_value"] = _shuffle_index_value(chunk_op, in_chunk.index_value)
else:
assert output_type == OutputType.series
if no_shuffle:
params["shape"] = in_chunk.shape
else:
params["shape"] = (np.nan,)
params["name"] = in_chunk.name
params["index_value"] = _shuffle_index_value(chunk_op, in_chunk.index_value)
params["dtype"] = in_chunk.dtype
return params
|
def _calc_chunk_params(cls, in_chunk, axes, output, output_type, chunk_op):
params = {"index": in_chunk.index}
if output_type == OutputType.tensor:
chunk_shape = list(in_chunk.shape)
for ax in axes:
chunk_shape[ax] = np.nan
params["shape"] = tuple(chunk_shape)
params["dtype"] = in_chunk.dtype
params["order"] = output.order
elif output_type == OutputType.dataframe:
chunk_shape = list(in_chunk.shape)
if 0 in axes:
chunk_shape[0] = np.nan
params["shape"] = tuple(chunk_shape)
params["dtypes"] = output.dtypes
params["columns_value"] = output.columns_value
params["index_value"] = _shuffle_index_value(chunk_op, in_chunk.index_value)
else:
assert output_type == OutputType.series
params["shape"] = (np.nan,)
params["name"] = in_chunk.name
params["index_value"] = _shuffle_index_value(chunk_op, in_chunk.index_value)
params["dtype"] = in_chunk.dtype
return params
|
https://github.com/mars-project/mars/issues/1184
|
In [14]: from mars.learn.utils import shuffle
In [15]: X, y = shuffle(X, y)
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider.serialize_field()
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider._serialize_value()
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider._serialize_typed_value()
TypeError: Expected tuple, got numpy.ndarray
The above exception was the direct cause of the following exception:
TypeError Traceback (most recent call last)
<ipython-input-15-0c15ee335b21> in <module>
----> 1 X, y = shuffle(X, y)
~/Workspace/mars/mars/learn/utils/shuffle.py in shuffle(*arrays, **options)
413 op = LearnShuffle(axes=axes, seeds=seeds,
414 output_types=get_output_types(*arrays))
--> 415 shuffled_arrays = op(arrays)
416 if len(arrays) == 1:
417 return shuffled_arrays[0]
~/Workspace/mars/mars/learn/utils/shuffle.py in __call__(self, arrays)
92 def __call__(self, arrays):
93 params = self._calc_params([ar.params for ar in arrays])
---> 94 return self.new_tileables(arrays, kws=params)
95
96 def _shuffle_index_value(self, index_value):
~/Workspace/mars/mars/operands.py in new_tileables(self, inputs, kws, **kw)
351 tileables = self._new_tileables(inputs, kws=kws, **kw)
352 if is_eager_mode():
--> 353 ExecutableTuple(tileables).execute(fetch=False)
354 return tileables
355
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
626 if session is None:
627 session = Session.default_or_local()
--> 628 return session.run(self, **kw)
629
630 def fetch(self, session=None, **kw):
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
184 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
185 for t in tileables)
--> 186 result = self._sess.run(*tileables, **kw)
187
188 for t in tileables:
~/Workspace/mars/mars/deploy/local/session.py in run(self, *tileables, **kw)
124
125 # submit graph to local cluster
--> 126 self._api.submit_graph(self._session_id, json.dumps(graph.to_json(), separators=(',', ':')),
127 graph_key, targets, compose=compose)
128
~/Workspace/mars/mars/graph.pyx in mars.graph.DirectedGraph.to_json()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Serializable.to_json()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Serializable.serialize()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Provider.serialize_model()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Field.serialize()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Field.serialize()
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider.serialize_field()
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider._serialize_reference()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.AttributeAsDict.serialize()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Provider.serialize_attribute_as_dict()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Provider.serialize_model()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Field.serialize()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Field.serialize()
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider.serialize_field()
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider.serialize_field()
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider._serialize_value()
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider._serialize_typed_value()
TypeError: Fail to serialize field `seeds` for LearnShuffle <key=4901c18065398f9e19eec455538bc65a>, reason: Expected tuple, got numpy.ndarray
|
TypeError
|
def tile(cls, op):
inputs = op.inputs
check_chunks_unknown_shape(inputs, TilesError)
axis_to_nsplits = defaultdict(list)
has_dataframe = any(
output_type == OutputType.dataframe for output_type in op.output_types
)
for ax in op.axes:
if has_dataframe and ax == 1:
# if DataFrame exists, for the columns axis,
# we only allow 1 chunk to ensure the columns consistent
axis_to_nsplits[ax].append((inputs[0].shape[ax],))
continue
for inp in inputs:
if ax < inp.ndim:
axis_to_nsplits[ax].append(inp.nsplits[ax])
ax_nsplit = {ax: decide_unify_split(*ns) for ax, ns in axis_to_nsplits.items()}
inputs = [cls._safe_rechunk(inp, ax_nsplit) for inp in inputs]
mapper_seeds = [None] * len(op.axes)
reducer_seeds = [None] * len(op.axes)
for i, ax in enumerate(op.axes):
rs = np.random.RandomState(op.seeds[i])
size = len(ax_nsplit[ax])
if size > 1:
mapper_seeds[i] = gen_random_seeds(size, rs)
reducer_seeds[i] = gen_random_seeds(size, rs)
else:
mapper_seeds[i] = reducer_seeds[i] = [op.seeds[i]] * size
out_chunks = []
out_nsplits = []
for output_type, inp, oup in zip(op.output_types, inputs, op.outputs):
inp_axes = tuple(ax for ax in op.axes if ax < inp.ndim)
reduce_sizes = tuple(inp.chunk_shape[ax] for ax in inp_axes)
output_types = [output_type]
if len(inp_axes) == 0:
continue
nsplits = list(inp.nsplits)
for ax in inp_axes:
cs = len(nsplits[ax])
if cs > 1:
nsplits[ax] = (np.nan,) * cs
out_nsplits.append(tuple(nsplits))
if all(reduce_size == 1 for reduce_size in reduce_sizes):
# no need to do shuffle
chunks = []
for c in inp.chunks:
chunk_op = LearnShuffle(
axes=inp_axes,
seeds=op.seeds[: len(inp_axes)],
output_types=output_types,
)
params = cls._calc_chunk_params(
c, inp_axes, oup, output_type, chunk_op, True
)
out_chunk = chunk_op.new_chunk([c], kws=[params])
chunks.append(out_chunk)
out_chunks.append(chunks)
continue
if inp.ndim > 1:
left_chunk_shape = [
s for ax, s in enumerate(inp.chunk_shape) if ax not in inp_axes
]
idx_iter = itertools.product(*[range(s) for s in left_chunk_shape])
else:
idx_iter = [()]
reduce_chunks = []
out_chunks.append(reduce_chunks)
for idx in idx_iter:
map_chunks = []
for reducer_inds in itertools.product(*[range(s) for s in reduce_sizes]):
inp_index = list(idx)
for ax, reducer_ind in zip(inp_axes, reducer_inds):
inp_index.insert(ax, reducer_ind)
inp_index = tuple(inp_index)
in_chunk = inp.cix[inp_index]
params = in_chunk.params
map_chunk_op = LearnShuffle(
stage=OperandStage.map,
output_types=output_types,
axes=inp_axes,
seeds=tuple(
mapper_seeds[j][in_chunk.index[ax]]
for j, ax in enumerate(inp_axes)
),
reduce_sizes=reduce_sizes,
)
map_chunk = map_chunk_op.new_chunk([in_chunk], **params)
map_chunks.append(map_chunk)
proxy_chunk = LearnShuffleProxy(_tensor_keys=[inp.key]).new_chunk(
map_chunks
)
reduce_axes = tuple(
ax for j, ax in enumerate(inp_axes) if reduce_sizes[j] > 1
)
reduce_sizes_ = tuple(rs for rs in reduce_sizes if rs > 1)
for c in map_chunks:
shuffle_key = ",".join(str(idx) for idx in c.index)
chunk_op = LearnShuffle(
stage=OperandStage.reduce,
output_types=output_types,
axes=reduce_axes,
seeds=tuple(
reducer_seeds[j][c.index[ax]]
for j, ax in enumerate(inp_axes)
if reduce_sizes[j] > 1
),
reduce_sizes=reduce_sizes_,
shuffle_key=shuffle_key,
)
params = cls._calc_chunk_params(
c, inp_axes, oup, output_type, chunk_op, False
)
reduce_chunk = chunk_op.new_chunk([proxy_chunk], kws=[params])
reduce_chunks.append(reduce_chunk)
new_op = op.copy()
params = [out.params for out in op.outputs]
if len(out_chunks) < len(op.outputs):
# axes are all higher than its ndim
for i, inp in enumerate(op.inputs):
if all(ax >= inp.ndim for ax in op.axes):
out_chunks.insert(i, inp.chunks)
out_nsplits.insert(i, inp.nsplits)
assert len(out_chunks) == len(op.outputs)
for i, param, chunks, ns in zip(itertools.count(), params, out_chunks, out_nsplits):
param["chunks"] = chunks
param["nsplits"] = ns
param["_position_"] = i
return new_op.new_tileables(op.inputs, kws=params)
|
def tile(cls, op):
inputs = op.inputs
check_chunks_unknown_shape(inputs, TilesError)
axis_to_nsplits = defaultdict(list)
has_dataframe = any(
output_type == OutputType.dataframe for output_type in op.output_types
)
for ax in op.axes:
if has_dataframe and ax == 1:
# if DataFrame exists, for the columns axis,
# we only allow 1 chunk to ensure the columns consistent
axis_to_nsplits[ax].append((inputs[0].shape[ax],))
continue
for inp in inputs:
if ax < inp.ndim:
axis_to_nsplits[ax].append(inp.nsplits[ax])
ax_nsplit = {ax: decide_unify_split(*ns) for ax, ns in axis_to_nsplits.items()}
inputs = [cls._safe_rechunk(inp, ax_nsplit) for inp in inputs]
mapper_seeds = [None] * len(op.axes)
reducer_seeds = [None] * len(op.axes)
for i, ax in enumerate(op.axes):
rs = np.random.RandomState(op.seeds[i])
size = len(ax_nsplit[ax])
if size > 1:
mapper_seeds[i] = gen_random_seeds(size, rs)
reducer_seeds[i] = gen_random_seeds(size, rs)
else:
mapper_seeds[i] = reducer_seeds[i] = [op.seeds[i]] * size
out_chunks = []
out_nsplits = []
for output_type, inp, oup in zip(op.output_types, inputs, op.outputs):
inp_axes = tuple(ax for ax in op.axes if ax < inp.ndim)
reduce_sizes = tuple(inp.chunk_shape[ax] for ax in inp_axes)
output_types = [output_type]
if len(inp_axes) == 0:
continue
nsplits = list(inp.nsplits)
for ax in inp_axes:
cs = len(nsplits[ax])
if cs > 1:
nsplits[ax] = (np.nan,) * cs
out_nsplits.append(tuple(nsplits))
if all(reduce_size == 1 for reduce_size in reduce_sizes):
# no need to do shuffle
chunks = []
for c in inp.chunks:
chunk_op = LearnShuffle(
axes=inp_axes,
seeds=op.seeds[: len(inp_axes)],
output_types=output_types,
)
params = cls._calc_chunk_params(c, inp_axes, oup, output_type, chunk_op)
out_chunk = chunk_op.new_chunk([c], kws=[params])
chunks.append(out_chunk)
out_chunks.append(chunks)
continue
if inp.ndim > 1:
left_chunk_shape = [
s for ax, s in enumerate(inp.chunk_shape) if ax not in inp_axes
]
idx_iter = itertools.product(*[range(s) for s in left_chunk_shape])
else:
idx_iter = [()]
reduce_chunks = []
out_chunks.append(reduce_chunks)
for idx in idx_iter:
map_chunks = []
for reducer_inds in itertools.product(*[range(s) for s in reduce_sizes]):
inp_index = list(idx)
for ax, reducer_ind in zip(inp_axes, reducer_inds):
inp_index.insert(ax, reducer_ind)
inp_index = tuple(inp_index)
in_chunk = inp.cix[inp_index]
params = in_chunk.params
map_chunk_op = LearnShuffle(
stage=OperandStage.map,
output_types=output_types,
axes=inp_axes,
seeds=[
mapper_seeds[j][in_chunk.index[ax]]
for j, ax in enumerate(inp_axes)
],
reduce_sizes=reduce_sizes,
)
map_chunk = map_chunk_op.new_chunk([in_chunk], **params)
map_chunks.append(map_chunk)
proxy_chunk = LearnShuffleProxy(_tensor_keys=[inp.key]).new_chunk(
map_chunks
)
reduce_axes = tuple(
ax for j, ax in enumerate(inp_axes) if reduce_sizes[j] > 1
)
reduce_sizes_ = tuple(rs for rs in reduce_sizes if rs > 1)
for c in map_chunks:
shuffle_key = ",".join(str(idx) for idx in c.index)
chunk_op = LearnShuffle(
stage=OperandStage.reduce,
output_types=output_types,
axes=reduce_axes,
seeds=[
reducer_seeds[j][c.index[ax]]
for j, ax in enumerate(inp_axes)
if reduce_sizes[j] > 1
],
reduce_sizes=reduce_sizes_,
shuffle_key=shuffle_key,
)
params = cls._calc_chunk_params(c, inp_axes, oup, output_type, chunk_op)
reduce_chunk = chunk_op.new_chunk([proxy_chunk], kws=[params])
reduce_chunks.append(reduce_chunk)
new_op = op.copy()
params = [out.params for out in op.outputs]
if len(out_chunks) < len(op.outputs):
# axes are all higher than its ndim
for i, inp in enumerate(op.inputs):
if all(ax >= inp.ndim for ax in op.axes):
out_chunks.insert(i, inp.chunks)
out_nsplits.insert(i, inp.nsplits)
assert len(out_chunks) == len(op.outputs)
for param, chunks, ns in zip(params, out_chunks, out_nsplits):
param["chunks"] = chunks
param["nsplits"] = ns
return new_op.new_tileables(op.inputs, kws=params)
|
https://github.com/mars-project/mars/issues/1184
|
In [14]: from mars.learn.utils import shuffle
In [15]: X, y = shuffle(X, y)
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider.serialize_field()
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider._serialize_value()
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider._serialize_typed_value()
TypeError: Expected tuple, got numpy.ndarray
The above exception was the direct cause of the following exception:
TypeError Traceback (most recent call last)
<ipython-input-15-0c15ee335b21> in <module>
----> 1 X, y = shuffle(X, y)
~/Workspace/mars/mars/learn/utils/shuffle.py in shuffle(*arrays, **options)
413 op = LearnShuffle(axes=axes, seeds=seeds,
414 output_types=get_output_types(*arrays))
--> 415 shuffled_arrays = op(arrays)
416 if len(arrays) == 1:
417 return shuffled_arrays[0]
~/Workspace/mars/mars/learn/utils/shuffle.py in __call__(self, arrays)
92 def __call__(self, arrays):
93 params = self._calc_params([ar.params for ar in arrays])
---> 94 return self.new_tileables(arrays, kws=params)
95
96 def _shuffle_index_value(self, index_value):
~/Workspace/mars/mars/operands.py in new_tileables(self, inputs, kws, **kw)
351 tileables = self._new_tileables(inputs, kws=kws, **kw)
352 if is_eager_mode():
--> 353 ExecutableTuple(tileables).execute(fetch=False)
354 return tileables
355
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
626 if session is None:
627 session = Session.default_or_local()
--> 628 return session.run(self, **kw)
629
630 def fetch(self, session=None, **kw):
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
184 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
185 for t in tileables)
--> 186 result = self._sess.run(*tileables, **kw)
187
188 for t in tileables:
~/Workspace/mars/mars/deploy/local/session.py in run(self, *tileables, **kw)
124
125 # submit graph to local cluster
--> 126 self._api.submit_graph(self._session_id, json.dumps(graph.to_json(), separators=(',', ':')),
127 graph_key, targets, compose=compose)
128
~/Workspace/mars/mars/graph.pyx in mars.graph.DirectedGraph.to_json()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Serializable.to_json()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Serializable.serialize()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Provider.serialize_model()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Field.serialize()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Field.serialize()
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider.serialize_field()
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider._serialize_reference()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.AttributeAsDict.serialize()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Provider.serialize_attribute_as_dict()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Provider.serialize_model()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Field.serialize()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Field.serialize()
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider.serialize_field()
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider.serialize_field()
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider._serialize_value()
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider._serialize_typed_value()
TypeError: Fail to serialize field `seeds` for LearnShuffle <key=4901c18065398f9e19eec455538bc65a>, reason: Expected tuple, got numpy.ndarray
|
TypeError
|
def shuffle(*arrays, **options):
arrays = [convert_to_tensor_or_dataframe(ar) for ar in arrays]
axes = options.pop("axes", (0,))
if not isinstance(axes, Iterable):
axes = (axes,)
elif not isinstance(axes, tuple):
axes = tuple(axes)
random_state = check_random_state(options.pop("random_state", None)).to_numpy()
if options:
raise TypeError(
"shuffle() got an unexpected keyword argument {0}".format(
next(iter(options))
)
)
max_ndim = max(ar.ndim for ar in arrays)
axes = tuple(np.unique([validate_axis(max_ndim, ax) for ax in axes]))
seeds = gen_random_seeds(len(axes), random_state)
# verify shape
for ax in axes:
shapes = {ar.shape[ax] for ar in arrays if ax < ar.ndim}
if len(shapes) > 1:
raise ValueError("arrays do not have same shape on axis {0}".format(ax))
op = LearnShuffle(
axes=axes, seeds=tuple(seeds), output_types=get_output_types(*arrays)
)
shuffled_arrays = op(arrays)
if len(arrays) == 1:
return shuffled_arrays[0]
else:
return ExecutableTuple(shuffled_arrays)
|
def shuffle(*arrays, **options):
arrays = [convert_to_tensor_or_dataframe(ar) for ar in arrays]
axes = options.pop("axes", (0,))
if not isinstance(axes, Iterable):
axes = (axes,)
elif not isinstance(axes, tuple):
axes = tuple(axes)
random_state = check_random_state(options.pop("random_state", None)).to_numpy()
if options:
raise TypeError(
"shuffle() got an unexpected keyword argument {0}".format(
next(iter(options))
)
)
max_ndim = max(ar.ndim for ar in arrays)
axes = tuple(np.unique([validate_axis(max_ndim, ax) for ax in axes]))
seeds = gen_random_seeds(len(axes), random_state)
# verify shape
for ax in axes:
shapes = {ar.shape[ax] for ar in arrays if ax < ar.ndim}
if len(shapes) > 1:
raise ValueError("arrays do not have same shape on axis {0}".format(ax))
op = LearnShuffle(axes=axes, seeds=seeds, output_types=get_output_types(*arrays))
shuffled_arrays = op(arrays)
if len(arrays) == 1:
return shuffled_arrays[0]
else:
return ExecutableTuple(shuffled_arrays)
|
https://github.com/mars-project/mars/issues/1184
|
In [14]: from mars.learn.utils import shuffle
In [15]: X, y = shuffle(X, y)
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider.serialize_field()
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider._serialize_value()
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider._serialize_typed_value()
TypeError: Expected tuple, got numpy.ndarray
The above exception was the direct cause of the following exception:
TypeError Traceback (most recent call last)
<ipython-input-15-0c15ee335b21> in <module>
----> 1 X, y = shuffle(X, y)
~/Workspace/mars/mars/learn/utils/shuffle.py in shuffle(*arrays, **options)
413 op = LearnShuffle(axes=axes, seeds=seeds,
414 output_types=get_output_types(*arrays))
--> 415 shuffled_arrays = op(arrays)
416 if len(arrays) == 1:
417 return shuffled_arrays[0]
~/Workspace/mars/mars/learn/utils/shuffle.py in __call__(self, arrays)
92 def __call__(self, arrays):
93 params = self._calc_params([ar.params for ar in arrays])
---> 94 return self.new_tileables(arrays, kws=params)
95
96 def _shuffle_index_value(self, index_value):
~/Workspace/mars/mars/operands.py in new_tileables(self, inputs, kws, **kw)
351 tileables = self._new_tileables(inputs, kws=kws, **kw)
352 if is_eager_mode():
--> 353 ExecutableTuple(tileables).execute(fetch=False)
354 return tileables
355
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
626 if session is None:
627 session = Session.default_or_local()
--> 628 return session.run(self, **kw)
629
630 def fetch(self, session=None, **kw):
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
184 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
185 for t in tileables)
--> 186 result = self._sess.run(*tileables, **kw)
187
188 for t in tileables:
~/Workspace/mars/mars/deploy/local/session.py in run(self, *tileables, **kw)
124
125 # submit graph to local cluster
--> 126 self._api.submit_graph(self._session_id, json.dumps(graph.to_json(), separators=(',', ':')),
127 graph_key, targets, compose=compose)
128
~/Workspace/mars/mars/graph.pyx in mars.graph.DirectedGraph.to_json()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Serializable.to_json()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Serializable.serialize()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Provider.serialize_model()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Field.serialize()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Field.serialize()
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider.serialize_field()
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider._serialize_reference()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.AttributeAsDict.serialize()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Provider.serialize_attribute_as_dict()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Provider.serialize_model()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Field.serialize()
~/Workspace/mars/mars/serialize/core.pyx in mars.serialize.core.Field.serialize()
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider.serialize_field()
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider.serialize_field()
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider._serialize_value()
~/Workspace/mars/mars/serialize/jsonserializer.pyx in mars.serialize.jsonserializer.JsonSerializeProvider._serialize_typed_value()
TypeError: Fail to serialize field `seeds` for LearnShuffle <key=4901c18065398f9e19eec455538bc65a>, reason: Expected tuple, got numpy.ndarray
|
TypeError
|
def __init__(
self,
obj,
groupby_obj=None,
keys=None,
axis=0,
level=None,
grouper=None,
exclusions=None,
selection=None,
as_index=True,
sort=True,
group_keys=True,
squeeze=False,
observed=False,
mutated=False,
grouper_cache=None,
):
def fill_value(v, key):
return v if v is not None or groupby_obj is None else getattr(groupby_obj, key)
self.obj = obj
self.keys = fill_value(keys, "keys")
self.axis = fill_value(axis, "axis")
self.level = fill_value(level, "level")
self.exclusions = fill_value(exclusions, "exclusions")
self.selection = selection
self.as_index = fill_value(as_index, "as_index")
self.sort = fill_value(sort, "sort")
self.group_keys = fill_value(group_keys, "group_keys")
self.squeeze = fill_value(squeeze, "squeeze")
self.observed = fill_value(observed, "observed")
self.mutated = fill_value(mutated, "mutated")
if groupby_obj is None:
if obj.ndim == 2:
self.groupby_obj = DataFrameGroupBy(
obj,
keys=keys,
axis=axis,
level=level,
grouper=grouper,
exclusions=exclusions,
as_index=as_index,
group_keys=group_keys,
squeeze=squeeze,
observed=observed,
mutated=mutated,
)
else:
self.groupby_obj = SeriesGroupBy(
obj,
keys=keys,
axis=axis,
level=level,
grouper=grouper,
exclusions=exclusions,
as_index=as_index,
group_keys=group_keys,
squeeze=squeeze,
observed=observed,
mutated=mutated,
)
else:
self.groupby_obj = groupby_obj
if grouper_cache:
self.groupby_obj.grouper._cache = grouper_cache
if selection:
self.groupby_obj = self.groupby_obj[selection]
self.is_frame = isinstance(self.groupby_obj, DataFrameGroupBy)
|
def __init__(
self,
obj,
groupby_obj=None,
keys=None,
axis=0,
level=None,
grouper=None,
exclusions=None,
selection=None,
as_index=True,
sort=True,
group_keys=True,
squeeze=False,
observed=False,
mutated=False,
grouper_cache=None,
):
def fill_value(v, key):
return v if v is not None or groupby_obj is None else getattr(groupby_obj, key)
self.obj = obj
self.keys = fill_value(keys, "keys")
self.axis = fill_value(axis, "axis")
self.level = fill_value(level, "level")
self.exclusions = fill_value(exclusions, "exclusions")
self.selection = selection
self.as_index = fill_value(as_index, "as_index")
self.sort = fill_value(sort, "sort")
self.group_keys = fill_value(group_keys, "group_keys")
self.squeeze = fill_value(squeeze, "squeeze")
self.observed = fill_value(observed, "observed")
self.mutated = fill_value(mutated, "mutated")
if groupby_obj is None:
if obj.ndim == 2:
self.groupby_obj = DataFrameGroupBy(
obj,
keys=keys,
axis=axis,
level=level,
grouper=grouper,
exclusions=exclusions,
as_index=as_index,
group_keys=group_keys,
squeeze=squeeze,
observed=observed,
mutated=mutated,
)
else:
self.groupby_obj = SeriesGroupBy(
obj,
keys=keys,
axis=axis,
level=level,
grouper=grouper,
exclusions=exclusions,
as_index=as_index,
group_keys=group_keys,
squeeze=squeeze,
observed=observed,
mutated=mutated,
)
else:
self.groupby_obj = groupby_obj
self.is_frame = isinstance(self.groupby_obj, DataFrameGroupBy)
if grouper_cache:
self.groupby_obj.grouper._cache = grouper_cache
if selection:
self.groupby_obj = self.groupby_obj[selection]
|
https://github.com/mars-project/mars/issues/1154
|
In [1]: import pandas as pd; import numpy as np
In [2]: df = pd.DataFrame(np.random.rand(4, 3), index=np.arange(5, 1, -1))
In [4]: import mars.dataframe as md
In [5]: mdf = md.DataFrame(df)
In [6]: mdf.groupby(0).execute()
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-6-491b51043e08> in <module>
----> 1 mdf.groupby(0).execute()
~/Workspace/mars/mars/core.py in execute(self, session, **kw)
426 if session is None:
427 session = Session.default_or_local()
--> 428 return session.run(self, **kw)
429
430 def fetch(self, session=None, **kw):
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
181 tileables = tuple(mt.tensor(t) if not isinstance(t, (Entity, Base)) else t
182 for t in tileables)
--> 183 result = self._sess.run(*tileables, **kw)
184
185 for t in tileables:
~/Workspace/mars/mars/session.py in run(self, *tileables, **kw)
88 # set number of running cores
89 self.context.set_ncores(kw['n_parallel'])
---> 90 res = self._executor.execute_tileables(tileables, **kw)
91 return res
92
~/Workspace/mars/mars/utils.py in _wrapped(*args, **kwargs)
380 _kernel_mode.eager = False
381 _kernel_mode.eager_count = enter_eager_count + 1
--> 382 return func(*args, **kwargs)
383 finally:
384 _kernel_mode.eager_count -= 1
~/Workspace/mars/mars/utils.py in inner(*args, **kwargs)
468 def inner(*args, **kwargs):
469 with build_mode():
--> 470 return func(*args, **kwargs)
471 return inner
472
~/Workspace/mars/mars/executor.py in execute_tileables(self, tileables, fetch, n_parallel, n_thread, print_progress, mock, compose)
828 # update shape of tileable and its chunks whatever it's successful or not
829 self._update_tileable_and_chunk_shape(
--> 830 tileable_graph, chunk_result, chunk_graph_builder.interrupted_ops)
831 if chunk_graph_builder.done:
832 if len(intermediate_result_keys) > 0:
~/Workspace/mars/mars/executor.py in _update_tileable_and_chunk_shape(self, tileable_graph, chunk_result, failed_ops)
726 continue
727 for c in tiled_n.chunks:
--> 728 c.data._shape = chunk_result[c.key].shape
729 new_nsplits = self.get_tileable_nsplits(n, chunk_result=chunk_result)
730 for node in (n, tiled_n):
~/Workspace/mars/mars/lib/groupby_wrapper.py in __getattr__(self, item)
74 if item in getattr(self.obj, 'columns', ()):
75 return self.__getitem__(item)
---> 76 return getattr(self.groupby_obj, item)
77
78 def __iter__(self):
~/miniconda3/lib/python3.7/site-packages/pandas/core/groupby/groupby.py in __getattr__(self, attr)
578
579 raise AttributeError(
--> 580 f"'{type(self).__name__}' object has no attribute '{attr}'"
581 )
582
AttributeError: 'DataFrameGroupBy' object has no attribute 'shape'
|
AttributeError
|
def execute_map(cls, ctx, op):
chunk = op.outputs[0]
df = ctx[op.inputs[0].key]
shuffle_on = op.shuffle_on
if shuffle_on is not None:
# shuffle on field may be resident in index
to_reset_index_names = []
if not isinstance(shuffle_on, (list, tuple)):
if shuffle_on not in df.dtypes:
to_reset_index_names.append(shuffle_on)
else:
for son in shuffle_on:
if son not in df.dtypes:
to_reset_index_names.append(shuffle_on)
if len(to_reset_index_names) > 0:
df = df.reset_index(to_reset_index_names)
filters = hash_dataframe_on(df, shuffle_on, op.index_shuffle_size)
# shuffle on index
for index_idx, index_filter in enumerate(filters):
group_key = ",".join([str(index_idx), str(chunk.index[1])])
if index_filter is not None and index_filter is not list():
ctx[(chunk.key, group_key)] = df.loc[index_filter]
else:
ctx[(chunk.key, group_key)] = None
|
def execute_map(cls, ctx, op):
chunk = op.outputs[0]
df = ctx[op.inputs[0].key]
filters = hash_dataframe_on(df, op.shuffle_on, op.index_shuffle_size)
# shuffle on index
for index_idx, index_filter in enumerate(filters):
group_key = ",".join([str(index_idx), str(chunk.index[1])])
if index_filter is not None and index_filter is not list():
ctx[(chunk.key, group_key)] = df.loc[index_filter]
else:
ctx[(chunk.key, group_key)] = None
|
https://github.com/mars-project/mars/issues/1110
|
In [4]: df = pd.DataFrame({'a': np.arange(10), 'b': np.random.rand(10)})
In [5]: df2 = df.copy()
In [6]: df2.set_index('a', inplace=True)
In [7]: df2
Out[7]:
b
a
0 0.984265
1 0.544014
2 0.592392
3 0.269762
4 0.236130
5 0.846061
6 0.308780
7 0.604834
8 0.973824
9 0.867099
In [8]: df.merge(df2, on='a') # can work for pandas
Out[8]:
a b_x b_y
0 0 0.984265 0.984265
1 1 0.544014 0.544014
2 2 0.592392 0.592392
3 3 0.269762 0.269762
4 4 0.236130 0.236130
5 5 0.846061 0.846061
6 6 0.308780 0.308780
7 7 0.604834 0.604834
8 8 0.973824 0.973824
9 9 0.867099 0.867099
In [9]: import mars.dataframe as md
In [10]: mdf = md.DataFrame(df)
In [11]: mdf2 = md.DataFrame(df2)
In [12]: mdf.merge(mdf2, on='a') # cannot work for mars dataframe
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-12-bd6a81883d3a> in <module>
----> 1 mdf.merge(mdf2, on='a')
~/Workspace/mars/mars/dataframe/merge/merge.py in merge(df, right, how, on, left_on, right_on, left_index, right_index, sort, suffixes, copy, indicator, strategy, validate)
350 left_index=left_index, right_index=right_index, sort=sort, suffixes=suffixes,
351 copy=copy, indicator=indicator, validate=validate, object_type=ObjectType.dataframe)
--> 352 return op(df, right)
353
354
~/Workspace/mars/mars/dataframe/merge/merge.py in __call__(self, left, right)
174 left_index=self.left_index, right_index=self.right_index,
175 sort=self.sort, suffixes=self.suffixes,
--> 176 copy=self.copy_, indicator=self.indicator, validate=self.validate)
177
178 # the `index_value` doesn't matter.
~/miniconda3/lib/python3.7/site-packages/pandas/core/frame.py in merge(self, right, how, on, left_on, right_on, left_index, right_index, sort, suffixes, copy, indicator, validate)
7292 copy=copy,
7293 indicator=indicator,
-> 7294 validate=validate,
7295 )
7296
~/miniconda3/lib/python3.7/site-packages/pandas/core/reshape/merge.py in merge(left, right, how, on, left_on, right_on, left_index, right_index, sort, suffixes, copy, indicator, validate)
84 copy=copy,
85 indicator=indicator,
---> 86 validate=validate,
87 )
88 return op.get_result()
~/miniconda3/lib/python3.7/site-packages/pandas/core/reshape/merge.py in __init__(self, left, right, how, on, left_on, right_on, axis, left_index, right_index, sort, suffixes, copy, indicator, validate)
625 self.right_join_keys,
626 self.join_names,
--> 627 ) = self._get_merge_keys()
628
629 # validate the merge keys dtypes. We may need to coerce
~/miniconda3/lib/python3.7/site-packages/pandas/core/reshape/merge.py in _get_merge_keys(self)
981 if not is_rkey(rk):
982 if rk is not None:
--> 983 right_keys.append(right._get_label_or_level_values(rk))
984 else:
985 # work-around for merge_asof(right_index=True)
~/miniconda3/lib/python3.7/site-packages/pandas/core/generic.py in _get_label_or_level_values(self, key, axis)
1689 values = self.axes[axis].get_level_values(key)._values
1690 else:
-> 1691 raise KeyError(key)
1692
1693 # Check for duplicates
KeyError: 'a'
|
KeyError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.