Dataset Viewer
Columns:
after_merge: string, lengths 64 to 17k
before_merge: string, lengths 60 to 17k
source code and errors: string, lengths 236 to 32.3k
full_traceback: string, lengths 170 to 17.7k
traceback_type: string, 60 distinct values
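The preview rows below can be pulled programmatically with the Hugging Face datasets library. A minimal sketch, assuming the dataset exposes a default "train" split; the repository ID "owner/bugfix-tracebacks" is a placeholder, substitute the actual namespace/name shown on the hub page:

    from datasets import load_dataset

    # Placeholder repository ID; replace with the actual "namespace/name" of this dataset.
    ds = load_dataset("owner/bugfix-tracebacks", split="train")

    # Each record pairs a buggy function (before_merge) with its fixed version
    # (after_merge), plus the traceback the bug produced and its exception class.
    row = ds[0]
    print(row["traceback_type"])        # e.g. "ImportError"
    print(row["full_traceback"][:300])  # truncated for display
    print(row["before_merge"][:300])
    print(row["after_merge"][:300])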
Row 1
after_merge:
def cmdb_get_mainline_object_topo(request, bk_biz_id, bk_supplier_account=''): """ @summary: 获取配置平台业务拓扑模型 @param request: @param bk_biz_id: @param bk_supplier_account: @return: """ kwargs = { 'bk_biz_id': bk_biz_id, 'bk_supplier_account': bk_supplier_account, } cl...
before_merge:
def cmdb_get_mainline_object_topo(request, bk_biz_id, bk_supplier_account=''): """ @summary: 获取配置平台业务拓扑模型 @param request: @param bk_biz_id: @param bk_supplier_account: @return: """ kwargs = { 'bk_biz_id': bk_biz_id, 'bk_supplier_account': bk_supplier_account, } cl...
source code and errors:
[{'piece_type': 'error message', 'piece_content': '------STARTING: Migrate Database------\\nTraceback (most recent call last):\\nFile "manage.py", line 27, in <module>\\nexecute_from_command_line(sys.argv)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_...
full_traceback:
------STARTING: Migrate Database------ Traceback (most recent call last): File "manage.py", line 27, in <module> execute_from_command_line(sys.argv) File "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line utility.execute() File "/cache/.bk/env/lib/pyt...
traceback_type: ImportError

Row 2
after_merge:
def cc_search_object_attribute(request, obj_id, biz_cc_id, supplier_account): """ @summary: 获取对象自定义属性 @param request: @param biz_cc_id: @return: """ client = get_client_by_user(request.user.username) kwargs = { 'bk_obj_id': obj_id, 'bk_supplier_account': supplier_account ...
before_merge:
def cc_search_object_attribute(request, obj_id, biz_cc_id, supplier_account): """ @summary: 获取对象自定义属性 @param request: @param biz_cc_id: @return: """ client = get_client_by_request(request) kwargs = { 'bk_obj_id': obj_id, 'bk_supplier_account': supplier_account } c...
source code and errors:
[{'piece_type': 'error message', 'piece_content': '------STARTING: Migrate Database------\\nTraceback (most recent call last):\\nFile "manage.py", line 27, in <module>\\nexecute_from_command_line(sys.argv)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_...
full_traceback:
------STARTING: Migrate Database------ Traceback (most recent call last): File "manage.py", line 27, in <module> execute_from_command_line(sys.argv) File "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line utility.execute() File "/cache/.bk/env/lib/pyt...
traceback_type: ImportError

Row 3
after_merge:
def cc_search_create_object_attribute(request, obj_id, biz_cc_id, supplier_account): client = get_client_by_user(request.user.username) kwargs = { 'bk_obj_id': obj_id, 'bk_supplier_account': supplier_account } cc_result = client.cc.search_object_attribute(kwargs) if not cc_result['re...
before_merge:
def cc_search_create_object_attribute(request, obj_id, biz_cc_id, supplier_account): client = get_client_by_request(request) kwargs = { 'bk_obj_id': obj_id, 'bk_supplier_account': supplier_account } cc_result = client.cc.search_object_attribute(kwargs) if not cc_result['result']: ...
source code and errors:
[{'piece_type': 'error message', 'piece_content': '------STARTING: Migrate Database------\\nTraceback (most recent call last):\\nFile "manage.py", line 27, in <module>\\nexecute_from_command_line(sys.argv)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_...
full_traceback:
------STARTING: Migrate Database------ Traceback (most recent call last): File "manage.py", line 27, in <module> execute_from_command_line(sys.argv) File "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line utility.execute() File "/cache/.bk/env/lib/pyt...
traceback_type: ImportError

Row 4
after_merge:
def cc_search_topo(request, obj_id, category, biz_cc_id, supplier_account): """ @summary: 查询对象拓扑 @param request: @param biz_cc_id: @return: """ client = get_client_by_user(request.user.username) kwargs = { 'bk_biz_id': biz_cc_id, 'bk_supplier_account': supplier_account ...
before_merge:
def cc_search_topo(request, obj_id, category, biz_cc_id, supplier_account): """ @summary: 查询对象拓扑 @param request: @param biz_cc_id: @return: """ client = get_client_by_request(request) kwargs = { 'bk_biz_id': biz_cc_id, 'bk_supplier_account': supplier_account } cc_...
source code and errors:
[{'piece_type': 'error message', 'piece_content': '------STARTING: Migrate Database------\\nTraceback (most recent call last):\\nFile "manage.py", line 27, in <module>\\nexecute_from_command_line(sys.argv)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_...
full_traceback:
------STARTING: Migrate Database------ Traceback (most recent call last): File "manage.py", line 27, in <module> execute_from_command_line(sys.argv) File "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line utility.execute() File "/cache/.bk/env/lib/pyt...
traceback_type: ImportError

Row 5
after_merge:
def job_get_script_list(request, biz_cc_id): """ 查询业务脚本列表 :param request: :param biz_cc_id: :return: """ # 查询脚本列表 client = get_client_by_user(request.user.username) script_type = request.GET.get('type') kwargs = { 'bk_biz_id': biz_cc_id, 'is_public': True if scrip...
before_merge:
def job_get_script_list(request, biz_cc_id): """ 查询业务脚本列表 :param request: :param biz_cc_id: :return: """ # 查询脚本列表 client = get_client_by_request(request) script_type = request.GET.get('type') kwargs = { 'bk_biz_id': biz_cc_id, 'is_public': True if script_type == '...
source code and errors:
[{'piece_type': 'error message', 'piece_content': '------STARTING: Migrate Database------\\nTraceback (most recent call last):\\nFile "manage.py", line 27, in <module>\\nexecute_from_command_line(sys.argv)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_...
full_traceback:
------STARTING: Migrate Database------ Traceback (most recent call last): File "manage.py", line 27, in <module> execute_from_command_line(sys.argv) File "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line utility.execute() File "/cache/.bk/env/lib/pyt...
traceback_type: ImportError

Row 6
after_merge:
def job_get_job_tasks_by_biz(request, biz_cc_id): client = get_client_by_user(request.user.username) job_result = client.job.get_job_list({'bk_biz_id': biz_cc_id}) if not job_result['result']: message = _(u"查询作业平台(JOB)的作业模板[app_id=%s]接口job.get_task返回失败: %s") % ( biz_cc_id, job_result['me...
before_merge:
def job_get_job_tasks_by_biz(request, biz_cc_id): client = get_client_by_request(request) job_result = client.job.get_job_list({'bk_biz_id': biz_cc_id}) if not job_result['result']: message = _(u"查询作业平台(JOB)的作业模板[app_id=%s]接口job.get_task返回失败: %s") % ( biz_cc_id, job_result['message']) ...
source code and errors:
[{'piece_type': 'error message', 'piece_content': '------STARTING: Migrate Database------\\nTraceback (most recent call last):\\nFile "manage.py", line 27, in <module>\\nexecute_from_command_line(sys.argv)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_...
full_traceback:
------STARTING: Migrate Database------ Traceback (most recent call last): File "manage.py", line 27, in <module> execute_from_command_line(sys.argv) File "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line utility.execute() File "/cache/.bk/env/lib/pyt...
traceback_type: ImportError

Row 7
after_merge:
def job_get_job_task_detail(request, biz_cc_id, task_id): client = get_client_by_user(request.user.username) job_result = client.job.get_job_detail({'bk_biz_id': biz_cc_id, 'bk_job_id': task_id}) if not job_result['result']: message = _(u"查询作业平台(JOB)的作业模板详...
before_merge:
def job_get_job_task_detail(request, biz_cc_id, task_id): client = get_client_by_request(request) job_result = client.job.get_job_detail({'bk_biz_id': biz_cc_id, 'bk_job_id': task_id}) if not job_result['result']: message = _(u"查询作业平台(JOB)的作业模板详情[app_id=%s...
source code and errors:
[{'piece_type': 'error message', 'piece_content': '------STARTING: Migrate Database------\\nTraceback (most recent call last):\\nFile "manage.py", line 27, in <module>\\nexecute_from_command_line(sys.argv)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_...
full_traceback:
------STARTING: Migrate Database------ Traceback (most recent call last): File "manage.py", line 27, in <module> execute_from_command_line(sys.argv) File "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line utility.execute() File "/cache/.bk/env/lib/pyt...
traceback_type: ImportError

Row 8
after_merge:
def get_bk_user(request): bkuser = None if request.weixin_user and not isinstance(request.weixin_user, AnonymousUser): user_model = get_user_model() try: user_property = UserProperty.objects.get(key='wx_userid', value=request.weixin_user.userid) except UserProperty.DoesNotExi...
before_merge:
def get_bk_user(request): bkuser = None if request.weixin_user and not isinstance(request.weixin_user, AnonymousUser): try: user_property = UserProperty.objects.get(key='wx_userid', value=request.weixin_user.userid) bkuser = user_property.user except UserProperty.DoesNotE...
source code and errors:
[{'piece_type': 'error message', 'piece_content': '------STARTING: Migrate Database------\\nTraceback (most recent call last):\\nFile "manage.py", line 27, in <module>\\nexecute_from_command_line(sys.argv)\\nFile "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_...
full_traceback:
------STARTING: Migrate Database------ Traceback (most recent call last): File "manage.py", line 27, in <module> execute_from_command_line(sys.argv) File "/cache/.bk/env/lib/python2.7/site-packages/django/core/management/__init__.py", line 354, in execute_from_command_line utility.execute() File "/cache/.bk/env/lib/pyt...
traceback_type: ImportError

Row 9
after_merge:
def fit(self, dataset: Dataset): """Calculates statistics for this workflow on the input dataset Parameters ----------- dataset: Dataset The input dataset to calculate statistics for. If there is a train/test split this data should be the training dataset onl...
before_merge:
def fit(self, dataset: Dataset): """Calculates statistics for this workflow on the input dataset Parameters ----------- dataset: Dataset The input dataset to calculate statistics for. If there is a train/test split this data should be the training dataset onl...
source code and errors:
[{'piece_type': 'error message', 'piece_content': 'E0224 15:58:10.330248 178 model_repository_manager.cc:963] failed to load \\'amazonreview_tf\\' version 1: Internal: unable to create stream: the provided PTX was compiled with an unsupported toolchain.\\n/nvtabular/nvtabular/workflow.py:236: UserWarning: Loading workf...
full_traceback:
E0224 15:58:10.330248 178 model_repository_manager.cc:963] failed to load 'amazonreview_tf' version 1: Internal: unable to create stream: the provided PTX was compiled with an unsupported toolchain. /nvtabular/nvtabular/workflow.py:236: UserWarning: Loading workflow generated with cudf version 0+untagged.1.gbd321d1 - b...
traceback_type: TypeError

Row 10
after_merge:
def main(args): """Multi-GPU Criteo/DLRM Preprocessing Benchmark This benchmark is designed to measure the time required to preprocess the Criteo (1TB) dataset for Facebook’s DLRM model. The user must specify the path of the raw dataset (using the `--data-path` flag), as well as the output directo...
before_merge:
def main(args): """Multi-GPU Criteo/DLRM Preprocessing Benchmark This benchmark is designed to measure the time required to preprocess the Criteo (1TB) dataset for Facebook’s DLRM model. The user must specify the path of the raw dataset (using the `--data-path` flag), as well as the output directo...
source code and errors:
[{'piece_type': 'error message', 'piece_content': '(rapids) root@dafff4b22f48:/nvtabular# python examples/dask-nvtabular-criteo-benchmark.py -d 0,1,2,3,4,5,6,7 --data-path gs://merlin-datasets/crit_int_pq --out-path gs://merlin-datasets/output --freq-limit 0 --part-mem-frac 0.12 --device-limit-f\\nrac 0.7 --device-poo...
full_traceback:
(rapids) root@dafff4b22f48:/nvtabular# python examples/dask-nvtabular-criteo-benchmark.py -d 0,1,2,3,4,5,6,7 --data-path gs://merlin-datasets/crit_int_pq --out-path gs://merlin-datasets/output --freq-limit 0 --part-mem-frac 0.12 --device-limit-f rac 0.7 --device-pool-frac 0.8 distributed.worker - WARNING - Compute Fa...
traceback_type: FileNotFoundError

Row 11
after_merge:
def __init__(self, out_dir, **kwargs): super().__init__(out_dir, **kwargs) self.data_paths = [] self.data_files = [] self.data_writers = [] self.data_bios = [] self._lock = threading.RLock() self.pwriter = self._pwriter self.pwriter_kwargs = {}
before_merge:
def __init__(self, out_dir, **kwargs): super().__init__(out_dir, **kwargs) self.data_paths = [] self.data_writers = [] self.data_bios = [] self._lock = threading.RLock() self.pwriter = self._pwriter self.pwriter_kwargs = {}
source code and errors:
[{'piece_type': 'error message', 'piece_content': '(rapids) root@dafff4b22f48:/nvtabular# python examples/dask-nvtabular-criteo-benchmark.py -d 0,1,2,3,4,5,6,7 --data-path gs://merlin-datasets/crit_int_pq --out-path gs://merlin-datasets/output --freq-limit 0 --part-mem-frac 0.12 --device-limit-f\\nrac 0.7 --device-poo...
full_traceback:
(rapids) root@dafff4b22f48:/nvtabular# python examples/dask-nvtabular-criteo-benchmark.py -d 0,1,2,3,4,5,6,7 --data-path gs://merlin-datasets/crit_int_pq --out-path gs://merlin-datasets/output --freq-limit 0 --part-mem-frac 0.12 --device-limit-f rac 0.7 --device-pool-frac 0.8 distributed.worker - WARNING - Compute Fa...
traceback_type: FileNotFoundError

Row 12
after_merge:
def _append_writer(self, path, schema=None, add_args=None, add_kwargs=None): # Add additional args and kwargs _args = add_args or [] _kwargs = tlz.merge(self.pwriter_kwargs, add_kwargs or {}) if self.bytes_io: bio = BytesIO() self.data_bios.append(bio) ...
before_merge:
def _append_writer(self, path, schema=None, add_args=None, add_kwargs=None): # Add additional args and kwargs _args = add_args or [] _kwargs = tlz.merge(self.pwriter_kwargs, add_kwargs or {}) if self.bytes_io: bio = BytesIO() self.data_bios.append(bio) ...
source code and errors:
[{'piece_type': 'error message', 'piece_content': '(rapids) root@dafff4b22f48:/nvtabular# python examples/dask-nvtabular-criteo-benchmark.py -d 0,1,2,3,4,5,6,7 --data-path gs://merlin-datasets/crit_int_pq --out-path gs://merlin-datasets/output --freq-limit 0 --part-mem-frac 0.12 --device-limit-f\\nrac 0.7 --device-poo...
full_traceback:
(rapids) root@dafff4b22f48:/nvtabular# python examples/dask-nvtabular-criteo-benchmark.py -d 0,1,2,3,4,5,6,7 --data-path gs://merlin-datasets/crit_int_pq --out-path gs://merlin-datasets/output --freq-limit 0 --part-mem-frac 0.12 --device-limit-f rac 0.7 --device-pool-frac 0.8 distributed.worker - WARNING - Compute Fa...
traceback_type: FileNotFoundError

Row 13
after_merge:
def _close_writers(self): md_dict = {} for writer, path in zip(self.data_writers, self.data_paths): fn = path.split(self.fs.sep)[-1] md_dict[fn] = writer.close(metadata_file_path=fn) for f in self.data_files: f.close() return md_dict
before_merge:
def _close_writers(self): md_dict = {} for writer, path in zip(self.data_writers, self.data_paths): fn = path.split(self.fs.sep)[-1] md_dict[fn] = writer.close(metadata_file_path=fn) return md_dict
source code and errors:
[{'piece_type': 'error message', 'piece_content': '(rapids) root@dafff4b22f48:/nvtabular# python examples/dask-nvtabular-criteo-benchmark.py -d 0,1,2,3,4,5,6,7 --data-path gs://merlin-datasets/crit_int_pq --out-path gs://merlin-datasets/output --freq-limit 0 --part-mem-frac 0.12 --device-limit-f\\nrac 0.7 --device-poo...
full_traceback:
(rapids) root@dafff4b22f48:/nvtabular# python examples/dask-nvtabular-criteo-benchmark.py -d 0,1,2,3,4,5,6,7 --data-path gs://merlin-datasets/crit_int_pq --out-path gs://merlin-datasets/output --freq-limit 0 --part-mem-frac 0.12 --device-limit-f rac 0.7 --device-pool-frac 0.8 distributed.worker - WARNING - Compute Fa...
traceback_type: FileNotFoundError

Row 14
after_merge:
def fetch_table_data( table_cache, path, cache="disk", cats_only=False, reader=None, columns=None, **kwargs ): """Utility to retrieve a cudf DataFrame from a cache (and add the DataFrame to a cache if the element is missing). Note that `cats_only=True` results in optimized logic for the `Categorify` tr...
before_merge:
def fetch_table_data( table_cache, path, cache="disk", cats_only=False, reader=None, columns=None, **kwargs ): """Utility to retrieve a cudf DataFrame from a cache (and add the DataFrame to a cache if the element is missing). Note that `cats_only=True` results in optimized logic for the `Categorify` tr...
source code and errors:
[{'piece_type': 'error message', 'piece_content': '(rapids) root@dafff4b22f48:/nvtabular# python examples/dask-nvtabular-criteo-benchmark.py -d 0,1,2,3,4,5,6,7 --data-path gs://merlin-datasets/crit_int_pq --out-path gs://merlin-datasets/output --freq-limit 0 --part-mem-frac 0.12 --device-limit-f\\nrac 0.7 --device-poo...
full_traceback:
(rapids) root@dafff4b22f48:/nvtabular# python examples/dask-nvtabular-criteo-benchmark.py -d 0,1,2,3,4,5,6,7 --data-path gs://merlin-datasets/crit_int_pq --out-path gs://merlin-datasets/output --freq-limit 0 --part-mem-frac 0.12 --device-limit-f rac 0.7 --device-pool-frac 0.8 distributed.worker - WARNING - Compute Fa...
traceback_type: FileNotFoundError

Row 15
after_merge:
def _chunkwise_moments(df): df2 = cudf.DataFrame() for col in df.columns: df2[col] = df[col].astype("float64").pow(2) vals = { "df-count": df.count().to_frame().transpose(), "df-sum": df.sum().astype("float64").to_frame().transpose(), "df2-sum": df2.sum().to_frame().transpose...
before_merge:
def _chunkwise_moments(df): df2 = cudf.DataFrame() for col in df.columns: df2[col] = df[col].astype("float64").pow(2) vals = { "df-count": df.count().to_frame().transpose(), "df-sum": df.sum().to_frame().transpose(), "df2-sum": df2.sum().to_frame().transpose(), } # NO...
source code and errors:
[{'piece_type': 'error message', 'piece_content': '/opt/conda/envs/rapids/lib/python3.7/site-packages/pandas/core/series.py:726: RuntimeWarning: invalid value encountered in sqrt\\nresult = getattr(ufunc, method)(*inputs, **kwargs)\\n---------------------------------------------------------------------------\\nValueErr...
full_traceback:
/opt/conda/envs/rapids/lib/python3.7/site-packages/pandas/core/series.py:726: RuntimeWarning: invalid value encountered in sqrt result = getattr(ufunc, method)(*inputs, **kwargs) --------------------------------------------------------------------------- ValueError Traceback (most recent ...
traceback_type: ValueError

Row 16
after_merge:
def to_ddf(self, columns=None): return dask_cudf.read_parquet( self.paths, columns=columns, # can't omit reading the index in if we aren't being passed columns index=None if columns is None else False, gather_statistics=False, split_row...
before_merge:
def to_ddf(self, columns=None): return dask_cudf.read_parquet( self.paths, columns=columns, index=False, gather_statistics=False, split_row_groups=self.row_groups_per_part, storage_options=self.storage_options, )
source code and errors:
[{'piece_type': 'error message', 'piece_content': '---------------------------------------------------------------------------\\nValueError Traceback (most recent call last)\\n<ipython-input-13-b133e2b51cbf> in <module>\\n2 valid_dataset = nvt.Dataset(OUTPUT_BUCKET_FOLDER+\\'valid_gdf.par...
full_traceback:
--------------------------------------------------------------------------- ValueError Traceback (most recent call last) <ipython-input-13-b133e2b51cbf> in <module> 2 valid_dataset = nvt.Dataset(OUTPUT_BUCKET_FOLDER+'valid_gdf.parquet', part_mem_fraction=0.12) 3 ----> 4 workflow.apply(tra...
traceback_type: ValueError

Row 17
after_merge:
def get_ddf(self): if self.ddf is None: raise ValueError("No dask_cudf frame available.") elif isinstance(self.ddf, Dataset): # Right now we can't distinguish between input columns and generated columns # in the dataset, we don't limit the columm set right now in ...
before_merge:
def get_ddf(self): if self.ddf is None: raise ValueError("No dask_cudf frame available.") elif isinstance(self.ddf, Dataset): columns = self.columns_ctx["all"]["base"] return self.ddf.to_ddf(columns=columns, shuffle=self._shuffle_parts) return self.ddf
source code and errors:
[{'piece_type': 'error message', 'piece_content': '---------------------------------------------------------------------------\\nValueError Traceback (most recent call last)\\n<ipython-input-13-b133e2b51cbf> in <module>\\n2 valid_dataset = nvt.Dataset(OUTPUT_BUCKET_FOLDER+\\'valid_gdf.par...
full_traceback:
--------------------------------------------------------------------------- ValueError Traceback (most recent call last) <ipython-input-13-b133e2b51cbf> in <module> 2 valid_dataset = nvt.Dataset(OUTPUT_BUCKET_FOLDER+'valid_gdf.parquet', part_mem_fraction=0.12) 3 ----> 4 workflow.apply(tra...
traceback_type: ValueError

Row 18
after_merge:
def add_data(self, gdf): # Populate columns idxs if not self.col_idx: for i, x in enumerate(gdf.columns.values): self.col_idx[str(x)] = i # list columns in cudf don't currently support chunked writing in parquet. # hack around this by just writing a singl...
before_merge:
def add_data(self, gdf): # Populate columns idxs if not self.col_idx: for i, x in enumerate(gdf.columns.values): self.col_idx[str(x)] = i # list columns in cudf don't currently support chunked writing in parquet. # hack around this by just writing a singl...
source code and errors:
[{'piece_type': 'other', 'piece_content': "df = cudf.DataFrame({'doc_id': [1, 1, 2, 2, 3, 3, 4, 4], 'category_id': [1, 2, 3, 3, 5, 6, 6, 1], 'confidence_level': [0.92, 0.251, 0.352, 0.359, 0.978, 0.988, 0.978, 0.988]})\\n\\ndf_grouped = df.groupby('doc_id', as_index=False).agg({'category_id': ['collect'], 'confidence_l...
full_traceback:
--------------------------------------------------------------------------- AttributeError Traceback (most recent call last) <ipython-input-19-f93c44c3b381> in <module> 11 proc.add_preprocess(JoinExternal(df_grouped, on= ['doc_id'], on_ext= ['doc_id'], kind_ext=kind_ext, columns_ext=columns_e...
traceback_type: AttributeError

Row 19
after_merge:
def __init__( self, paths, part_size, storage_options, row_groups_per_part=None, legacy=False, batch_size=None, ): # TODO: Improve dask_cudf.read_parquet performance so that # this class can be slimmed down. super().__init__(paths, ...
before_merge:
def __init__( self, paths, part_size, storage_options, row_groups_per_part=None, legacy=False, batch_size=None, ): # TODO: Improve dask_cudf.read_parquet performance so that # this class can be slimmed down. super().__init__(paths, ...
source code and errors:
[{'piece_type': 'other', 'piece_content': 'python dataloader_bench.py torch <PATH TO Folder with Parquet Files on local> parquet 0.2'}, {'piece_type': 'error message', 'piece_content': 'Traceback (most recent call last):\\nFile "main.py", line 106, in <module>\\nmain(args)\\nFile "main.py", line 61, in main\\ntrain_pat...
full_traceback:
Traceback (most recent call last): File "main.py", line 106, in <module> main(args) File "main.py", line 61, in main train_paths, engine="parquet", part_mem_fraction=float(args.gpu_mem_frac) File "/root/miniconda/lib/python3.7/site-packages/nvtabular/io/dataset.py", line 224, in __init__ paths, part_size, storage_optio...
traceback_type: AttributeError

Row 20
after_merge:
def __init__(self, *args, **kwargs): super().__init__(*args) self._meta = {} self.csv_kwargs = kwargs self.names = self.csv_kwargs.get("names", None) # CSV reader needs a list of files # (Assume flat directory structure if this is a dir) if len(self.paths) == ...
before_merge:
def __init__(self, *args, **kwargs): super().__init__(*args) self._meta = {} self.names = kwargs.pop("names", None) self.csv_kwargs = kwargs # CSV reader needs a list of files # (Assume flat directory structure if this is a dir) if len(self.paths) == 1 and sel...
source code and errors:
[{'piece_type': 'error message', 'piece_content': "AttributeErrorTraceback (most recent call last)\\n<ipython-input-1-84910288ec3f> in <module>\\n44 del gdf\\n45 path_out = '/raid/criteo/tests/jp_csv_orig/'\\n---> 46 file_to_pq(train_set, 'csv', output_folder=path_out, cols=cols, dtypes=dtypes)\\n\\n<ipython-in...
full_traceback:
AttributeErrorTraceback (most recent call last) <ipython-input-1-84910288ec3f> in <module> 44 del gdf 45 path_out = '/raid/criteo/tests/jp_csv_orig/' ---> 46 file_to_pq(train_set, 'csv', output_folder=path_out, cols=cols, dtypes=dtypes) <ipython-input-1-84910288ec3f> in file_to_pq(target_files, file_type, outp...
traceback_type: AttributeError

Row 21
after_merge:
def to_ddf(self, columns=None): return dask_cudf.read_csv(self.paths, chunksize=self.part_size, **self.csv_kwargs)[columns]
before_merge:
def to_ddf(self, columns=None): return dask_cudf.read_csv( self.paths, names=self.names, chunksize=self.part_size, **self.csv_kwargs )[columns]
source code and errors:
[{'piece_type': 'error message', 'piece_content': "AttributeErrorTraceback (most recent call last)\\n<ipython-input-1-84910288ec3f> in <module>\\n44 del gdf\\n45 path_out = '/raid/criteo/tests/jp_csv_orig/'\\n---> 46 file_to_pq(train_set, 'csv', output_folder=path_out, cols=cols, dtypes=dtypes)\\n\\n<ipython-in...
full_traceback:
AttributeErrorTraceback (most recent call last) <ipython-input-1-84910288ec3f> in <module> 44 del gdf 45 path_out = '/raid/criteo/tests/jp_csv_orig/' ---> 46 file_to_pq(train_set, 'csv', output_folder=path_out, cols=cols, dtypes=dtypes) <ipython-input-1-84910288ec3f> in file_to_pq(target_files, file_type, outp...
traceback_type: AttributeError

Row 22
after_merge:
def _predict(self, X): """Collect results from clf.predict calls.""" if self.refit: return np.asarray([clf.predict(X) for clf in self.clfs_]).T else: return np.asarray([self.le_.transform(clf.predict(X)) for clf in self.clfs_]).T
before_merge:
def _predict(self, X): """Collect results from clf.predict calls.""" return np.asarray([clf.predict(X) for clf in self.clfs_]).T
source code and errors:
[{'piece_type': 'reproducing source code', 'piece_content': "import numpy as np\\nfrom sklearn.ensemble import RandomForestClassifier\\nfrom mlxtend.classifier import EnsembleVoteClassifier\\n\\ndata = np.array([0, 1, 2, 3, 0, 1, 2, 3])[:, np.newaxis]\\nlabels = ['a', 'b', 'c', 'd', 'a', 'b', 'c', 'd']\\n\\ntest = np.a...
full_traceback:
Traceback (most recent call last): File "/_mlxtend_bug/reproduce.py", line 16, in <module> print(clf.predict(test)) File "/venv/py3/lib/python3.4/site-packages/mlxtend/classifier/ensemble_vote.py", line 197, in predict arr=predictions) File "/venv/py3/lib/python3.4/site-packages/numpy/lib/shape_base.py", line 132, in a...
traceback_type: TypeError

Row 23
after_merge:
def transform( self, xx: Any, yy: Any, zz: Any = None, tt: Any = None, radians: bool = False, errcheck: bool = False, direction: Union[TransformDirection, str] = TransformDirection.FORWARD, ) -> Any: """ Transform points between two...
before_merge:
def transform( self, xx: Any, yy: Any, zz: Any = None, tt: Any = None, radians: bool = False, errcheck: bool = False, direction: Union[TransformDirection, str] = TransformDirection.FORWARD, ) -> Any: """ Transform points between two...
source code and errors:
[{'piece_type': 'other', 'piece_content': 'echo 50 25 0 | cct +proj=pipeline +ellps=GRS80 +step +proj=cart'}, {'piece_type': 'other', 'piece_content': '3717892.6072 4430811.8715 2679074.4629 inf'}, {'piece_type': 'source code', 'piece_content': 'from pyproj import Transformer\\n\\nstring = "+proj=pipeline ...
full_traceback:
Traceback (most recent call last): File "<stdin>", line 1, in <module> File "/usr/local/lib/python3.7/site-packages/pyproj/transformer.py", line 446, in transform errcheck=errcheck, File "pyproj/_transformer.pyx", line 463, in pyproj._transformer._Transformer._transform pyproj.exceptions.ProjError: transform error: lat...
traceback_type: pyproj.exceptions.ProjError
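Once the dataset is loaded as ds (as in the sketch above), whole columns can be scanned in bulk. A small sketch, using only the standard library on top of the datasets object, that tallies the traceback_type column and prints a unified diff of one buggy/fixed pair:

    import difflib
    from collections import Counter

    # Distribution of exception classes across the corpus
    # (the viewer reports 60 distinct values for traceback_type).
    print(Counter(ds["traceback_type"]).most_common(10))

    # Unified diff of one before_merge/after_merge pair.
    row = ds[0]
    diff = difflib.unified_diff(
        row["before_merge"].splitlines(),
        row["after_merge"].splitlines(),
        fromfile="before_merge",
        tofile="after_merge",
        lineterm="",
    )
    print("\n".join(diff))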