| Column | Type | Values |
|---|---|---|
| comments_url | string | lengths 70 to 70 |
| timeline_url | string | lengths 70 to 70 |
| closed_at | string | lengths 20 to 20 |
| performed_via_github_app | null | |
| state_reason | string | 3 values |
| node_id | string | lengths 18 to 32 |
| state | string | 2 values |
| assignees | list | lengths 0 to 4 |
| draft | bool | 2 classes |
| number | int64 | 1.61k to 6.73k |
| user | dict | |
| title | string | lengths 1 to 290 |
| events_url | string | lengths 68 to 68 |
| milestone | dict | |
| labels_url | string | lengths 75 to 75 |
| created_at | string | lengths 20 to 20 |
| active_lock_reason | null | |
| locked | bool | 1 class |
| assignee | dict | |
| pull_request | dict | |
| id | int64 | 771M to 2.18B |
| labels | list | lengths 0 to 4 |
| url | string | lengths 61 to 61 |
| comments | list | lengths 0 to 30 |
| repository_url | string | 1 value |
| author_association | string | 3 values |
| body | string | lengths 0 to 228k |
| updated_at | string | lengths 20 to 20 |
| html_url | string | lengths 49 to 51 |
| reactions | dict | |
| is_pull_request | bool | 2 classes |
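Each row below lists these fields for one issue or pull request from the huggingface/datasets repository, in the column order of the table above. As a minimal sketch of how a dataset with this schema could be inspected with the `datasets` library (the repo id `your-org/github-issues` is a placeholder, not the actual dataset name):

```python
from datasets import load_dataset

# "your-org/github-issues" is a placeholder repo id for a dataset with the schema above.
ds = load_dataset("your-org/github-issues", split="train")

# The features mirror the columns listed in the table.
print(ds.features["state"])
print(ds.features["is_pull_request"])

# Example: keep open issues that are not pull requests.
open_issues = ds.filter(lambda row: row["state"] == "open" and not row["is_pull_request"])
print(len(open_issues), "open issues")
```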
comments_url: https://api.github.com/repos/huggingface/datasets/issues/5282/comments
timeline_url: https://api.github.com/repos/huggingface/datasets/issues/5282/timeline
closed_at: 2022-11-22T17:21:27Z
performed_via_github_app: null
state_reason: null
node_id: PR_kwDODunzps5Det2_
state: closed
assignees: []
draft: false
number: 5,282
user:
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
title: Release: 2.7.1
events_url: https://api.github.com/repos/huggingface/datasets/issues/5282/events
milestone: null
labels_url: https://api.github.com/repos/huggingface/datasets/issues/5282/labels{/name}
created_at: 2022-11-22T16:58:54Z
active_lock_reason: null
locked: false
assignee: null
pull_request:
{ "diff_url": "https://github.com/huggingface/datasets/pull/5282.diff", "html_url": "https://github.com/huggingface/datasets/pull/5282", "merged_at": "2022-11-22T17:21:27Z", "patch_url": "https://github.com/huggingface/datasets/pull/5282.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5282" }
id: 1,460,238,928
labels: []
url: https://api.github.com/repos/huggingface/datasets/issues/5282
comments: [ "", "" ]
repository_url: https://api.github.com/repos/huggingface/datasets
author_association: MEMBER
body: null
updated_at: 2022-11-22T17:21:28Z
html_url: https://github.com/huggingface/datasets/pull/5282
reactions:
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5282/reactions" }
is_pull_request: true
comments_url: https://api.github.com/repos/huggingface/datasets/issues/5281/comments
timeline_url: https://api.github.com/repos/huggingface/datasets/issues/5281/timeline
closed_at: null
performed_via_github_app: null
state_reason: reopened
node_id: I_kwDODunzps5XBMSf
state: open
assignees: []
draft: null
number: 5,281
user:
{ "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" }
title: Support cloud storage in load_dataset
events_url: https://api.github.com/repos/huggingface/datasets/issues/5281/events
milestone: null
labels_url: https://api.github.com/repos/huggingface/datasets/issues/5281/labels{/name}
created_at: 2022-11-22T14:00:10Z
active_lock_reason: null
locked: false
assignee: null
pull_request: null
id: 1,459,930,271
labels:
[ { "color": "a2eeef", "default": true, "description": "New feature or request", "id": 1935892871, "name": "enhancement", "node_id": "MDU6TGFiZWwxOTM1ODkyODcx", "url": "https://api.github.com/repos/huggingface/datasets/labels/enhancement" }, { "color": "BDE59C", "default": false, "description": "Issues a bit more difficult than \"Good First\" issues", "id": 3761482852, "name": "good second issue", "node_id": "LA_kwDODunzps7gM6xk", "url": "https://api.github.com/repos/huggingface/datasets/labels/good%20second%20issue" } ]
url: https://api.github.com/repos/huggingface/datasets/issues/5281
comments: [ "", "" ]
repository_url: https://api.github.com/repos/huggingface/datasets
author_association: MEMBER
body:
Would be nice to be able to do ```python data_files=["s3://..."] # or gs:// or any cloud storage path storage_options = {...} load_dataset(..., data_files=data_files, storage_options=storage_options) ``` The idea would be to use `fsspec` as in `download_and_prepare` and `save_to_disk`. This has been requested several times already. Some users want to use their data from private cloud storage to train models related: https://github.com/huggingface/datasets/issues/3490 https://github.com/huggingface/datasets/issues/5244 [forum](https://discuss.huggingface.co/t/how-to-use-s3-path-with-load-dataset-with-streaming-true/25739/2)
updated_at: 2024-03-11T15:38:14Z
html_url: https://github.com/huggingface/datasets/issues/5281
reactions:
{ "+1": 26, "-1": 0, "confused": 0, "eyes": 0, "heart": 18, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 44, "url": "https://api.github.com/repos/huggingface/datasets/issues/5281/reactions" }
is_pull_request: false
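The feature request above (issue 5281) proposes passing fsspec-style paths and `storage_options` to `load_dataset`, reusing the mechanism already used by `download_and_prepare` and `save_to_disk`. A minimal sketch of that fsspec mechanism, assuming the s3fs backend is installed; the bucket, path, and credentials are placeholders:

```python
import fsspec

# Placeholder credentials; any fsspec-supported protocol (s3, gs, azure, ...) works the same way.
storage_options = {"key": "<access-key>", "secret": "<secret-key>"}

# fsspec resolves the protocol name and dispatches to the matching filesystem implementation (here s3fs).
fs = fsspec.filesystem("s3", **storage_options)
for path in fs.glob("my-bucket/data/*.jsonl"):  # placeholder bucket and path
    with fs.open(path, "r") as f:
        print(path, f.readline()[:80])
```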
comments_url: https://api.github.com/repos/huggingface/datasets/issues/5280/comments
timeline_url: https://api.github.com/repos/huggingface/datasets/issues/5280/timeline
closed_at: 2022-12-15T19:57:40Z
performed_via_github_app: null
state_reason: completed
node_id: I_kwDODunzps5XAyJL
state: closed
assignees: []
draft: null
number: 5,280
user:
{ "avatar_url": "https://avatars.githubusercontent.com/u/40760055?v=4", "events_url": "https://api.github.com/users/feketedavid1012/events{/privacy}", "followers_url": "https://api.github.com/users/feketedavid1012/followers", "following_url": "https://api.github.com/users/feketedavid1012/following{/other_user}", "gists_url": "https://api.github.com/users/feketedavid1012/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/feketedavid1012", "id": 40760055, "login": "feketedavid1012", "node_id": "MDQ6VXNlcjQwNzYwMDU1", "organizations_url": "https://api.github.com/users/feketedavid1012/orgs", "received_events_url": "https://api.github.com/users/feketedavid1012/received_events", "repos_url": "https://api.github.com/users/feketedavid1012/repos", "site_admin": false, "starred_url": "https://api.github.com/users/feketedavid1012/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/feketedavid1012/subscriptions", "type": "User", "url": "https://api.github.com/users/feketedavid1012" }
title: Import error
events_url: https://api.github.com/repos/huggingface/datasets/issues/5280/events
milestone: null
labels_url: https://api.github.com/repos/huggingface/datasets/issues/5280/labels{/name}
created_at: 2022-11-22T12:56:43Z
active_lock_reason: null
locked: false
assignee: null
pull_request: null
id: 1,459,823,179
labels: []
url: https://api.github.com/repos/huggingface/datasets/issues/5280
comments: [ "", "" ]
repository_url: https://api.github.com/repos/huggingface/datasets
author_association: NONE
body:
https://github.com/huggingface/datasets/blob/cd3d8e637cfab62d352a3f4e5e60e96597b5f0e9/src/datasets/__init__.py#L28 Hy, I have error at the above line. I have python version 3.8.13, the message says I need python>=3.7, which is True, but I think the if statement not working properly (or the message wrong)
updated_at: 2022-12-15T19:57:40Z
html_url: https://github.com/huggingface/datasets/issues/5280
reactions:
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5280/reactions" }
is_pull_request: false
comments_url: https://api.github.com/repos/huggingface/datasets/issues/5279/comments
timeline_url: https://api.github.com/repos/huggingface/datasets/issues/5279/timeline
closed_at: 2022-11-23T09:47:02Z
performed_via_github_app: null
state_reason: null
node_id: PR_kwDODunzps5Dcoue
state: closed
assignees: []
draft: false
number: 5,279
user:
{ "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" }
title: Warn about checksums
events_url: https://api.github.com/repos/huggingface/datasets/issues/5279/events
milestone: null
labels_url: https://api.github.com/repos/huggingface/datasets/issues/5279/labels{/name}
created_at: 2022-11-22T10:58:48Z
active_lock_reason: null
locked: false
assignee: null
pull_request:
{ "diff_url": "https://github.com/huggingface/datasets/pull/5279.diff", "html_url": "https://github.com/huggingface/datasets/pull/5279", "merged_at": "2022-11-23T09:47:01Z", "patch_url": "https://github.com/huggingface/datasets/pull/5279.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5279" }
id: 1,459,635,002
labels: []
url: https://api.github.com/repos/huggingface/datasets/issues/5279
comments: [ "", "" ]
repository_url: https://api.github.com/repos/huggingface/datasets
author_association: MEMBER
body:
It takes a lot of time on big datasets to compute the checksums, we should at least add a warning to notify the user about this step. I also mentioned how to disable it, and added a tqdm bar (delay=5 seconds) cc @ola13
updated_at: 2022-11-23T11:43:50Z
html_url: https://github.com/huggingface/datasets/pull/5279
reactions:
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 1, "total_count": 1, "url": "https://api.github.com/repos/huggingface/datasets/issues/5279/reactions" }
is_pull_request: true
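The PR above (5279) adds a warning because computing checksums on large downloads is slow, and its description notes how to disable the verification. A hedged sketch of doing so, assuming the `ignore_verifications` flag exposed by `datasets` releases contemporary with this PR (newer releases expose this via `verification_mode` instead); the dataset name is a placeholder:

```python
from datasets import load_dataset

# "some_org/some_dataset" is a placeholder. Skipping verification avoids the
# checksum computation that the PR above adds a warning (and a tqdm bar) for.
ds = load_dataset("some_org/some_dataset", ignore_verifications=True)
```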
comments_url: https://api.github.com/repos/huggingface/datasets/issues/5278/comments
timeline_url: https://api.github.com/repos/huggingface/datasets/issues/5278/timeline
closed_at: 2022-11-23T11:38:35Z
performed_via_github_app: null
state_reason: completed
node_id: I_kwDODunzps5W_1ba
state: closed
assignees: []
draft: null
number: 5,278
user:
{ "avatar_url": "https://avatars.githubusercontent.com/u/81414263?v=4", "events_url": "https://api.github.com/users/065294847/events{/privacy}", "followers_url": "https://api.github.com/users/065294847/followers", "following_url": "https://api.github.com/users/065294847/following{/other_user}", "gists_url": "https://api.github.com/users/065294847/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/065294847", "id": 81414263, "login": "065294847", "node_id": "MDQ6VXNlcjgxNDE0MjYz", "organizations_url": "https://api.github.com/users/065294847/orgs", "received_events_url": "https://api.github.com/users/065294847/received_events", "repos_url": "https://api.github.com/users/065294847/repos", "site_admin": false, "starred_url": "https://api.github.com/users/065294847/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/065294847/subscriptions", "type": "User", "url": "https://api.github.com/users/065294847" }
title: load_dataset does not read jsonl metadata file properly
events_url: https://api.github.com/repos/huggingface/datasets/issues/5278/events
milestone: null
labels_url: https://api.github.com/repos/huggingface/datasets/issues/5278/labels{/name}
created_at: 2022-11-22T10:24:46Z
active_lock_reason: null
locked: false
assignee: null
pull_request: null
id: 1,459,574,490
labels: []
url: https://api.github.com/repos/huggingface/datasets/issues/5278
comments: [ "", "" ]
repository_url: https://api.github.com/repos/huggingface/datasets
author_association: NONE
body:
### Describe the bug Hi, I'm following [this page](https://huggingface.co/docs/datasets/image_dataset) to create a dataset of images and captions via an image folder and a metadata.json file, but I can't seem to get the dataloader to recognize the "text" column. It just spits out "image" and "label" as features. Below is code to reproduce my exact example/problem. ### Steps to reproduce the bug ```ruby dataset_link="19Unu89Ih_kP6zsE7f9Mkw8dy3NwHopRF" id = dataset_link output = 'Godardv01.zip' gdown.download(id=id, output=output, quiet=False) ds = load_dataset("imagefolder", data_dir="/kaggle/working/Volumes/TOSHIBA/Godard_imgs/Volumes/TOSHIBA/Godard_imgs/Full/train", split="train", drop_labels=False) print(ds) ``` ### Expected behavior I would expect that it returned "image" and "text" columns from the code above. ### Environment info - `datasets` version: 2.1.0 - Platform: Linux-5.15.65+-x86_64-with-debian-bullseye-sid - Python version: 3.7.12 - PyArrow version: 5.0.0 - Pandas version: 1.3.5
updated_at: 2023-02-14T14:48:16Z
html_url: https://github.com/huggingface/datasets/issues/5278
reactions:
{ "+1": 1, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 1, "url": "https://api.github.com/repos/huggingface/datasets/issues/5278/reactions" }
is_pull_request: false
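Issue 5278 above is about the imagefolder loader not picking up a caption column from a metadata file. For reference, the loader looks for a `metadata.jsonl` (or `metadata.csv`) file stored next to the images, with a `file_name` column plus any extra columns that should become features. A small sketch of generating such a file; the directory, file names, and captions are placeholders:

```python
import json
from pathlib import Path

# Hypothetical layout: images in ./train plus a metadata.jsonl sitting next to them.
data_dir = Path("train")
rows = [
    {"file_name": "0001.png", "text": "placeholder caption one"},
    {"file_name": "0002.png", "text": "placeholder caption two"},
]
with open(data_dir / "metadata.jsonl", "w") as f:
    for row in rows:
        f.write(json.dumps(row) + "\n")

# from datasets import load_dataset
# ds = load_dataset("imagefolder", data_dir=str(data_dir), split="train")
# print(ds.features)  # expected to include "image" and "text"
```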
comments_url: https://api.github.com/repos/huggingface/datasets/issues/5277/comments
timeline_url: https://api.github.com/repos/huggingface/datasets/issues/5277/timeline
closed_at: 2022-11-22T13:55:49Z
performed_via_github_app: null
state_reason: null
node_id: PR_kwDODunzps5Dbybu
state: closed
assignees: []
draft: false
number: 5,277
user:
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
title: Remove YAML integer keys from class_label metadata
events_url: https://api.github.com/repos/huggingface/datasets/issues/5277/events
milestone: null
labels_url: https://api.github.com/repos/huggingface/datasets/issues/5277/labels{/name}
created_at: 2022-11-22T08:34:07Z
active_lock_reason: null
locked: false
assignee: null
pull_request:
{ "diff_url": "https://github.com/huggingface/datasets/pull/5277.diff", "html_url": "https://github.com/huggingface/datasets/pull/5277", "merged_at": "2022-11-22T13:55:49Z", "patch_url": "https://github.com/huggingface/datasets/pull/5277.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5277" }
id: 1,459,388,551
labels: []
url: https://api.github.com/repos/huggingface/datasets/issues/5277
comments: [ "", "" ]
repository_url: https://api.github.com/repos/huggingface/datasets
author_association: MEMBER
body: Fix partially #5275.
updated_at: 2022-11-22T13:58:26Z
html_url: https://github.com/huggingface/datasets/pull/5277
reactions:
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5277/reactions" }
is_pull_request: true
comments_url: https://api.github.com/repos/huggingface/datasets/issues/5276/comments
timeline_url: https://api.github.com/repos/huggingface/datasets/issues/5276/timeline
closed_at: 2023-07-21T14:33:10Z
performed_via_github_app: null
state_reason: completed
node_id: I_kwDODunzps5W_B5y
state: closed
assignees: []
draft: null
number: 5,276
user:
{ "avatar_url": "https://avatars.githubusercontent.com/u/48530104?v=4", "events_url": "https://api.github.com/users/capsabogdan/events{/privacy}", "followers_url": "https://api.github.com/users/capsabogdan/followers", "following_url": "https://api.github.com/users/capsabogdan/following{/other_user}", "gists_url": "https://api.github.com/users/capsabogdan/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/capsabogdan", "id": 48530104, "login": "capsabogdan", "node_id": "MDQ6VXNlcjQ4NTMwMTA0", "organizations_url": "https://api.github.com/users/capsabogdan/orgs", "received_events_url": "https://api.github.com/users/capsabogdan/received_events", "repos_url": "https://api.github.com/users/capsabogdan/repos", "site_admin": false, "starred_url": "https://api.github.com/users/capsabogdan/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/capsabogdan/subscriptions", "type": "User", "url": "https://api.github.com/users/capsabogdan" }
title: Bug in downloading common_voice data and snall chunk of it to one's own hub
events_url: https://api.github.com/repos/huggingface/datasets/issues/5276/events
milestone: null
labels_url: https://api.github.com/repos/huggingface/datasets/issues/5276/labels{/name}
created_at: 2022-11-22T08:17:53Z
active_lock_reason: null
locked: false
assignee: null
pull_request: null
id: 1,459,363,442
labels: []
url: https://api.github.com/repos/huggingface/datasets/issues/5276
comments: [ "", "" ]
repository_url: https://api.github.com/repos/huggingface/datasets
author_association: NONE
body:
### Describe the bug I'm trying to load the common voice dataset. Currently there is no implementation to download just par tof the data, and I need just one part of it, without downloading the entire dataset Help please? ![image](https://user-images.githubusercontent.com/48530104/203260511-26df766f-6013-4eaf-be26-8aa13794def2.png) ### Steps to reproduce the bug So here is what I have done: 1. Download common_voice data 2. Trim part of it and publish it to my own repo. 3. Download data from my own repo, but am getting this error. ### Expected behavior There shouldn't be an error in downloading part of the data and publishing it to one's own repo ### Environment info common_voice 11
updated_at: 2023-07-21T14:33:10Z
html_url: https://github.com/huggingface/datasets/issues/5276
reactions:
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5276/reactions" }
is_pull_request: false
comments_url: https://api.github.com/repos/huggingface/datasets/issues/5275/comments
timeline_url: https://api.github.com/repos/huggingface/datasets/issues/5275/timeline
closed_at: 2023-01-26T10:40:21Z
performed_via_github_app: null
state_reason: completed
node_id: I_kwDODunzps5W_AzH
state: closed
assignees:
[ { "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" } ]
draft: null
number: 5,275
user:
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
title: YAML integer keys are not preserved Hub server-side
events_url: https://api.github.com/repos/huggingface/datasets/issues/5275/events
milestone: null
labels_url: https://api.github.com/repos/huggingface/datasets/issues/5275/labels{/name}
created_at: 2022-11-22T08:14:47Z
active_lock_reason: null
locked: false
assignee:
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
pull_request: null
id: 1,459,358,919
labels:
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
url: https://api.github.com/repos/huggingface/datasets/issues/5275
comments: [ "", "" ]
repository_url: https://api.github.com/repos/huggingface/datasets
author_association: MEMBER
body:
After an internal discussion (https://github.com/huggingface/moon-landing/issues/4563): - YAML integer keys are not preserved server-side: they are transformed to strings - See for example this Hub PR: https://huggingface.co/datasets/acronym_identification/discussions/1/files - Original: ```yaml class_label: names: 0: B-long 1: B-short ``` - Returned by the server: ```yaml class_label: names: '0': B-long '1': B-short ``` - They are planning to enforce only string keys - Other projects already use interger-transformed-to string keys: e.g. `transformers` models `id2label`: https://huggingface.co/roberta-large-mnli/blob/main/config.json ```yaml "id2label": { "0": "CONTRADICTION", "1": "NEUTRAL", "2": "ENTAILMENT" } ``` On the other hand, at `datasets` we are currently using YAML integer keys for `dataset_info` `class_label`. Please note (thanks @lhoestq for pointing out) that previous versions (2.6 and 2.7) of `datasets` need being patched: ```python In [18]: Features._from_yaml_list([{'dtype': {'class_label': {'names': {'0': 'neg', '1': 'pos'}}}, 'name': 'label'}]) --------------------------------------------------------------------------- TypeError Traceback (most recent call last) <ipython-input-18-974f07eea526> in <module> ----> 1 Features._from_yaml_list(ry) ~/Desktop/hf/nlp/src/datasets/features/features.py in _from_yaml_list(cls, yaml_data) 1743 raise TypeError(f"Expected a dict or a list but got {type(obj)}: {obj}") 1744 -> 1745 return cls.from_dict(from_yaml_inner(yaml_data)) 1746 1747 def encode_example(self, example): ~/Desktop/hf/nlp/src/datasets/features/features.py in from_yaml_inner(obj) 1739 elif isinstance(obj, list): 1740 names = [_feature.pop("name") for _feature in obj] -> 1741 return {name: from_yaml_inner(_feature) for name, _feature in zip(names, obj)} 1742 else: 1743 raise TypeError(f"Expected a dict or a list but got {type(obj)}: {obj}") ~/Desktop/hf/nlp/src/datasets/features/features.py in <dictcomp>(.0) 1739 elif isinstance(obj, list): 1740 names = [_feature.pop("name") for _feature in obj] -> 1741 return {name: from_yaml_inner(_feature) for name, _feature in zip(names, obj)} 1742 else: 1743 raise TypeError(f"Expected a dict or a list but got {type(obj)}: {obj}") ~/Desktop/hf/nlp/src/datasets/features/features.py in from_yaml_inner(obj) 1734 return {"_type": snakecase_to_camelcase(obj["dtype"])} 1735 else: -> 1736 return from_yaml_inner(obj["dtype"]) 1737 else: 1738 return {"_type": snakecase_to_camelcase(_type), **unsimplify(obj)[_type]} ~/Desktop/hf/nlp/src/datasets/features/features.py in from_yaml_inner(obj) 1736 return from_yaml_inner(obj["dtype"]) 1737 else: -> 1738 return {"_type": snakecase_to_camelcase(_type), **unsimplify(obj)[_type]} 1739 elif isinstance(obj, list): 1740 names = [_feature.pop("name") for _feature in obj] ~/Desktop/hf/nlp/src/datasets/features/features.py in unsimplify(feature) 1704 if isinstance(feature.get("class_label"), dict) and isinstance(feature["class_label"].get("names"), dict): 1705 label_ids = sorted(feature["class_label"]["names"]) -> 1706 if label_ids and label_ids != list(range(label_ids[-1] + 1)): 1707 raise ValueError( 1708 f"ClassLabel expected a value for all label ids [0:{label_ids[-1] + 1}] but some ids are missing." 
TypeError: can only concatenate str (not "int") to str ``` TODO: - [x] Remove YAML integer keys from `dataset_info` metadata - [x] Make a patch release for affected `datasets` versions: 2.6 and 2.7 - [x] Communicate on the fix - [x] Wait for adoption - [x] Bulk edit the Hub to fix this in all canonical datasets
updated_at: 2023-01-26T10:52:35Z
html_url: https://github.com/huggingface/datasets/issues/5275
reactions:
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 1, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 1, "url": "https://api.github.com/repos/huggingface/datasets/issues/5275/reactions" }
is_pull_request: false
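The server-side rewrite described in issue 5275 above is standard YAML typing: unquoted keys such as `0` parse as integers, while quoted keys parse as strings. A small PyYAML sketch of the two forms quoted in the issue:

```python
import yaml

original = """
class_label:
  names:
    0: B-long
    1: B-short
"""
returned_by_server = """
class_label:
  names:
    '0': B-long
    '1': B-short
"""

# Unquoted keys come back as Python ints, quoted keys as strings.
print(yaml.safe_load(original)["class_label"]["names"])            # {0: 'B-long', 1: 'B-short'}
print(yaml.safe_load(returned_by_server)["class_label"]["names"])  # {'0': 'B-long', '1': 'B-short'}
```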
comments_url: https://api.github.com/repos/huggingface/datasets/issues/5274/comments
timeline_url: https://api.github.com/repos/huggingface/datasets/issues/5274/timeline
closed_at: 2022-11-28T02:50:42Z
performed_via_github_app: null
state_reason: completed
node_id: I_kwDODunzps5W8S23
state: closed
assignees: []
draft: null
number: 5,274
user:
{ "avatar_url": "https://avatars.githubusercontent.com/u/20826878?v=4", "events_url": "https://api.github.com/users/TristanThrush/events{/privacy}", "followers_url": "https://api.github.com/users/TristanThrush/followers", "following_url": "https://api.github.com/users/TristanThrush/following{/other_user}", "gists_url": "https://api.github.com/users/TristanThrush/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/TristanThrush", "id": 20826878, "login": "TristanThrush", "node_id": "MDQ6VXNlcjIwODI2ODc4", "organizations_url": "https://api.github.com/users/TristanThrush/orgs", "received_events_url": "https://api.github.com/users/TristanThrush/received_events", "repos_url": "https://api.github.com/users/TristanThrush/repos", "site_admin": false, "starred_url": "https://api.github.com/users/TristanThrush/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/TristanThrush/subscriptions", "type": "User", "url": "https://api.github.com/users/TristanThrush" }
title: load_dataset possibly broken for gated datasets?
events_url: https://api.github.com/repos/huggingface/datasets/issues/5274/events
milestone: null
labels_url: https://api.github.com/repos/huggingface/datasets/issues/5274/labels{/name}
created_at: 2022-11-21T21:59:53Z
active_lock_reason: null
locked: false
assignee: null
pull_request: null
id: 1,458,646,455
labels: []
url: https://api.github.com/repos/huggingface/datasets/issues/5274
comments: [ "", "" ]
repository_url: https://api.github.com/repos/huggingface/datasets
author_association: CONTRIBUTOR
body:
### Describe the bug When trying to download the [winoground dataset](https://huggingface.co/datasets/facebook/winoground), I get this error unless I roll back the version of huggingface-hub: ``` [/usr/local/lib/python3.7/dist-packages/huggingface_hub/utils/_validators.py](https://localhost:8080/#) in validate_repo_id(repo_id) 165 if repo_id.count("/") > 1: 166 raise HFValidationError( --> 167 "Repo id must be in the form 'repo_name' or 'namespace/repo_name':" 168 f" '{repo_id}'. Use `repo_type` argument if needed." 169 ) HFValidationError: Repo id must be in the form 'repo_name' or 'namespace/repo_name': 'datasets/facebook/winoground'. Use `repo_type` argument if needed ``` ### Steps to reproduce the bug Install requirements: ``` pip install transformers pip install datasets # It works if you uncomment the following line, rolling back huggingface hub: # pip install huggingface-hub==0.10.1 ``` Then: ``` from datasets import load_dataset auth_token = "" # Replace with an auth token, which you can get from your huggingface account: Profile -> Settings -> Access Tokens -> New Token winoground = load_dataset("facebook/winoground", use_auth_token=auth_token)["test"] ``` ### Expected behavior Downloading of the datset ### Environment info Just a google colab; see here: https://colab.research.google.com/drive/15wwOSte2CjTazdnCWYUm2VPlFbk2NGc0?usp=sharing
updated_at: 2023-05-27T00:06:14Z
html_url: https://github.com/huggingface/datasets/issues/5274
reactions:
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 1, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 1, "url": "https://api.github.com/repos/huggingface/datasets/issues/5274/reactions" }
is_pull_request: false
comments_url: https://api.github.com/repos/huggingface/datasets/issues/5273/comments
timeline_url: https://api.github.com/repos/huggingface/datasets/issues/5273/timeline
closed_at: null
performed_via_github_app: null
state_reason: null
node_id: I_kwDODunzps5W55cC
state: open
assignees: []
draft: null
number: 5,273
user:
{ "avatar_url": "https://avatars.githubusercontent.com/u/28439912?v=4", "events_url": "https://api.github.com/users/nomisto/events{/privacy}", "followers_url": "https://api.github.com/users/nomisto/followers", "following_url": "https://api.github.com/users/nomisto/following{/other_user}", "gists_url": "https://api.github.com/users/nomisto/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/nomisto", "id": 28439912, "login": "nomisto", "node_id": "MDQ6VXNlcjI4NDM5OTEy", "organizations_url": "https://api.github.com/users/nomisto/orgs", "received_events_url": "https://api.github.com/users/nomisto/received_events", "repos_url": "https://api.github.com/users/nomisto/repos", "site_admin": false, "starred_url": "https://api.github.com/users/nomisto/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/nomisto/subscriptions", "type": "User", "url": "https://api.github.com/users/nomisto" }
title: download_mode="force_redownload" does not refresh cached dataset
events_url: https://api.github.com/repos/huggingface/datasets/issues/5273/events
milestone: null
labels_url: https://api.github.com/repos/huggingface/datasets/issues/5273/labels{/name}
created_at: 2022-11-21T14:12:43Z
active_lock_reason: null
locked: false
assignee: null
pull_request: null
id: 1,458,018,050
labels: []
url: https://api.github.com/repos/huggingface/datasets/issues/5273
comments: [ "", "" ]
repository_url: https://api.github.com/repos/huggingface/datasets
author_association: NONE
body:
### Describe the bug `load_datasets` does not refresh dataset when features are imported from external file, even with `download_mode="force_redownload"`. The bug is not limited to nested fields, however it is more likely to occur with nested fields. ### Steps to reproduce the bug To reproduce the bug 3 files are needed: `dataset.py` (contains dataset loading script), `schema.py` (contains features of dataset) and `main.py` (to run `load_datasets`) `dataset.py` ```python import datasets from schema import features class NewDataset(datasets.GeneratorBasedBuilder): def _info(self): return datasets.DatasetInfo( features=features ) def _split_generators(self, dl_manager): return [ datasets.SplitGenerator( name=datasets.Split.TRAIN ) ] def _generate_examples(self): data = [ {"id": 0, "nested": []}, {"id": 1, "nested": []} ] for key, example in enumerate(data): yield key, example ``` `schema.py` ```python import datasets features = datasets.Features( { "id": datasets.Value("int32"), "nested": [ {"text": datasets.Value("string")} ] } ) ``` `main.py` ```python import datasets a = datasets.load_dataset("dataset.py") print(a["train"].info.features) ``` Now if `main.py` is run it prints the following correct output: `{'id': Value(dtype='int32', id=None), 'nested': [{'text': Value(dtype='string', id=None)}]}`. However, if f.e. the label of the feature "text" is changed to something else, f.e. to `schema.py` ```python import datasets features = datasets.Features( { "id": datasets.Value("int32"), "nested": [ {"textfoo": datasets.Value("string")} ] } ) ``` `main.py` still prints `{'id': Value(dtype='int32', id=None), 'nested': [{'text': Value(dtype='string', id=None)}]}`, even if run with `download_mode="force_redownload"`. The only fix is to delete the folder in the cache. ### Expected behavior The cached dataset is deleted and refreshed when using `load_datasets` with `download_mode="force_redownload"`. ### Environment info - `datasets` version: 2.7.0 - Platform: Windows-10-10.0.19041-SP0 - Python version: 3.7.9 - PyArrow version: 10.0.0 - Pandas version: 1.3.5
updated_at: 2022-11-21T14:13:03Z
html_url: https://github.com/huggingface/datasets/issues/5273
reactions:
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5273/reactions" }
is_pull_request: false
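Issue 5273 above passes `download_mode="force_redownload"` as a string; the same value can be given via the `DownloadMode` enum. A minimal sketch of that call (the `dataset.py` path is the local loading script from the report), which per the report still returns the stale cached features when the schema is imported from a separate `schema.py`:

```python
from datasets import DownloadMode, load_dataset

# "dataset.py" is the local loading script described in the report above.
ds = load_dataset("dataset.py", download_mode=DownloadMode.FORCE_REDOWNLOAD)
print(ds["train"].info.features)
```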
comments_url: https://api.github.com/repos/huggingface/datasets/issues/5272/comments
timeline_url: https://api.github.com/repos/huggingface/datasets/issues/5272/timeline
closed_at: null
performed_via_github_app: null
state_reason: null
node_id: I_kwDODunzps5W1yP1
state: open
assignees: []
draft: null
number: 5,272
user:
{ "avatar_url": "https://avatars.githubusercontent.com/u/18228395?v=4", "events_url": "https://api.github.com/users/franz101/events{/privacy}", "followers_url": "https://api.github.com/users/franz101/followers", "following_url": "https://api.github.com/users/franz101/following{/other_user}", "gists_url": "https://api.github.com/users/franz101/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/franz101", "id": 18228395, "login": "franz101", "node_id": "MDQ6VXNlcjE4MjI4Mzk1", "organizations_url": "https://api.github.com/users/franz101/orgs", "received_events_url": "https://api.github.com/users/franz101/received_events", "repos_url": "https://api.github.com/users/franz101/repos", "site_admin": false, "starred_url": "https://api.github.com/users/franz101/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/franz101/subscriptions", "type": "User", "url": "https://api.github.com/users/franz101" }
title: Use pyarrow Tensor dtype
events_url: https://api.github.com/repos/huggingface/datasets/issues/5272/events
milestone: null
labels_url: https://api.github.com/repos/huggingface/datasets/issues/5272/labels{/name}
created_at: 2022-11-20T15:18:41Z
active_lock_reason: null
locked: false
assignee: null
pull_request: null
id: 1,456,940,021
labels:
[ { "color": "a2eeef", "default": true, "description": "New feature or request", "id": 1935892871, "name": "enhancement", "node_id": "MDU6TGFiZWwxOTM1ODkyODcx", "url": "https://api.github.com/repos/huggingface/datasets/labels/enhancement" } ]
url: https://api.github.com/repos/huggingface/datasets/issues/5272
comments: [ "", "" ]
repository_url: https://api.github.com/repos/huggingface/datasets
author_association: NONE
body:
### Feature request I was going the discussion of converting tensors to lists. Is there a way to leverage pyarrow's Tensors for nested arrays / embeddings? For example: ```python import pyarrow as pa import numpy as np x = np.array([[2, 2, 4], [4, 5, 100]], np.int32) pa.Tensor.from_numpy(x, dim_names=["dim1","dim2"]) ``` [Apache docs](https://arrow.apache.org/docs/python/generated/pyarrow.Tensor.html) Maybe this belongs into the pyarrow features / repo. ### Motivation Working with big data, we need to make sure to use the best data structures and IO out there ### Your contribution Can try to a PR if code changes necessary
updated_at: 2023-08-17T21:09:11Z
html_url: https://github.com/huggingface/datasets/issues/5272
reactions:
{ "+1": 2, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 2, "url": "https://api.github.com/repos/huggingface/datasets/issues/5272/reactions" }
is_pull_request: false
comments_url: https://api.github.com/repos/huggingface/datasets/issues/5271/comments
timeline_url: https://api.github.com/repos/huggingface/datasets/issues/5271/timeline
closed_at: 2022-11-21T15:06:38Z
performed_via_github_app: null
state_reason: null
node_id: PR_kwDODunzps5DTDX1
state: closed
assignees: []
draft: false
number: 5,271
user:
{ "avatar_url": "https://avatars.githubusercontent.com/u/32936898?v=4", "events_url": "https://api.github.com/users/Freed-Wu/events{/privacy}", "followers_url": "https://api.github.com/users/Freed-Wu/followers", "following_url": "https://api.github.com/users/Freed-Wu/following{/other_user}", "gists_url": "https://api.github.com/users/Freed-Wu/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/Freed-Wu", "id": 32936898, "login": "Freed-Wu", "node_id": "MDQ6VXNlcjMyOTM2ODk4", "organizations_url": "https://api.github.com/users/Freed-Wu/orgs", "received_events_url": "https://api.github.com/users/Freed-Wu/received_events", "repos_url": "https://api.github.com/users/Freed-Wu/repos", "site_admin": false, "starred_url": "https://api.github.com/users/Freed-Wu/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/Freed-Wu/subscriptions", "type": "User", "url": "https://api.github.com/users/Freed-Wu" }
title: Fix #5269
events_url: https://api.github.com/repos/huggingface/datasets/issues/5271/events
milestone: null
labels_url: https://api.github.com/repos/huggingface/datasets/issues/5271/labels{/name}
created_at: 2022-11-20T07:50:49Z
active_lock_reason: null
locked: false
assignee: null
pull_request:
{ "diff_url": "https://github.com/huggingface/datasets/pull/5271.diff", "html_url": "https://github.com/huggingface/datasets/pull/5271", "merged_at": null, "patch_url": "https://github.com/huggingface/datasets/pull/5271.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5271" }
id: 1,456,807,738
labels: []
url: https://api.github.com/repos/huggingface/datasets/issues/5271
comments: [ "", "" ]
repository_url: https://api.github.com/repos/huggingface/datasets
author_association: NONE
body:
``` $ datasets-cli convert --datasets_directory <TAB> datasets_directory benchmarks/ docs/ metrics/ notebooks/ src/ templates/ tests/ utils/ ```
updated_at: 2022-11-21T15:07:19Z
html_url: https://github.com/huggingface/datasets/pull/5271
reactions:
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5271/reactions" }
is_pull_request: true
comments_url: https://api.github.com/repos/huggingface/datasets/issues/5270/comments
timeline_url: https://api.github.com/repos/huggingface/datasets/issues/5270/timeline
closed_at: null
performed_via_github_app: null
state_reason: null
node_id: I_kwDODunzps5W0JA-
state: open
assignees: []
draft: null
number: 5,270
user:
{ "avatar_url": "https://avatars.githubusercontent.com/u/32936898?v=4", "events_url": "https://api.github.com/users/Freed-Wu/events{/privacy}", "followers_url": "https://api.github.com/users/Freed-Wu/followers", "following_url": "https://api.github.com/users/Freed-Wu/following{/other_user}", "gists_url": "https://api.github.com/users/Freed-Wu/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/Freed-Wu", "id": 32936898, "login": "Freed-Wu", "node_id": "MDQ6VXNlcjMyOTM2ODk4", "organizations_url": "https://api.github.com/users/Freed-Wu/orgs", "received_events_url": "https://api.github.com/users/Freed-Wu/received_events", "repos_url": "https://api.github.com/users/Freed-Wu/repos", "site_admin": false, "starred_url": "https://api.github.com/users/Freed-Wu/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/Freed-Wu/subscriptions", "type": "User", "url": "https://api.github.com/users/Freed-Wu" }
title: When len(_URLS) > 16, download will hang
events_url: https://api.github.com/repos/huggingface/datasets/issues/5270/events
milestone: null
labels_url: https://api.github.com/repos/huggingface/datasets/issues/5270/labels{/name}
created_at: 2022-11-19T14:27:41Z
active_lock_reason: null
locked: false
assignee: null
pull_request: null
id: 1,456,508,990
labels: []
url: https://api.github.com/repos/huggingface/datasets/issues/5270
comments: [ "", "" ]
repository_url: https://api.github.com/repos/huggingface/datasets
author_association: NONE
body:
### Describe the bug ```python In [9]: dataset = load_dataset('Freed-Wu/kodak', split='test') Downloading: 100%|████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 2.53k/2.53k [00:00<00:00, 1.88MB/s] [11/19/22 22:16:21] WARNING Using custom data configuration default builder.py:379 Downloading and preparing dataset kodak/default to /home/wzy/.cache/huggingface/datasets/Freed-Wu___kodak/default/0.0.1/bd1cc3434212e3e654f7e16ad618f8a1470b5982b086c91b1d6bc7187183c6e9... Downloading: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 531k/531k [00:02<00:00, 239kB/s] #10: 100%|██████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 1/1 [00:04<00:00, 4.06s/obj] Downloading: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 534k/534k [00:02<00:00, 193kB/s] #14: 100%|██████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 1/1 [00:04<00:00, 4.37s/obj] Downloading: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 692k/692k [00:02<00:00, 269kB/s] #12: 100%|██████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 1/1 [00:04<00:00, 4.44s/obj] Downloading: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 566k/566k [00:02<00:00, 210kB/s] #5: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 1/1 [00:04<00:00, 4.53s/obj] Downloading: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 613k/613k [00:02<00:00, 235kB/s] #13: 100%|██████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 1/1 [00:04<00:00, 4.53s/obj] Downloading: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 786k/786k [00:02<00:00, 342kB/s] #3: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 1/1 [00:04<00:00, 4.60s/obj] Downloading: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 619k/619k [00:02<00:00, 254kB/s] #4: 
100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 1/1 [00:04<00:00, 4.68s/obj] Downloading: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 737k/737k [00:02<00:00, 271kB/s] Downloading: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 788k/788k [00:02<00:00, 285kB/s] #6: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 1/1 [00:05<00:00, 5.04s/obj] Downloading: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 618k/618k [00:04<00:00, 153kB/s] #0: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 2/2 [00:11<00:00, 5.69s/obj] ^CProcess ForkPoolWorker-47: Process ForkPoolWorker-46: Process ForkPoolWorker-36: Process ForkPoolWorker-38:██████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 1/1 [00:05<00:00, 5.04s/obj] Process ForkPoolWorker-37: Process ForkPoolWorker-45: Process ForkPoolWorker-39: Process ForkPoolWorker-43: Process ForkPoolWorker-33: Process ForkPoolWorker-18: Traceback (most recent call last): Traceback (most recent call last): Traceback (most recent call last): Traceback (most recent call last): Traceback (most recent call last): File "/usr/lib/python3.10/multiprocessing/process.py", line 314, in _bootstrap self.run() File "/usr/lib/python3.10/multiprocessing/process.py", line 314, in _bootstrap self.run() File "/usr/lib/python3.10/multiprocessing/process.py", line 314, in _bootstrap self.run() File "/usr/lib/python3.10/multiprocessing/process.py", line 108, in run self._target(*self._args, **self._kwargs) File "/usr/lib/python3.10/multiprocessing/process.py", line 314, in _bootstrap self.run() File "/usr/lib/python3.10/multiprocessing/pool.py", line 114, in worker task = get() File "/usr/lib/python3.10/multiprocessing/process.py", line 314, in _bootstrap self.run() File "/usr/lib/python3.10/multiprocessing/queues.py", line 364, in get with self._rlock: File "/usr/lib/python3.10/multiprocessing/process.py", line 108, in run self._target(*self._args, **self._kwargs) File "/usr/lib/python3.10/multiprocessing/synchronize.py", line 95, in __enter__ return self._semlock.__enter__() File "/usr/lib/python3.10/multiprocessing/process.py", line 108, in run self._target(*self._args, **self._kwargs) File "/usr/lib/python3.10/multiprocessing/process.py", line 108, in run self._target(*self._args, **self._kwargs) File "/usr/lib/python3.10/multiprocessing/pool.py", line 114, in worker task = get() File "/usr/lib/python3.10/multiprocessing/pool.py", line 114, in worker task = get() File "/usr/lib/python3.10/multiprocessing/process.py", line 108, in run self._target(*self._args, **self._kwargs) File "/usr/lib/python3.10/multiprocessing/queues.py", line 364, in get with self._rlock: File 
"/usr/lib/python3.10/multiprocessing/pool.py", line 114, in worker task = get() File "/usr/lib/python3.10/multiprocessing/queues.py", line 364, in get with self._rlock: KeyboardInterrupt File "/usr/lib/python3.10/multiprocessing/synchronize.py", line 95, in __enter__ return self._semlock.__enter__() Traceback (most recent call last): Traceback (most recent call last): Traceback (most recent call last): KeyboardInterrupt File "/usr/lib/python3.10/multiprocessing/pool.py", line 114, in worker task = get() File "/usr/lib/python3.10/multiprocessing/queues.py", line 364, in get with self._rlock: File "/usr/lib/python3.10/multiprocessing/queues.py", line 364, in get with self._rlock: File "/usr/lib/python3.10/multiprocessing/synchronize.py", line 95, in __enter__ return self._semlock.__enter__() File "/usr/lib/python3.10/multiprocessing/synchronize.py", line 95, in __enter__ return self._semlock.__enter__() KeyboardInterrupt File "/usr/lib/python3.10/multiprocessing/process.py", line 314, in _bootstrap self.run() File "/usr/lib/python3.10/multiprocessing/process.py", line 314, in _bootstrap self.run() KeyboardInterrupt File "/usr/lib/python3.10/multiprocessing/process.py", line 314, in _bootstrap self.run() File "/usr/lib/python3.10/multiprocessing/process.py", line 108, in run self._target(*self._args, **self._kwargs) File "/usr/lib/python3.10/multiprocessing/process.py", line 108, in run self._target(*self._args, **self._kwargs) File "/usr/lib/python3.10/multiprocessing/pool.py", line 114, in worker task = get() File "/usr/lib/python3.10/multiprocessing/synchronize.py", line 95, in __enter__ return self._semlock.__enter__() File "/usr/lib/python3.10/multiprocessing/process.py", line 108, in run self._target(*self._args, **self._kwargs) File "/usr/lib/python3.10/multiprocessing/pool.py", line 114, in worker task = get() File "/usr/lib/python3.10/multiprocessing/pool.py", line 114, in worker task = get() File "/usr/lib/python3.10/multiprocessing/queues.py", line 364, in get with self._rlock: File "/usr/lib/python3.10/multiprocessing/queues.py", line 365, in get res = self._reader.recv_bytes() File "/usr/lib/python3.10/multiprocessing/queues.py", line 364, in get with self._rlock: File "/usr/lib/python3.10/multiprocessing/synchronize.py", line 95, in __enter__ return self._semlock.__enter__() KeyboardInterrupt File "/usr/lib/python3.10/multiprocessing/synchronize.py", line 95, in __enter__ return self._semlock.__enter__() File "/usr/lib/python3.10/multiprocessing/connection.py", line 221, in recv_bytes buf = self._recv_bytes(maxlength) KeyboardInterrupt KeyboardInterrupt File "/usr/lib/python3.10/multiprocessing/connection.py", line 419, in _recv_bytes buf = self._recv(4) File "/usr/lib/python3.10/multiprocessing/connection.py", line 384, in _recv chunk = read(handle, remaining) KeyboardInterrupt Traceback (most recent call last): File "/usr/lib/python3.10/multiprocessing/process.py", line 314, in _bootstrap self.run() File "/usr/lib/python3.10/multiprocessing/process.py", line 108, in run self._target(*self._args, **self._kwargs) File "/usr/lib/python3.10/multiprocessing/pool.py", line 114, in worker task = get() File "/usr/lib/python3.10/multiprocessing/queues.py", line 364, in get with self._rlock: File "/usr/lib/python3.10/multiprocessing/synchronize.py", line 95, in __enter__ return self._semlock.__enter__() KeyboardInterrupt Process ForkPoolWorker-20: Process ForkPoolWorker-44: Process ForkPoolWorker-22: Traceback (most recent call last): File 
"/usr/lib/python3.10/site-packages/urllib3/util/connection.py", line 85, in create_connection sock.connect(sa) ConnectionRefusedError: [Errno 111] Connection refused During handling of the above exception, another exception occurred: Traceback (most recent call last): File "/usr/lib/python3.10/multiprocessing/process.py", line 314, in _bootstrap self.run() File "/usr/lib/python3.10/multiprocessing/process.py", line 108, in run self._target(*self._args, **self._kwargs) File "/usr/lib/python3.10/multiprocessing/pool.py", line 125, in worker result = (True, func(*args, **kwds)) File "/usr/lib/python3.10/multiprocessing/pool.py", line 48, in mapstar return list(map(*args)) File "/usr/lib/python3.10/site-packages/datasets/utils/py_utils.py", line 215, in _single_map_nested mapped = [_single_map_nested((function, v, types, None, True)) for v in pbar] File "/usr/lib/python3.10/site-packages/datasets/utils/py_utils.py", line 215, in <listcomp> mapped = [_single_map_nested((function, v, types, None, True)) for v in pbar] File "/usr/lib/python3.10/site-packages/datasets/utils/py_utils.py", line 197, in _single_map_nested return function(data_struct) File "/usr/lib/python3.10/site-packages/datasets/utils/download_manager.py", line 217, in _download return cached_path(url_or_filename, download_config=download_config) File "/usr/lib/python3.10/site-packages/datasets/utils/file_utils.py", line 298, in cached_path output_path = get_from_cache( File "/usr/lib/python3.10/site-packages/datasets/utils/file_utils.py", line 561, in get_from_cache response = http_head( File "/usr/lib/python3.10/site-packages/datasets/utils/file_utils.py", line 476, in http_head response = _request_with_retry( File "/usr/lib/python3.10/site-packages/datasets/utils/file_utils.py", line 405, in _request_with_retry response = requests.request(method=method.upper(), url=url, timeout=timeout, **params) File "/usr/lib/python3.10/site-packages/requests/api.py", line 59, in request return session.request(method=method, url=url, **kwargs) File "/usr/lib/python3.10/site-packages/requests/sessions.py", line 587, in request resp = self.send(prep, **send_kwargs) File "/usr/lib/python3.10/site-packages/requests/sessions.py", line 701, in send r = adapter.send(request, **kwargs) File "/usr/lib/python3.10/site-packages/requests/adapters.py", line 489, in send resp = conn.urlopen( File "/usr/lib/python3.10/site-packages/urllib3/connectionpool.py", line 703, in urlopen httplib_response = self._make_request( File "/usr/lib/python3.10/site-packages/urllib3/connectionpool.py", line 386, in _make_request self._validate_conn(conn) File "/usr/lib/python3.10/site-packages/urllib3/connectionpool.py", line 1042, in _validate_conn conn.connect() File "/usr/lib/python3.10/site-packages/urllib3/connection.py", line 358, in connect self.sock = conn = self._new_conn() File "/usr/lib/python3.10/site-packages/urllib3/connection.py", line 174, in _new_conn conn = connection.create_connection( File "/usr/lib/python3.10/site-packages/urllib3/util/connection.py", line 85, in create_connection sock.connect(sa) KeyboardInterrupt #1: 0%| | 0/2 [03:00<?, ?obj/s] Traceback (most recent call last): Traceback (most recent call last): File "/usr/lib/python3.10/multiprocessing/process.py", line 314, in _bootstrap self.run() File "/usr/lib/python3.10/multiprocessing/process.py", line 108, in run self._target(*self._args, **self._kwargs) File "/usr/lib/python3.10/multiprocessing/pool.py", line 125, in worker result = (True, func(*args, **kwds)) File 
"/usr/lib/python3.10/multiprocessing/pool.py", line 48, in mapstar return list(map(*args)) File "/usr/lib/python3.10/site-packages/datasets/utils/py_utils.py", line 215, in _single_map_nested mapped = [_single_map_nested((function, v, types, None, True)) for v in pbar] File "/usr/lib/python3.10/site-packages/datasets/utils/py_utils.py", line 215, in <listcomp> mapped = [_single_map_nested((function, v, types, None, True)) for v in pbar] File "/usr/lib/python3.10/site-packages/datasets/utils/py_utils.py", line 197, in _single_map_nested return function(data_struct) File "/usr/lib/python3.10/site-packages/datasets/utils/download_manager.py", line 217, in _download return cached_path(url_or_filename, download_config=download_config) File "/usr/lib/python3.10/site-packages/datasets/utils/file_utils.py", line 298, in cached_path output_path = get_from_cache( File "/usr/lib/python3.10/site-packages/datasets/utils/file_utils.py", line 659, in get_from_cache http_get( File "/usr/lib/python3.10/site-packages/datasets/utils/file_utils.py", line 442, in http_get response = _request_with_retry( File "/usr/lib/python3.10/site-packages/datasets/utils/file_utils.py", line 405, in _request_with_retry response = requests.request(method=method.upper(), url=url, timeout=timeout, **params) File "/usr/lib/python3.10/site-packages/requests/api.py", line 59, in request return session.request(method=method, url=url, **kwargs) File "/usr/lib/python3.10/site-packages/requests/sessions.py", line 587, in request resp = self.send(prep, **send_kwargs) File "/usr/lib/python3.10/site-packages/requests/sessions.py", line 701, in send r = adapter.send(request, **kwargs) File "/usr/lib/python3.10/site-packages/requests/adapters.py", line 489, in send resp = conn.urlopen( File "/usr/lib/python3.10/site-packages/urllib3/connectionpool.py", line 703, in urlopen httplib_response = self._make_request( File "/usr/lib/python3.10/site-packages/urllib3/connectionpool.py", line 386, in _make_request self._validate_conn(conn) File "/usr/lib/python3.10/multiprocessing/process.py", line 314, in _bootstrap self.run() File "/usr/lib/python3.10/site-packages/urllib3/connectionpool.py", line 1042, in _validate_conn conn.connect() File "/usr/lib/python3.10/multiprocessing/process.py", line 108, in run self._target(*self._args, **self._kwargs) File "/usr/lib/python3.10/site-packages/urllib3/connection.py", line 358, in connect self.sock = conn = self._new_conn() File "/usr/lib/python3.10/multiprocessing/pool.py", line 125, in worker result = (True, func(*args, **kwds)) File "/usr/lib/python3.10/site-packages/urllib3/connection.py", line 174, in _new_conn conn = connection.create_connection( File "/usr/lib/python3.10/multiprocessing/pool.py", line 48, in mapstar return list(map(*args)) File "/usr/lib/python3.10/site-packages/urllib3/util/connection.py", line 72, in create_connection for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM): File "/usr/lib/python3.10/site-packages/datasets/utils/py_utils.py", line 215, in _single_map_nested mapped = [_single_map_nested((function, v, types, None, True)) for v in pbar] File "/usr/lib/python3.10/socket.py", line 955, in getaddrinfo for res in _socket.getaddrinfo(host, port, family, type, proto, flags): File "/usr/lib/python3.10/site-packages/datasets/utils/py_utils.py", line 215, in <listcomp> mapped = [_single_map_nested((function, v, types, None, True)) for v in pbar] File "/usr/lib/python3.10/site-packages/datasets/utils/py_utils.py", line 197, in _single_map_nested return 
function(data_struct) File "/usr/lib/python3.10/site-packages/datasets/utils/download_manager.py", line 217, in _download return cached_path(url_or_filename, download_config=download_config) KeyboardInterrupt File "/usr/lib/python3.10/site-packages/datasets/utils/file_utils.py", line 298, in cached_path output_path = get_from_cache( File "/usr/lib/python3.10/site-packages/datasets/utils/file_utils.py", line 561, in get_from_cache response = http_head( File "/usr/lib/python3.10/site-packages/datasets/utils/file_utils.py", line 476, in http_head response = _request_with_retry( File "/usr/lib/python3.10/site-packages/datasets/utils/file_utils.py", line 405, in _request_with_retry response = requests.request(method=method.upper(), url=url, timeout=timeout, **params) File "/usr/lib/python3.10/site-packages/requests/api.py", line 59, in request return session.request(method=method, url=url, **kwargs) File "/usr/lib/python3.10/site-packages/requests/sessions.py", line 587, in request resp = self.send(prep, **send_kwargs) File "/usr/lib/python3.10/site-packages/requests/sessions.py", line 701, in send r = adapter.send(request, **kwargs) File "/usr/lib/python3.10/site-packages/requests/adapters.py", line 489, in send resp = conn.urlopen( File "/usr/lib/python3.10/site-packages/urllib3/connectionpool.py", line 703, in urlopen httplib_response = self._make_request( File "/usr/lib/python3.10/site-packages/urllib3/connectionpool.py", line 386, in _make_request self._validate_conn(conn) File "/usr/lib/python3.10/site-packages/urllib3/connectionpool.py", line 1042, in _validate_conn conn.connect() File "/usr/lib/python3.10/site-packages/urllib3/connection.py", line 358, in connect self.sock = conn = self._new_conn() File "/usr/lib/python3.10/site-packages/urllib3/connection.py", line 174, in _new_conn conn = connection.create_connection( File "/usr/lib/python3.10/site-packages/urllib3/util/connection.py", line 72, in create_connection for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM): File "/usr/lib/python3.10/socket.py", line 955, in getaddrinfo for res in _socket.getaddrinfo(host, port, family, type, proto, flags): KeyboardInterrupt #3: 0%| | 0/2 [03:00<?, ?obj/s] #11: 0%| | 0/1 [00:49<?, ?obj/s] Traceback (most recent call last): File "/usr/lib/python3.10/site-packages/urllib3/util/connection.py", line 85, in create_connection sock.connect(sa) ConnectionRefusedError: [Errno 111] Connection refused During handling of the above exception, another exception occurred: Traceback (most recent call last): File "/usr/lib/python3.10/multiprocessing/process.py", line 314, in _bootstrap self.run() File "/usr/lib/python3.10/multiprocessing/process.py", line 108, in run self._target(*self._args, **self._kwargs) File "/usr/lib/python3.10/multiprocessing/pool.py", line 125, in worker result = (True, func(*args, **kwds)) File "/usr/lib/python3.10/multiprocessing/pool.py", line 48, in mapstar return list(map(*args)) File "/usr/lib/python3.10/site-packages/datasets/utils/py_utils.py", line 215, in _single_map_nested mapped = [_single_map_nested((function, v, types, None, True)) for v in pbar] File "/usr/lib/python3.10/site-packages/datasets/utils/py_utils.py", line 215, in <listcomp> mapped = [_single_map_nested((function, v, types, None, True)) for v in pbar] File "/usr/lib/python3.10/site-packages/datasets/utils/py_utils.py", line 197, in _single_map_nested return function(data_struct) File "/usr/lib/python3.10/site-packages/datasets/utils/download_manager.py", line 217, in _download return 
cached_path(url_or_filename, download_config=download_config) File "/usr/lib/python3.10/site-packages/datasets/utils/file_utils.py", line 298, in cached_path output_path = get_from_cache( File "/usr/lib/python3.10/site-packages/datasets/utils/file_utils.py", line 561, in get_from_cache response = http_head( File "/usr/lib/python3.10/site-packages/datasets/utils/file_utils.py", line 476, in http_head response = _request_with_retry( File "/usr/lib/python3.10/site-packages/datasets/utils/file_utils.py", line 405, in _request_with_retry response = requests.request(method=method.upper(), url=url, timeout=timeout, **params) File "/usr/lib/python3.10/site-packages/requests/api.py", line 59, in request return session.request(method=method, url=url, **kwargs) File "/usr/lib/python3.10/site-packages/requests/sessions.py", line 587, in request resp = self.send(prep, **send_kwargs) File "/usr/lib/python3.10/site-packages/requests/sessions.py", line 723, in send history = [resp for resp in gen] File "/usr/lib/python3.10/site-packages/requests/sessions.py", line 723, in <listcomp> history = [resp for resp in gen] File "/usr/lib/python3.10/site-packages/requests/sessions.py", line 266, in resolve_redirects resp = self.send( File "/usr/lib/python3.10/site-packages/requests/sessions.py", line 701, in send r = adapter.send(request, **kwargs) File "/usr/lib/python3.10/site-packages/requests/adapters.py", line 489, in send resp = conn.urlopen( File "/usr/lib/python3.10/site-packages/urllib3/connectionpool.py", line 703, in urlopen httplib_response = self._make_request( File "/usr/lib/python3.10/site-packages/urllib3/connectionpool.py", line 386, in _make_request self._validate_conn(conn) File "/usr/lib/python3.10/site-packages/urllib3/connectionpool.py", line 1042, in _validate_conn conn.connect() File "/usr/lib/python3.10/site-packages/urllib3/connection.py", line 358, in connect self.sock = conn = self._new_conn() File "/usr/lib/python3.10/site-packages/urllib3/connection.py", line 174, in _new_conn conn = connection.create_connection( File "/usr/lib/python3.10/site-packages/urllib3/util/connection.py", line 85, in create_connection sock.connect(sa) KeyboardInterrupt #5: 0%| | 0/1 [03:00<?, ?obj/s] KeyboardInterrupt Process ForkPoolWorker-42: Traceback (most recent call last): File "/usr/lib/python3.10/multiprocessing/process.py", line 314, in _bootstrap self.run() File "/usr/lib/python3.10/multiprocessing/process.py", line 108, in run self._target(*self._args, **self._kwargs) File "/usr/lib/python3.10/multiprocessing/pool.py", line 125, in worker result = (True, func(*args, **kwds)) File "/usr/lib/python3.10/multiprocessing/pool.py", line 48, in mapstar return list(map(*args)) File "/usr/lib/python3.10/site-packages/datasets/utils/py_utils.py", line 215, in _single_map_nested mapped = [_single_map_nested((function, v, types, None, True)) for v in pbar] File "/usr/lib/python3.10/site-packages/datasets/utils/py_utils.py", line 215, in <listcomp> mapped = [_single_map_nested((function, v, types, None, True)) for v in pbar] File "/usr/lib/python3.10/site-packages/datasets/utils/py_utils.py", line 197, in _single_map_nested return function(data_struct) File "/usr/lib/python3.10/site-packages/datasets/utils/download_manager.py", line 217, in _download return cached_path(url_or_filename, download_config=download_config) File "/usr/lib/python3.10/site-packages/datasets/utils/file_utils.py", line 298, in cached_path output_path = get_from_cache( File "/usr/lib/python3.10/site-packages/datasets/utils/file_utils.py", 
line 561, in get_from_cache response = http_head( File "/usr/lib/python3.10/site-packages/datasets/utils/file_utils.py", line 476, in http_head response = _request_with_retry( File "/usr/lib/python3.10/site-packages/datasets/utils/file_utils.py", line 405, in _request_with_retry response = requests.request(method=method.upper(), url=url, timeout=timeout, **params) File "/usr/lib/python3.10/site-packages/requests/api.py", line 59, in request return session.request(method=method, url=url, **kwargs) File "/usr/lib/python3.10/site-packages/requests/sessions.py", line 587, in request resp = self.send(prep, **send_kwargs) File "/usr/lib/python3.10/site-packages/requests/sessions.py", line 701, in send r = adapter.send(request, **kwargs) File "/usr/lib/python3.10/site-packages/requests/adapters.py", line 489, in send resp = conn.urlopen( File "/usr/lib/python3.10/site-packages/urllib3/connectionpool.py", line 703, in urlopen httplib_response = self._make_request( File "/usr/lib/python3.10/site-packages/urllib3/connectionpool.py", line 386, in _make_request self._validate_conn(conn) File "/usr/lib/python3.10/site-packages/urllib3/connectionpool.py", line 1042, in _validate_conn conn.connect() File "/usr/lib/python3.10/site-packages/urllib3/connection.py", line 358, in connect self.sock = conn = self._new_conn() File "/usr/lib/python3.10/site-packages/urllib3/connection.py", line 174, in _new_conn conn = connection.create_connection( File "/usr/lib/python3.10/site-packages/urllib3/util/connection.py", line 72, in create_connection for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM): File "/usr/lib/python3.10/socket.py", line 955, in getaddrinfo for res in _socket.getaddrinfo(host, port, family, type, proto, flags): KeyboardInterrupt #9: 0%| | 0/1 [00:51<?, ?obj/s] ``` ### Steps to reproduce the bug ```python """Kodak. Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import datasets NUMBER = 17 _DESCRIPTION = """\ The pictures below link to lossless, true color (24 bits per pixel, aka "full color") images. It is my understanding they have been released by the Eastman Kodak Company for unrestricted usage. Many sites use them as a standard test suite for compression testing, etc. Prior to this site, they were only available in the Sun Raster format via ftp. This meant that the images could not be previewed before downloading. Since their release, however, the lossless PNG format has been incorporated into all the major browsers. Since PNG supports 24-bit lossless color (which GIF and JPEG do not), it is now possible to offer this browser-friendly access to the images. 
""" _HOMEPAGE = "https://r0k.us/graphics/kodak/" _LICENSE = "GPLv3" _URLS = [ f"https://github.com/MohamedBakrAli/Kodak-Lossless-True-Color-Image-Suite/raw/master/PhotoCD_PCD0992/{i}.png" for i in range(1, 1 + NUMBER) ] class Kodak(datasets.GeneratorBasedBuilder): """Kodak datasets.""" VERSION = datasets.Version("0.0.1") def _info(self): features = datasets.Features( { "image": datasets.Image(), } ) return datasets.DatasetInfo( description=_DESCRIPTION, features=features, homepage=_HOMEPAGE, license=_LICENSE, ) def _split_generators(self, dl_manager): """Return SplitGenerators.""" file_paths = dl_manager.download_and_extract(_URLS) return [ datasets.SplitGenerator( name=datasets.Split.TEST, gen_kwargs={ "file_paths": file_paths, }, ), ] def _generate_examples(self, file_paths): """Yield examples.""" for file_path in file_paths: yield file_path, {"image": file_path} ``` ### Expected behavior When `len(_URLS) < 16`, it works. ```python In [3]: dataset = load_dataset('Freed-Wu/kodak', split='test') Downloading: 100%|████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 2.53k/2.53k [00:00<00:00, 3.02MB/s] [11/19/22 22:04:28] WARNING Using custom data configuration default builder.py:379 Downloading and preparing dataset kodak/default to /home/wzy/.cache/huggingface/datasets/Freed-Wu___kodak/default/0.0.1/d26017602a592b5bfa7e008127cdf9dec5af220c9068005f1b4eda036031f475... Downloading: 100%|██████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 593k/593k [00:00<00:00, 2.88MB/s] Downloading: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 621k/621k [00:03<00:00, 166kB/s] Downloading: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 531k/531k [00:01<00:00, 366kB/s] 100%|██████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 16/16 [00:13<00:00, 1.18it/s] 100%|████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 16/16 [00:00<00:00, 3832.38it/s] Dataset kodak downloaded and prepared to /home/wzy/.cache/huggingface/datasets/Freed-Wu___kodak/default/0.0.1/d26017602a592b5bfa7e008127cdf9dec5af220c9068005f1b4eda036031f475. Subsequent calls will reuse this data. ``` ### Environment info - `datasets` version: 2.7.0 - Platform: Linux-6.0.8-arch1-1-x86_64-with-glibc2.36 - Python version: 3.10.8 - PyArrow version: 9.0.0 - Pandas version: 1.4.4
2022-11-21T15:27:16Z
https://github.com/huggingface/datasets/issues/5270
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5270/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5269/comments
https://api.github.com/repos/huggingface/datasets/issues/5269/timeline
2022-11-21T15:06:14Z
null
completed
I_kwDODunzps5W0DWn
closed
[]
null
5,269
{ "avatar_url": "https://avatars.githubusercontent.com/u/32936898?v=4", "events_url": "https://api.github.com/users/Freed-Wu/events{/privacy}", "followers_url": "https://api.github.com/users/Freed-Wu/followers", "following_url": "https://api.github.com/users/Freed-Wu/following{/other_user}", "gists_url": "https://api.github.com/users/Freed-Wu/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/Freed-Wu", "id": 32936898, "login": "Freed-Wu", "node_id": "MDQ6VXNlcjMyOTM2ODk4", "organizations_url": "https://api.github.com/users/Freed-Wu/orgs", "received_events_url": "https://api.github.com/users/Freed-Wu/received_events", "repos_url": "https://api.github.com/users/Freed-Wu/repos", "site_admin": false, "starred_url": "https://api.github.com/users/Freed-Wu/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/Freed-Wu/subscriptions", "type": "User", "url": "https://api.github.com/users/Freed-Wu" }
Shell completions
https://api.github.com/repos/huggingface/datasets/issues/5269/events
null
https://api.github.com/repos/huggingface/datasets/issues/5269/labels{/name}
2022-11-19T13:48:59Z
null
false
null
null
1,456,485,799
[ { "color": "a2eeef", "default": true, "description": "New feature or request", "id": 1935892871, "name": "enhancement", "node_id": "MDU6TGFiZWwxOTM1ODkyODcx", "url": "https://api.github.com/repos/huggingface/datasets/labels/enhancement" } ]
https://api.github.com/repos/huggingface/datasets/issues/5269
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
### Feature request Like <https://github.com/huggingface/huggingface_hub/issues/1197>, datasets-cli may need it, too. ### Motivation See above. ### Your contribution Maybe.
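For context, a generic sketch of how shell completions are often added to an argparse-based CLI with the third-party `argcomplete` package; this is not the datasets-cli implementation, only an illustration of what the request amounts to (command names below are illustrative).

```python
# Generic completion sketch for an argparse CLI, assuming argcomplete is installed
# and registered in the shell (e.g. `eval "$(register-python-argcomplete datasets-cli)"`).
import argparse
import argcomplete  # third-party package providing bash/zsh tab completion


def main():
    parser = argparse.ArgumentParser(prog="datasets-cli")
    subparsers = parser.add_subparsers(dest="command")
    subparsers.add_parser("env")   # illustrative subcommands
    subparsers.add_parser("test")

    argcomplete.autocomplete(parser)  # must run before parse_args()
    args = parser.parse_args()
    print(args.command)


if __name__ == "__main__":
    main()
```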
2022-11-21T15:06:15Z
https://github.com/huggingface/datasets/issues/5269
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5269/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5268/comments
https://api.github.com/repos/huggingface/datasets/issues/5268/timeline
2022-12-14T18:22:58Z
null
null
PR_kwDODunzps5DPIsp
closed
[]
false
5,268
{ "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" }
Sharded save_to_disk + multiprocessing
https://api.github.com/repos/huggingface/datasets/issues/5268/events
null
https://api.github.com/repos/huggingface/datasets/issues/5268/labels{/name}
2022-11-18T18:50:01Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5268.diff", "html_url": "https://github.com/huggingface/datasets/pull/5268", "merged_at": "2022-12-14T18:22:58Z", "patch_url": "https://github.com/huggingface/datasets/pull/5268.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5268" }
1,455,633,978
[]
https://api.github.com/repos/huggingface/datasets/issues/5268
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
Added `num_shards=` and `num_proc=` to `save_to_disk()` EDIT: also added `max_shard_size=` to `save_to_disk()`, and also `num_shards=` to `push_to_hub` I also: - deprecated the fs parameter in favor of storage_options (for consistency with the rest of the lib) in save_to_disk and load_from_disk - always embed the image/audio data in arrow when doing `save_to_disk` - added a tqdm bar in `save_to_disk` - Use the MockFileSystem in tests for `save_to_disk` and `load_from_disk` - removed the unused integration tests with S3, since we can now test with `mockfs` instead of `s3fs` TODO: - [x] implem save_to_disk for dataset dict - [x] save_to_disk for dataset dict tests - [x] deprecate fs in dataset dict load_from_disk as well - [x] update docs Close #5263 Close https://github.com/huggingface/datasets/issues/4196 Close https://github.com/huggingface/datasets/issues/4351
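A minimal usage sketch of the parameters this PR describes; the argument names follow the description above, while the toy data and output paths are illustrative only.

```python
# Sketch of the sharded save described in the PR, using a tiny in-memory dataset.
from datasets import Dataset

ds = Dataset.from_dict({"text": [f"example {i}" for i in range(1000)]})

# Save in a fixed number of shards, written by several processes.
ds.save_to_disk("toy_sharded", num_shards=8, num_proc=4)

# Or let the shard count be derived from a target shard size.
ds.save_to_disk("toy_by_size", max_shard_size="500MB")

# The PR also describes num_shards= for push_to_hub (repo name is illustrative):
# ds.push_to_hub("my-username/toy-dataset", num_shards=8)
```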
2022-12-14T18:25:52Z
https://github.com/huggingface/datasets/pull/5268
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5268/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5267/comments
https://api.github.com/repos/huggingface/datasets/issues/5267/timeline
2022-11-18T17:25:27Z
null
null
PR_kwDODunzps5DOlFR
closed
[]
false
5,267
{ "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" }
Fix `max_shard_size` docs
https://api.github.com/repos/huggingface/datasets/issues/5267/events
null
https://api.github.com/repos/huggingface/datasets/issues/5267/labels{/name}
2022-11-18T16:55:22Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5267.diff", "html_url": "https://github.com/huggingface/datasets/pull/5267", "merged_at": "2022-11-18T17:25:26Z", "patch_url": "https://github.com/huggingface/datasets/pull/5267.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5267" }
1,455,466,464
[]
https://api.github.com/repos/huggingface/datasets/issues/5267
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
null
2022-11-18T17:28:58Z
https://github.com/huggingface/datasets/pull/5267
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5267/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5266/comments
https://api.github.com/repos/huggingface/datasets/issues/5266/timeline
2022-11-21T15:41:57Z
null
null
PR_kwDODunzps5DN9BT
closed
[]
false
5,266
{ "avatar_url": "https://avatars.githubusercontent.com/u/16348744?v=4", "events_url": "https://api.github.com/users/polinaeterna/events{/privacy}", "followers_url": "https://api.github.com/users/polinaeterna/followers", "following_url": "https://api.github.com/users/polinaeterna/following{/other_user}", "gists_url": "https://api.github.com/users/polinaeterna/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/polinaeterna", "id": 16348744, "login": "polinaeterna", "node_id": "MDQ6VXNlcjE2MzQ4NzQ0", "organizations_url": "https://api.github.com/users/polinaeterna/orgs", "received_events_url": "https://api.github.com/users/polinaeterna/received_events", "repos_url": "https://api.github.com/users/polinaeterna/repos", "site_admin": false, "starred_url": "https://api.github.com/users/polinaeterna/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/polinaeterna/subscriptions", "type": "User", "url": "https://api.github.com/users/polinaeterna" }
Specify arguments as keywords in librosa.reshape to avoid future errors
https://api.github.com/repos/huggingface/datasets/issues/5266/events
null
https://api.github.com/repos/huggingface/datasets/issues/5266/labels{/name}
2022-11-18T14:58:47Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5266.diff", "html_url": "https://github.com/huggingface/datasets/pull/5266", "merged_at": "2022-11-21T15:41:57Z", "patch_url": "https://github.com/huggingface/datasets/pull/5266.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5266" }
1,455,281,310
[]
https://api.github.com/repos/huggingface/datasets/issues/5266
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
Fixes a warning and future deprecation from `librosa.resample`: ``` FutureWarning: Pass orig_sr=16000, target_sr=48000 as keyword args. From version 0.10 passing these as positional arguments will result in an error array = librosa.resample(array, sampling_rate, self.sampling_rate, res_type="kaiser_best") ```
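A minimal sketch of the change the quoted warning asks for, i.e. passing the sampling rates to `librosa.resample` as keyword arguments; the array and rates below are illustrative.

```python
import numpy as np
import librosa

array = np.zeros(16000, dtype=np.float32)  # one second of silence at 16 kHz

# Deprecated positional style (triggers the FutureWarning quoted above):
# resampled = librosa.resample(array, 16000, 48000, res_type="kaiser_best")

# Keyword style that remains valid in librosa >= 0.10:
resampled = librosa.resample(array, orig_sr=16000, target_sr=48000, res_type="kaiser_best")
print(resampled.shape)  # (48000,)
```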
2022-11-21T15:45:02Z
https://github.com/huggingface/datasets/pull/5266
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5266/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5265/comments
https://api.github.com/repos/huggingface/datasets/issues/5265/timeline
2023-02-01T16:36:03Z
null
completed
I_kwDODunzps5Wvbtw
closed
[ { "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" } ]
null
5,265
{ "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" }
Get an IterableDataset from a map-style Dataset
https://api.github.com/repos/huggingface/datasets/issues/5265/events
null
https://api.github.com/repos/huggingface/datasets/issues/5265/labels{/name}
2022-11-18T14:54:40Z
null
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" }
null
1,455,274,864
[ { "color": "a2eeef", "default": true, "description": "New feature or request", "id": 1935892871, "name": "enhancement", "node_id": "MDU6TGFiZWwxOTM1ODkyODcx", "url": "https://api.github.com/repos/huggingface/datasets/labels/enhancement" }, { "color": "fef2c0", "default": false, "description": "", "id": 3287858981, "name": "streaming", "node_id": "MDU6TGFiZWwzMjg3ODU4OTgx", "url": "https://api.github.com/repos/huggingface/datasets/labels/streaming" } ]
https://api.github.com/repos/huggingface/datasets/issues/5265
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
This is useful to leverage iterable-dataset-specific features like: - fast approximate shuffling - lazy map, filter etc. Iterating over the resulting iterable dataset should be at least as fast as iterating over the map-style dataset. Here are some ideas regarding the API: ```python # 1. # - consistency with load_dataset(..., streaming=True) # - gives intuition that map/filter/etc. are done on-the-fly ids = ds.stream() # 2. # - more explicit on the output type # - but maybe sounds like a conversion tool rather than a step in a processing pipeline ids = ds.as_iterable_dataset() ```
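A hypothetical usage sketch of the proposed conversion, assuming one of the method names suggested above; neither name exists at the time of the issue, and the toy data is illustrative.

```python
# Hypothetical: ds.as_iterable_dataset() / ds.stream() are only proposals here.
from datasets import Dataset

ds = Dataset.from_dict({"text": [f"example {i}" for i in range(100)]})  # map-style Dataset

ids = ds.as_iterable_dataset()  # or ds.stream(), per the proposal above

ids = ids.shuffle(buffer_size=32, seed=42)                 # fast approximate shuffling
ids = ids.map(lambda x: {"n_chars": len(x["text"])})       # lazy map

for example in ids.take(3):
    print(example["n_chars"])
```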
2023-02-01T16:36:03Z
https://github.com/huggingface/datasets/issues/5265
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5265/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5264/comments
https://api.github.com/repos/huggingface/datasets/issues/5264/timeline
2022-11-22T11:18:08Z
null
completed
I_kwDODunzps5WvWWq
closed
[]
null
5,264
{ "avatar_url": "https://avatars.githubusercontent.com/u/44069155?v=4", "events_url": "https://api.github.com/users/loubnabnl/events{/privacy}", "followers_url": "https://api.github.com/users/loubnabnl/followers", "following_url": "https://api.github.com/users/loubnabnl/following{/other_user}", "gists_url": "https://api.github.com/users/loubnabnl/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/loubnabnl", "id": 44069155, "login": "loubnabnl", "node_id": "MDQ6VXNlcjQ0MDY5MTU1", "organizations_url": "https://api.github.com/users/loubnabnl/orgs", "received_events_url": "https://api.github.com/users/loubnabnl/received_events", "repos_url": "https://api.github.com/users/loubnabnl/repos", "site_admin": false, "starred_url": "https://api.github.com/users/loubnabnl/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/loubnabnl/subscriptions", "type": "User", "url": "https://api.github.com/users/loubnabnl" }
`datasets` can't read a Parquet file in Python 3.9.13
https://api.github.com/repos/huggingface/datasets/issues/5264/events
null
https://api.github.com/repos/huggingface/datasets/issues/5264/labels{/name}
2022-11-18T14:44:01Z
null
false
null
null
1,455,252,906
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
https://api.github.com/repos/huggingface/datasets/issues/5264
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
### Describe the bug I have an error when trying to load this [dataset](https://huggingface.co/datasets/bigcode/the-stack-dedup-pjj) (it's private but I can add you to the bigcode org). `datasets` can't read one of the parquet files in the Java subset. ```python from datasets import load_dataset ds = load_dataset("bigcode/the-stack-dedup-pjj", data_dir="data/java", split="train", revision="v1.1.a1", use_auth_token=True) ``` ``` File "pyarrow/error.pxi", line 100, in pyarrow.lib.check_status pyarrow.lib.ArrowInvalid: Parquet magic bytes not found in footer. Either the file is corrupted or this is not a parquet file. ``` It seems to be an issue with newer Python versions, because it works in these two environments: ``` - `datasets` version: 2.6.1 - Platform: Linux-5.4.0-131-generic-x86_64-with-glibc2.31 - Python version: 3.9.7 - PyArrow version: 9.0.0 - Pandas version: 1.3.4 ``` ``` - `datasets` version: 2.6.1 - Platform: Linux-4.19.0-22-cloud-amd64-x86_64-with-debian-10.13 - Python version: 3.7.12 - PyArrow version: 9.0.0 - Pandas version: 1.3.4 ``` But not in this one: ``` - `datasets` version: 2.6.1 - Platform: Linux-4.19.0-22-cloud-amd64-x86_64-with-glibc2.28 - Python version: 3.9.13 - PyArrow version: 9.0.0 - Pandas version: 1.3.4 ``` ### Steps to reproduce the bug Load the dataset in Python 3.9.13. ### Expected behavior Load the dataset without the pyarrow error. ### Environment info ``` - `datasets` version: 2.6.1 - Platform: Linux-4.19.0-22-cloud-amd64-x86_64-with-glibc2.28 - Python version: 3.9.13 - PyArrow version: 9.0.0 - Pandas version: 1.3.4 ```
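A hedged debugging sketch (not part of the original report): reading one cached parquet shard directly with pyarrow can help tell a truncated or corrupted download apart from a `datasets`-side problem. The path below is a placeholder, not the real cache location.

```python
# Open a cached parquet shard directly; a corrupted download fails here too,
# independently of the datasets library. Replace the path with a real shard.
import pyarrow.parquet as pq

table = pq.read_table("/path/to/hf/cache/some-shard.parquet")
print(table.num_rows, table.schema)
```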
2023-05-07T09:52:59Z
https://github.com/huggingface/datasets/issues/5264
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5264/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5263/comments
https://api.github.com/repos/huggingface/datasets/issues/5263/timeline
2022-12-14T18:22:59Z
null
completed
I_kwDODunzps5WvWSS
closed
[ { "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" } ]
null
5,263
{ "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" }
Save a dataset in a determined number of shards
https://api.github.com/repos/huggingface/datasets/issues/5263/events
null
https://api.github.com/repos/huggingface/datasets/issues/5263/labels{/name}
2022-11-18T14:43:54Z
null
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" }
null
1,455,252,626
[ { "color": "a2eeef", "default": true, "description": "New feature or request", "id": 1935892871, "name": "enhancement", "node_id": "MDU6TGFiZWwxOTM1ODkyODcx", "url": "https://api.github.com/repos/huggingface/datasets/labels/enhancement" } ]
https://api.github.com/repos/huggingface/datasets/issues/5263
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
This is useful to distribute the shards to training nodes. This can be implemented in `save_to_disk` and can also leverage multiprocessing to speed up the process
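Pending this feature, a sketch of producing a fixed number of shards with the existing `.shard()` API, which is roughly what the request would automate; the toy data and paths are illustrative.

```python
# Manual sharding with the current API: one save_to_disk call per contiguous shard.
from datasets import Dataset

ds = Dataset.from_dict({"text": [f"example {i}" for i in range(1000)]})
num_shards = 8

for index in range(num_shards):
    shard = ds.shard(num_shards=num_shards, index=index, contiguous=True)
    shard.save_to_disk(f"shards/shard_{index:05d}")
```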
2022-12-14T18:22:59Z
https://github.com/huggingface/datasets/issues/5263
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5263/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5262/comments
https://api.github.com/repos/huggingface/datasets/issues/5262/timeline
2022-11-22T10:09:23Z
null
completed
I_kwDODunzps5WvCYc
closed
[]
null
5,262
{ "avatar_url": "https://avatars.githubusercontent.com/u/102913847?v=4", "events_url": "https://api.github.com/users/emnaboughariou/events{/privacy}", "followers_url": "https://api.github.com/users/emnaboughariou/followers", "following_url": "https://api.github.com/users/emnaboughariou/following{/other_user}", "gists_url": "https://api.github.com/users/emnaboughariou/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/emnaboughariou", "id": 102913847, "login": "emnaboughariou", "node_id": "U_kgDOBiJXNw", "organizations_url": "https://api.github.com/users/emnaboughariou/orgs", "received_events_url": "https://api.github.com/users/emnaboughariou/received_events", "repos_url": "https://api.github.com/users/emnaboughariou/repos", "site_admin": false, "starred_url": "https://api.github.com/users/emnaboughariou/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/emnaboughariou/subscriptions", "type": "User", "url": "https://api.github.com/users/emnaboughariou" }
AttributeError: 'Value' object has no attribute 'names'
https://api.github.com/repos/huggingface/datasets/issues/5262/events
null
https://api.github.com/repos/huggingface/datasets/issues/5262/labels{/name}
2022-11-18T13:58:42Z
null
false
null
null
1,455,171,100
[]
https://api.github.com/repos/huggingface/datasets/issues/5262
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
Hello, I'm trying to build a model for custom token classification. I already followed the token classification course on Hugging Face while adapting the code to my work, but this message occurs: 'Value' object has no attribute 'names'. Here's my code: `raw_datasets` generates DatasetDict({ train: Dataset({ features: ['isDisf', 'pos', 'tokens', 'id'], num_rows: 14 }) }) `raw_datasets["train"][3]["isDisf"]` generates ['B_RM', 'I_RM', 'I_RM', 'B_RP', 'I_RP', 'O', 'O'] `dis_feature = raw_datasets["train"].features["isDisf"] dis_feature` generates Sequence(feature=Value(dtype='string', id=None), length=-1, id=None) and `label_names = dis_feature.feature.names label_names` generates AttributeError Traceback (most recent call last) [<ipython-input-28-972fd54a869a>](https://localhost:8080/#) in <module> ----> 1 label_names = dis_feature.feature.names 2 label_names AttributeError: 'Value' object has no attribute 'names' Thank you for your help
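One possible fix, sketched on a tiny stand-in for the poster's data: the `isDisf` column is a `Sequence` of plain string `Value`s, so it has no `.names`; casting it to a `ClassLabel` feature provides the attribute the course code expects. The label inventory below is an assumption inferred from the example row shown above.

```python
from datasets import ClassLabel, Dataset, DatasetDict, Sequence

# Tiny stand-in for the poster's DatasetDict; the real data has 14 rows and more columns.
raw_datasets = DatasetDict(
    {"train": Dataset.from_dict({"isDisf": [["B_RM", "I_RM", "I_RM", "B_RP", "I_RP", "O", "O"]]})}
)

label_list = ["O", "B_RM", "I_RM", "B_RP", "I_RP"]  # assumed full tag set
raw_datasets = raw_datasets.cast_column("isDisf", Sequence(ClassLabel(names=label_list)))

dis_feature = raw_datasets["train"].features["isDisf"]
print(dis_feature.feature.names)  # ['O', 'B_RM', 'I_RM', 'B_RP', 'I_RP']
```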
2022-11-22T10:09:24Z
https://github.com/huggingface/datasets/issues/5262
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5262/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5261/comments
https://api.github.com/repos/huggingface/datasets/issues/5261/timeline
null
null
null
I_kwDODunzps5WtCo1
open
[]
null
5,261
{ "avatar_url": "https://avatars.githubusercontent.com/u/48327001?v=4", "events_url": "https://api.github.com/users/NielsRogge/events{/privacy}", "followers_url": "https://api.github.com/users/NielsRogge/followers", "following_url": "https://api.github.com/users/NielsRogge/following{/other_user}", "gists_url": "https://api.github.com/users/NielsRogge/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/NielsRogge", "id": 48327001, "login": "NielsRogge", "node_id": "MDQ6VXNlcjQ4MzI3MDAx", "organizations_url": "https://api.github.com/users/NielsRogge/orgs", "received_events_url": "https://api.github.com/users/NielsRogge/received_events", "repos_url": "https://api.github.com/users/NielsRogge/repos", "site_admin": false, "starred_url": "https://api.github.com/users/NielsRogge/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/NielsRogge/subscriptions", "type": "User", "url": "https://api.github.com/users/NielsRogge" }
Add PubTables-1M
https://api.github.com/repos/huggingface/datasets/issues/5261/events
null
https://api.github.com/repos/huggingface/datasets/issues/5261/labels{/name}
2022-11-18T07:56:36Z
null
false
null
null
1,454,647,861
[ { "color": "e99695", "default": false, "description": "Requesting to add a new dataset", "id": 2067376369, "name": "dataset request", "node_id": "MDU6TGFiZWwyMDY3Mzc2MzY5", "url": "https://api.github.com/repos/huggingface/datasets/labels/dataset%20request" } ]
https://api.github.com/repos/huggingface/datasets/issues/5261
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
### Name PubTables-1M ### Paper https://openaccess.thecvf.com/content/CVPR2022/html/Smock_PubTables-1M_Towards_Comprehensive_Table_Extraction_From_Unstructured_Documents_CVPR_2022_paper.html ### Data https://github.com/microsoft/table-transformer ### Motivation Table Transformer is now available in 🤗 Transformer, and it was trained on PubTables-1M. It's a large dataset for table extraction and structure recognition in unstructured documents.
2022-11-18T08:02:18Z
https://github.com/huggingface/datasets/issues/5261
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5261/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5260/comments
https://api.github.com/repos/huggingface/datasets/issues/5260/timeline
null
null
null
I_kwDODunzps5WqRWh
open
[ { "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" } ]
null
5,260
{ "avatar_url": "https://avatars.githubusercontent.com/u/8098496?v=4", "events_url": "https://api.github.com/users/adiprasad/events{/privacy}", "followers_url": "https://api.github.com/users/adiprasad/followers", "following_url": "https://api.github.com/users/adiprasad/following{/other_user}", "gists_url": "https://api.github.com/users/adiprasad/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/adiprasad", "id": 8098496, "login": "adiprasad", "node_id": "MDQ6VXNlcjgwOTg0OTY=", "organizations_url": "https://api.github.com/users/adiprasad/orgs", "received_events_url": "https://api.github.com/users/adiprasad/received_events", "repos_url": "https://api.github.com/users/adiprasad/repos", "site_admin": false, "starred_url": "https://api.github.com/users/adiprasad/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/adiprasad/subscriptions", "type": "User", "url": "https://api.github.com/users/adiprasad" }
consumer-finance-complaints dataset not loading
https://api.github.com/repos/huggingface/datasets/issues/5260/events
null
https://api.github.com/repos/huggingface/datasets/issues/5260/labels{/name}
2022-11-17T20:10:26Z
null
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
null
1,453,921,697
[]
https://api.github.com/repos/huggingface/datasets/issues/5260
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
### Describe the bug Error during dataset loading ### Steps to reproduce the bug ``` >>> import datasets >>> cf_raw = datasets.load_dataset("consumer-finance-complaints") Downloading builder script: 100%|█████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 8.42k/8.42k [00:00<00:00, 3.33MB/s] Downloading metadata: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 5.60k/5.60k [00:00<00:00, 2.90MB/s] Downloading readme: 100%|██████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 16.6k/16.6k [00:00<00:00, 510kB/s] Downloading and preparing dataset consumer-finance-complaints/default to /root/.cache/huggingface/datasets/consumer-finance-complaints/default/0.0.0/30e483d37fb4b25bb98cad1bfd2dc48f6ed6d1f3371eb4568c625a61d1a79b69... Downloading data: 100%|██████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 511M/511M [00:04<00:00, 103MB/s] Traceback (most recent call last): File "<stdin>", line 1, in <module> File "/skunk-pod-storage-lee-2emartie-40ibm-2ecom-pvc/anaconda3/envs/datasets/lib/python3.8/site-packages/datasets/load.py", line 1741, in load_dataset builder_instance.download_and_prepare( File "/skunk-pod-storage-lee-2emartie-40ibm-2ecom-pvc/anaconda3/envs/datasets/lib/python3.8/site-packages/datasets/builder.py", line 822, in download_and_prepare self._download_and_prepare( File "/skunk-pod-storage-lee-2emartie-40ibm-2ecom-pvc/anaconda3/envs/datasets/lib/python3.8/site-packages/datasets/builder.py", line 1555, in _download_and_prepare super()._download_and_prepare( File "/skunk-pod-storage-lee-2emartie-40ibm-2ecom-pvc/anaconda3/envs/datasets/lib/python3.8/site-packages/datasets/builder.py", line 931, in _download_and_prepare verify_splits(self.info.splits, split_dict) File "/skunk-pod-storage-lee-2emartie-40ibm-2ecom-pvc/anaconda3/envs/datasets/lib/python3.8/site-packages/datasets/utils/info_utils.py", line 74, in verify_splits raise NonMatchingSplitsSizesError(str(bad_splits)) datasets.utils.info_utils.NonMatchingSplitsSizesError: [{'expected': SplitInfo(name='train', num_bytes=1605177353, num_examples=2455765, shard_lengths=None, dataset_name=None), 'recorded': SplitInfo(name='train', num_bytes=2043641693, num_examples=3079747, shard_lengths=[721000, 656000, 788000, 846000, 68747], dataset_name='consumer-finance-complaints')}] ``` ### Expected behavior dataset should load ### Environment info >>> datasets.__version__ '2.7.0' Python 3.8.10 "Ubuntu 20.04.4 LTS"
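A possible workaround sketch, not the underlying fix (which is updating the dataset's recorded split sizes): skip the split-size verification that raises `NonMatchingSplitsSizesError`.

```python
import datasets

# ignore_verifications is the parameter name as of datasets 2.7;
# later versions replace it with verification_mode.
cf_raw = datasets.load_dataset(
    "consumer-finance-complaints",
    ignore_verifications=True,
)
```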
2022-11-18T10:16:53Z
https://github.com/huggingface/datasets/issues/5260
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5260/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5259/comments
https://api.github.com/repos/huggingface/datasets/issues/5259/timeline
2022-11-18T12:52:05Z
null
completed
I_kwDODunzps5Wo4DT
closed
[]
null
5,259
{ "avatar_url": "https://avatars.githubusercontent.com/u/3616964?v=4", "events_url": "https://api.github.com/users/DCNemesis/events{/privacy}", "followers_url": "https://api.github.com/users/DCNemesis/followers", "following_url": "https://api.github.com/users/DCNemesis/following{/other_user}", "gists_url": "https://api.github.com/users/DCNemesis/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/DCNemesis", "id": 3616964, "login": "DCNemesis", "node_id": "MDQ6VXNlcjM2MTY5NjQ=", "organizations_url": "https://api.github.com/users/DCNemesis/orgs", "received_events_url": "https://api.github.com/users/DCNemesis/received_events", "repos_url": "https://api.github.com/users/DCNemesis/repos", "site_admin": false, "starred_url": "https://api.github.com/users/DCNemesis/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/DCNemesis/subscriptions", "type": "User", "url": "https://api.github.com/users/DCNemesis" }
datasets 2.7 introduces sharding error
https://api.github.com/repos/huggingface/datasets/issues/5259/events
null
https://api.github.com/repos/huggingface/datasets/issues/5259/labels{/name}
2022-11-17T15:36:52Z
null
false
null
null
1,453,555,923
[]
https://api.github.com/repos/huggingface/datasets/issues/5259
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
### Describe the bug dataset fails to load with runtime error `RuntimeError: Sharding is ambiguous for this dataset: we found several data sources lists of different lengths, and we don't know over which list we should parallelize: - key audio_files has length 46 - key data has length 0 To fix this, check the 'gen_kwargs' and make sure to use lists only for data sources, and use tuples otherwise. In the end there should only be one single list, or several lists with the same length.` ### Steps to reproduce the bug With datasets[audio] 2.7 loaded, and logged into hugging face, `data = datasets.load_dataset('sil-ai/bloom-speech', 'bis', use_auth_token=True)` creates the error. Full stack trace: ```--------------------------------------------------------------------------- RuntimeError Traceback (most recent call last) [<ipython-input-7-8cb9ca0f79f0>](https://localhost:8080/#) in <module> ----> 1 data = datasets.load_dataset('sil-ai/bloom-speech', 'bis', use_auth_token=True) 5 frames [/usr/local/lib/python3.7/dist-packages/datasets/load.py](https://localhost:8080/#) in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, revision, use_auth_token, task, streaming, num_proc, **config_kwargs) 1745 try_from_hf_gcs=try_from_hf_gcs, 1746 use_auth_token=use_auth_token, -> 1747 num_proc=num_proc, 1748 ) 1749 [/usr/local/lib/python3.7/dist-packages/datasets/builder.py](https://localhost:8080/#) in download_and_prepare(self, output_dir, download_config, download_mode, ignore_verifications, try_from_hf_gcs, dl_manager, base_path, use_auth_token, file_format, max_shard_size, num_proc, storage_options, **download_and_prepare_kwargs) 824 verify_infos=verify_infos, 825 **prepare_split_kwargs, --> 826 **download_and_prepare_kwargs, 827 ) 828 # Sync info [/usr/local/lib/python3.7/dist-packages/datasets/builder.py](https://localhost:8080/#) in _download_and_prepare(self, dl_manager, verify_infos, **prepare_splits_kwargs) 1554 def _download_and_prepare(self, dl_manager, verify_infos, **prepare_splits_kwargs): 1555 super()._download_and_prepare( -> 1556 dl_manager, verify_infos, check_duplicate_keys=verify_infos, **prepare_splits_kwargs 1557 ) 1558 [/usr/local/lib/python3.7/dist-packages/datasets/builder.py](https://localhost:8080/#) in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs) 911 try: 912 # Prepare split will record examples associated to the split --> 913 self._prepare_split(split_generator, **prepare_split_kwargs) 914 except OSError as e: 915 raise OSError( [/usr/local/lib/python3.7/dist-packages/datasets/builder.py](https://localhost:8080/#) in _prepare_split(self, split_generator, check_duplicate_keys, file_format, num_proc, max_shard_size) 1362 fpath = path_join(self._output_dir, fname) 1363 -> 1364 num_input_shards = _number_of_shards_in_gen_kwargs(split_generator.gen_kwargs) 1365 if num_input_shards <= 1 and num_proc is not None: 1366 logger.warning( [/usr/local/lib/python3.7/dist-packages/datasets/utils/sharding.py](https://localhost:8080/#) in _number_of_shards_in_gen_kwargs(gen_kwargs) 16 + "\n".join(f"\t- key {key} has length {length}" for key, length in lists_lengths.items()) 17 + "\nTo fix this, check the 'gen_kwargs' and make sure to use lists only for data sources, " ---> 18 + "and use tuples otherwise. In the end there should only be one single list, or several lists with the same length." 
19 ) 20 ) RuntimeError: Sharding is ambiguous for this dataset: we found several data sources lists of different lengths, and we don't know over which list we should parallelize: - key audio_files has length 46 - key data has length 0 To fix this, check the 'gen_kwargs' and make sure to use lists only for data sources, and use tuples otherwise. In the end there should only be one single list, or several lists with the same length.``` ### Expected behavior the dataset loads in datasets version 2.6.1 and should load with datasets 2.7 ### Environment info - `datasets` version: 2.7.0 - Platform: Linux-5.10.133+-x86_64-with-Ubuntu-18.04-bionic - Python version: 3.7.15 - PyArrow version: 6.0.1 - Pandas version: 1.3.5
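A sketch of the `gen_kwargs` shape the error message asks for inside a loading script's `_split_generators`: exactly one list (the data source to shard over), with any other value passed as a tuple or scalar. The builder, URLs and features below are illustrative, not the real `sil-ai/bloom-speech` script.

```python
import datasets

# Illustrative placeholders only.
AUDIO_URLS = [f"https://example.com/audio_{i}.mp3" for i in range(46)]
METADATA_URL = "https://example.com/metadata.csv"


class BloomSpeechSketch(datasets.GeneratorBasedBuilder):
    VERSION = datasets.Version("0.0.1")

    def _info(self):
        return datasets.DatasetInfo(features=datasets.Features({"file": datasets.Value("string")}))

    def _split_generators(self, dl_manager):
        audio_files = dl_manager.download(AUDIO_URLS)  # the one list sharding parallelizes over
        metadata = dl_manager.download(METADATA_URL)   # a single file, not a list

        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={
                    "audio_files": audio_files,  # only list in gen_kwargs
                    "data": (metadata,),         # tuple, so it is not mistaken for a shardable source
                },
            )
        ]

    def _generate_examples(self, audio_files, data):
        for idx, path in enumerate(audio_files):
            yield idx, {"file": path}
```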
2022-12-24T01:44:02Z
https://github.com/huggingface/datasets/issues/5259
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5259/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5258/comments
https://api.github.com/repos/huggingface/datasets/issues/5258/timeline
2022-11-19T06:51:37Z
null
completed
I_kwDODunzps5Woudc
closed
[ { "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" } ]
null
5,258
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
Restore order of split names in dataset_info for canonical datasets
https://api.github.com/repos/huggingface/datasets/issues/5258/events
null
https://api.github.com/repos/huggingface/datasets/issues/5258/labels{/name}
2022-11-17T15:13:15Z
null
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
null
1,453,516,636
[ { "color": "0e8a16", "default": false, "description": "Contribution to a dataset script", "id": 4564477500, "name": "dataset contribution", "node_id": "LA_kwDODunzps8AAAABEBBmPA", "url": "https://api.github.com/repos/huggingface/datasets/labels/dataset%20contribution" } ]
https://api.github.com/repos/huggingface/datasets/issues/5258
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
After a bulk edit of canonical datasets to create the YAML `dataset_info` metadata, the split names were accidentally sorted alphabetically. See for example: - https://huggingface.co/datasets/bc2gm_corpus/commit/2384629484401ecf4bb77cd808816719c424e57c Note that this order is the one appearing in the preview of the datasets. I'm making a bulk edit to align the order of the splits appearing in the metadata info with the order appearing in the loading script. Related to: - #5202
2023-02-16T09:49:05Z
https://github.com/huggingface/datasets/issues/5258
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5258/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5257/comments
https://api.github.com/repos/huggingface/datasets/issues/5257/timeline
2022-11-18T11:04:08Z
null
null
PR_kwDODunzps5DFENm
closed
[]
false
5,257
{ "avatar_url": "https://avatars.githubusercontent.com/u/7569098?v=4", "events_url": "https://api.github.com/users/WrRan/events{/privacy}", "followers_url": "https://api.github.com/users/WrRan/followers", "following_url": "https://api.github.com/users/WrRan/following{/other_user}", "gists_url": "https://api.github.com/users/WrRan/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/WrRan", "id": 7569098, "login": "WrRan", "node_id": "MDQ6VXNlcjc1NjkwOTg=", "organizations_url": "https://api.github.com/users/WrRan/orgs", "received_events_url": "https://api.github.com/users/WrRan/received_events", "repos_url": "https://api.github.com/users/WrRan/repos", "site_admin": false, "starred_url": "https://api.github.com/users/WrRan/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/WrRan/subscriptions", "type": "User", "url": "https://api.github.com/users/WrRan" }
remove an unused statement
https://api.github.com/repos/huggingface/datasets/issues/5257/events
null
https://api.github.com/repos/huggingface/datasets/issues/5257/labels{/name}
2022-11-17T04:00:50Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5257.diff", "html_url": "https://github.com/huggingface/datasets/pull/5257", "merged_at": "2022-11-18T11:04:08Z", "patch_url": "https://github.com/huggingface/datasets/pull/5257.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5257" }
1,452,656,891
[]
https://api.github.com/repos/huggingface/datasets/issues/5257
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
remove the unused statement: `input_pairs = list(zip())`
2022-11-18T11:04:08Z
https://github.com/huggingface/datasets/pull/5257
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5257/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5256/comments
https://api.github.com/repos/huggingface/datasets/issues/5256/timeline
2022-11-18T11:05:32Z
null
null
PR_kwDODunzps5DFDY0
closed
[]
false
5,256
{ "avatar_url": "https://avatars.githubusercontent.com/u/7569098?v=4", "events_url": "https://api.github.com/users/WrRan/events{/privacy}", "followers_url": "https://api.github.com/users/WrRan/followers", "following_url": "https://api.github.com/users/WrRan/following{/other_user}", "gists_url": "https://api.github.com/users/WrRan/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/WrRan", "id": 7569098, "login": "WrRan", "node_id": "MDQ6VXNlcjc1NjkwOTg=", "organizations_url": "https://api.github.com/users/WrRan/orgs", "received_events_url": "https://api.github.com/users/WrRan/received_events", "repos_url": "https://api.github.com/users/WrRan/repos", "site_admin": false, "starred_url": "https://api.github.com/users/WrRan/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/WrRan/subscriptions", "type": "User", "url": "https://api.github.com/users/WrRan" }
fix wrong print
https://api.github.com/repos/huggingface/datasets/issues/5256/events
null
https://api.github.com/repos/huggingface/datasets/issues/5256/labels{/name}
2022-11-17T03:54:26Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5256.diff", "html_url": "https://github.com/huggingface/datasets/pull/5256", "merged_at": "2022-11-18T11:05:32Z", "patch_url": "https://github.com/huggingface/datasets/pull/5256.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5256" }
1,452,652,586
[]
https://api.github.com/repos/huggingface/datasets/issues/5256
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
print `encoded_dataset.column_names` not `dataset.column_names`
2022-11-18T11:05:32Z
https://github.com/huggingface/datasets/pull/5256
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5256/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5255/comments
https://api.github.com/repos/huggingface/datasets/issues/5255/timeline
2022-12-17T12:20:37Z
null
completed
I_kwDODunzps5WlWXd
closed
[ { "avatar_url": "https://avatars.githubusercontent.com/u/22957388?v=4", "events_url": "https://api.github.com/users/sayakpaul/events{/privacy}", "followers_url": "https://api.github.com/users/sayakpaul/followers", "following_url": "https://api.github.com/users/sayakpaul/following{/other_user}", "gists_url": "https://api.github.com/users/sayakpaul/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/sayakpaul", "id": 22957388, "login": "sayakpaul", "node_id": "MDQ6VXNlcjIyOTU3Mzg4", "organizations_url": "https://api.github.com/users/sayakpaul/orgs", "received_events_url": "https://api.github.com/users/sayakpaul/received_events", "repos_url": "https://api.github.com/users/sayakpaul/repos", "site_admin": false, "starred_url": "https://api.github.com/users/sayakpaul/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/sayakpaul/subscriptions", "type": "User", "url": "https://api.github.com/users/sayakpaul" } ]
null
5,255
{ "avatar_url": "https://avatars.githubusercontent.com/u/22957388?v=4", "events_url": "https://api.github.com/users/sayakpaul/events{/privacy}", "followers_url": "https://api.github.com/users/sayakpaul/followers", "following_url": "https://api.github.com/users/sayakpaul/following{/other_user}", "gists_url": "https://api.github.com/users/sayakpaul/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/sayakpaul", "id": 22957388, "login": "sayakpaul", "node_id": "MDQ6VXNlcjIyOTU3Mzg4", "organizations_url": "https://api.github.com/users/sayakpaul/orgs", "received_events_url": "https://api.github.com/users/sayakpaul/received_events", "repos_url": "https://api.github.com/users/sayakpaul/repos", "site_admin": false, "starred_url": "https://api.github.com/users/sayakpaul/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/sayakpaul/subscriptions", "type": "User", "url": "https://api.github.com/users/sayakpaul" }
Add a Depth Estimation dataset - DIODE / NYUDepth / KITTI
https://api.github.com/repos/huggingface/datasets/issues/5255/events
null
https://api.github.com/repos/huggingface/datasets/issues/5255/labels{/name}
2022-11-17T03:22:22Z
null
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/22957388?v=4", "events_url": "https://api.github.com/users/sayakpaul/events{/privacy}", "followers_url": "https://api.github.com/users/sayakpaul/followers", "following_url": "https://api.github.com/users/sayakpaul/following{/other_user}", "gists_url": "https://api.github.com/users/sayakpaul/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/sayakpaul", "id": 22957388, "login": "sayakpaul", "node_id": "MDQ6VXNlcjIyOTU3Mzg4", "organizations_url": "https://api.github.com/users/sayakpaul/orgs", "received_events_url": "https://api.github.com/users/sayakpaul/received_events", "repos_url": "https://api.github.com/users/sayakpaul/repos", "site_admin": false, "starred_url": "https://api.github.com/users/sayakpaul/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/sayakpaul/subscriptions", "type": "User", "url": "https://api.github.com/users/sayakpaul" }
null
1,452,631,517
[ { "color": "e99695", "default": false, "description": "Requesting to add a new dataset", "id": 2067376369, "name": "dataset request", "node_id": "MDU6TGFiZWwyMDY3Mzc2MzY5", "url": "https://api.github.com/repos/huggingface/datasets/labels/dataset%20request" } ]
https://api.github.com/repos/huggingface/datasets/issues/5255
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
### Name NYUDepth ### Paper http://cs.nyu.edu/~silberman/papers/indoor_seg_support.pdf ### Data https://cs.nyu.edu/~silberman/datasets/nyu_depth_v2.html ### Motivation Depth estimation is an important problem in computer vision. We have a couple of Depth Estimation models on Hub as well: * [GLPN](https://huggingface.co/docs/transformers/model_doc/glpn) * [DPT](https://huggingface.co/docs/transformers/model_doc/dpt) Would be nice to have a dataset for depth estimation. These datasets usually have three things: input image, depth map image, and depth mask (validity mask to indicate if a reading for a pixel is valid or not). Since we already have [semantic segmentation datasets on the Hub](https://huggingface.co/datasets?task_categories=task_categories:image-segmentation&sort=downloads), I don't think we need any extended utilities to support this addition. Having this dataset would also allow us to author data preprocessing guides for depth estimation, particularly like the ones we have for other tasks ([example](https://huggingface.co/docs/datasets/image_classification)). Ccing @osanseviero @nateraw @NielsRogge Happy to work on adding it.
2022-12-17T12:20:38Z
https://github.com/huggingface/datasets/issues/5255
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5255/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5254/comments
https://api.github.com/repos/huggingface/datasets/issues/5254/timeline
2022-11-18T10:53:45Z
null
null
PR_kwDODunzps5DE47u
closed
[]
false
5,254
{ "avatar_url": "https://avatars.githubusercontent.com/u/7569098?v=4", "events_url": "https://api.github.com/users/WrRan/events{/privacy}", "followers_url": "https://api.github.com/users/WrRan/followers", "following_url": "https://api.github.com/users/WrRan/following{/other_user}", "gists_url": "https://api.github.com/users/WrRan/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/WrRan", "id": 7569098, "login": "WrRan", "node_id": "MDQ6VXNlcjc1NjkwOTg=", "organizations_url": "https://api.github.com/users/WrRan/orgs", "received_events_url": "https://api.github.com/users/WrRan/received_events", "repos_url": "https://api.github.com/users/WrRan/repos", "site_admin": false, "starred_url": "https://api.github.com/users/WrRan/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/WrRan/subscriptions", "type": "User", "url": "https://api.github.com/users/WrRan" }
typo
https://api.github.com/repos/huggingface/datasets/issues/5254/events
null
https://api.github.com/repos/huggingface/datasets/issues/5254/labels{/name}
2022-11-17T02:39:57Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5254.diff", "html_url": "https://github.com/huggingface/datasets/pull/5254", "merged_at": "2022-11-18T10:53:45Z", "patch_url": "https://github.com/huggingface/datasets/pull/5254.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5254" }
1,452,600,088
[]
https://api.github.com/repos/huggingface/datasets/issues/5254
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
null
2022-11-18T10:53:45Z
https://github.com/huggingface/datasets/pull/5254
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5254/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5253/comments
https://api.github.com/repos/huggingface/datasets/issues/5253/timeline
2022-11-18T10:53:10Z
null
null
PR_kwDODunzps5DE2io
closed
[]
false
5,253
{ "avatar_url": "https://avatars.githubusercontent.com/u/7569098?v=4", "events_url": "https://api.github.com/users/WrRan/events{/privacy}", "followers_url": "https://api.github.com/users/WrRan/followers", "following_url": "https://api.github.com/users/WrRan/following{/other_user}", "gists_url": "https://api.github.com/users/WrRan/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/WrRan", "id": 7569098, "login": "WrRan", "node_id": "MDQ6VXNlcjc1NjkwOTg=", "organizations_url": "https://api.github.com/users/WrRan/orgs", "received_events_url": "https://api.github.com/users/WrRan/received_events", "repos_url": "https://api.github.com/users/WrRan/repos", "site_admin": false, "starred_url": "https://api.github.com/users/WrRan/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/WrRan/subscriptions", "type": "User", "url": "https://api.github.com/users/WrRan" }
typo
https://api.github.com/repos/huggingface/datasets/issues/5253/events
null
https://api.github.com/repos/huggingface/datasets/issues/5253/labels{/name}
2022-11-17T02:22:58Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5253.diff", "html_url": "https://github.com/huggingface/datasets/pull/5253", "merged_at": "2022-11-18T10:53:10Z", "patch_url": "https://github.com/huggingface/datasets/pull/5253.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5253" }
1,452,588,206
[]
https://api.github.com/repos/huggingface/datasets/issues/5253
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
null
2022-11-18T10:53:11Z
https://github.com/huggingface/datasets/pull/5253
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5253/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5252/comments
https://api.github.com/repos/huggingface/datasets/issues/5252/timeline
2022-12-13T16:59:04Z
null
null
PR_kwDODunzps5DCI1U
closed
[]
false
5,252
{ "avatar_url": "https://avatars.githubusercontent.com/u/47462742?v=4", "events_url": "https://api.github.com/users/mariosasko/events{/privacy}", "followers_url": "https://api.github.com/users/mariosasko/followers", "following_url": "https://api.github.com/users/mariosasko/following{/other_user}", "gists_url": "https://api.github.com/users/mariosasko/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mariosasko", "id": 47462742, "login": "mariosasko", "node_id": "MDQ6VXNlcjQ3NDYyNzQy", "organizations_url": "https://api.github.com/users/mariosasko/orgs", "received_events_url": "https://api.github.com/users/mariosasko/received_events", "repos_url": "https://api.github.com/users/mariosasko/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mariosasko/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mariosasko/subscriptions", "type": "User", "url": "https://api.github.com/users/mariosasko" }
Support for decoding Image/Audio types in map when format type is not default one
https://api.github.com/repos/huggingface/datasets/issues/5252/events
null
https://api.github.com/repos/huggingface/datasets/issues/5252/labels{/name}
2022-11-16T15:02:13Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5252.diff", "html_url": "https://github.com/huggingface/datasets/pull/5252", "merged_at": "2022-12-13T16:59:04Z", "patch_url": "https://github.com/huggingface/datasets/pull/5252.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5252" }
1,451,765,838
[]
https://api.github.com/repos/huggingface/datasets/issues/5252
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
Add support for decoding the `Image`/`Audio` types in `map` for the formats (Numpy, TF, Jax, PyTorch) other than the default one (Python). Additional improvements: * make `Dataset`'s "iter" API cleaner by removing `_iter` and replacing `_iter_batches` with `iter(batch_size)` (also implemented for `IterableDataset`) * iterate over arrow tables in `map` to avoid `_getitem` calls, which are much slower than `__iter__`/`iter(batch_size)`, when the `format_type` is not Python * fix `_iter_batches` (now named `iter`) when `drop_last_batch=True` and `pyarrow<=8.0.0` is installed * lazily extract and decode arrow data in the default format TODO: * [x] update the `iter` benchmark in the docs (the `BeamBuilder` cannot load the preprocessed datasets from our bucket, so wait for this to be fixed (cc @lhoestq)) Fix https://github.com/huggingface/datasets/issues/3992, fix https://github.com/huggingface/datasets/issues/3756
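For illustration only (not part of the PR description above), a minimal usage sketch of what this enables, using the public `beans` image dataset as an assumed example: with the dataset set to the NumPy format, the `Image` column is decoded inside `map` instead of staying encoded.

```python
from datasets import load_dataset

# Hedged sketch: decode Image features inside map while the dataset format is "numpy".
ds = load_dataset("beans", split="train")
ds = ds.with_format("numpy")

def add_mean_pixel(batch):
    # batch["image"] holds decoded NumPy arrays rather than encoded image bytes.
    batch["mean_pixel"] = [float(img.mean()) for img in batch["image"]]
    return batch

ds = ds.map(add_mean_pixel, batched=True)
```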
2022-12-13T17:01:54Z
https://github.com/huggingface/datasets/pull/5252
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 1, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 1, "url": "https://api.github.com/repos/huggingface/datasets/issues/5252/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5251/comments
https://api.github.com/repos/huggingface/datasets/issues/5251/timeline
2022-11-22T16:27:50Z
null
completed
I_kwDODunzps5WiB6p
closed
[]
null
5,251
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
Docs are not generated after latest release
https://api.github.com/repos/huggingface/datasets/issues/5251/events
null
https://api.github.com/repos/huggingface/datasets/issues/5251/labels{/name}
2022-11-16T14:59:31Z
null
false
null
null
1,451,761,321
[ { "color": "d4c5f9", "default": false, "description": "Maintenance tasks", "id": 4296013012, "name": "maintenance", "node_id": "LA_kwDODunzps8AAAABAA_01A", "url": "https://api.github.com/repos/huggingface/datasets/labels/maintenance" } ]
https://api.github.com/repos/huggingface/datasets/issues/5251
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
After the latest `datasets` release, version 2.7.0, the docs were not generated. As we have changed the release procedure (so that now we do not push directly to the main branch), maybe we should also change the corresponding GitHub action: https://github.com/huggingface/datasets/blob/edf1902f954c5568daadebcd8754bdad44b02a85/.github/workflows/build_documentation.yml#L3-L8 Related to: - #5250 CC: @mishig25
2022-11-22T16:27:50Z
https://github.com/huggingface/datasets/issues/5251
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5251/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5250/comments
https://api.github.com/repos/huggingface/datasets/issues/5250/timeline
2022-11-22T16:27:48Z
null
null
PR_kwDODunzps5DB-1y
closed
[]
false
5,250
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
Change release procedure to use only pull requests
https://api.github.com/repos/huggingface/datasets/issues/5250/events
null
https://api.github.com/repos/huggingface/datasets/issues/5250/labels{/name}
2022-11-16T14:35:32Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5250.diff", "html_url": "https://github.com/huggingface/datasets/pull/5250", "merged_at": "2022-11-22T16:27:48Z", "patch_url": "https://github.com/huggingface/datasets/pull/5250.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5250" }
1,451,720,030
[]
https://api.github.com/repos/huggingface/datasets/issues/5250
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
This PR changes the release procedure so that: - it only makes changes to the main branch via pull requests - it is no longer necessary to commit/push directly to the main branch Close #5251.
2022-11-22T16:30:58Z
https://github.com/huggingface/datasets/pull/5250
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5250/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5249/comments
https://api.github.com/repos/huggingface/datasets/issues/5249/timeline
2023-12-21T10:28:26Z
null
completed
I_kwDODunzps5WhxDX
closed
[ { "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" } ]
null
5,249
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
Protect the main branch from inadvertent direct pushes
https://api.github.com/repos/huggingface/datasets/issues/5249/events
null
https://api.github.com/repos/huggingface/datasets/issues/5249/labels{/name}
2022-11-16T14:19:03Z
null
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
null
1,451,692,247
[ { "color": "d4c5f9", "default": false, "description": "Maintenance tasks", "id": 4296013012, "name": "maintenance", "node_id": "LA_kwDODunzps8AAAABAA_01A", "url": "https://api.github.com/repos/huggingface/datasets/labels/maintenance" } ]
https://api.github.com/repos/huggingface/datasets/issues/5249
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
We have decided to implement a protection mechanism in this repository, so that nobody (not even administrators) can inadvertently push directly to the main branch. See context here: - d7c942228b8dcf4de64b00a3053dce59b335f618 To do: - [x] Protect main branch - Settings > Branches > Branch protection rules > main > Edit - [x] Check: Do not allow bypassing the above settings - The above settings will apply to administrators and custom roles with the "bypass branch protections" permission. - [x] Additionally, uncheck: Require approvals [under "Require a pull request before merging", which was already checked] - Before, we could exceptionally merge a non-approved PR, using Administrator bypass - Now that Administrator bypass is no longer possible, we would always need an approval to be able to merge; and pull request authors cannot approve their own pull requests. This could be an inconvenience in some exceptional circumstances when an urgent fix is needed - Nevertheless, although it is no longer enforced, it is strongly recommended to merge PRs only if they have at least one approval - [x] #5250 - So that direct pushes to the main branch are no longer necessary
2023-12-21T10:28:27Z
https://github.com/huggingface/datasets/issues/5249
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5249/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5248/comments
https://api.github.com/repos/huggingface/datasets/issues/5248/timeline
2022-11-16T10:41:10Z
null
null
PR_kwDODunzps5DAqwt
closed
[]
false
5,248
{ "avatar_url": "https://avatars.githubusercontent.com/u/11827707?v=4", "events_url": "https://api.github.com/users/mishig25/events{/privacy}", "followers_url": "https://api.github.com/users/mishig25/followers", "following_url": "https://api.github.com/users/mishig25/following{/other_user}", "gists_url": "https://api.github.com/users/mishig25/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mishig25", "id": 11827707, "login": "mishig25", "node_id": "MDQ6VXNlcjExODI3NzA3", "organizations_url": "https://api.github.com/users/mishig25/orgs", "received_events_url": "https://api.github.com/users/mishig25/received_events", "repos_url": "https://api.github.com/users/mishig25/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mishig25/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mishig25/subscriptions", "type": "User", "url": "https://api.github.com/users/mishig25" }
Complete doc migration
https://api.github.com/repos/huggingface/datasets/issues/5248/events
null
https://api.github.com/repos/huggingface/datasets/issues/5248/labels{/name}
2022-11-16T10:41:04Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5248.diff", "html_url": "https://github.com/huggingface/datasets/pull/5248", "merged_at": "2022-11-16T10:41:10Z", "patch_url": "https://github.com/huggingface/datasets/pull/5248.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5248" }
1,451,338,676
[]
https://api.github.com/repos/huggingface/datasets/issues/5248
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
Reverts huggingface/datasets#5214 Everything is handled on the doc-builder side now 😊
2022-11-16T15:06:50Z
https://github.com/huggingface/datasets/pull/5248
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5248/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5247/comments
https://api.github.com/repos/huggingface/datasets/issues/5247/timeline
2022-11-16T10:17:50Z
null
null
PR_kwDODunzps5DAhto
closed
[]
false
5,247
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
Set dev version
https://api.github.com/repos/huggingface/datasets/issues/5247/events
null
https://api.github.com/repos/huggingface/datasets/issues/5247/labels{/name}
2022-11-16T10:17:31Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5247.diff", "html_url": "https://github.com/huggingface/datasets/pull/5247", "merged_at": "2022-11-16T10:17:50Z", "patch_url": "https://github.com/huggingface/datasets/pull/5247.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5247" }
1,451,297,749
[]
https://api.github.com/repos/huggingface/datasets/issues/5247
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
null
2022-11-16T10:22:20Z
https://github.com/huggingface/datasets/pull/5247
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5247/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5246/comments
https://api.github.com/repos/huggingface/datasets/issues/5246/timeline
2022-11-16T09:37:03Z
null
null
PR_kwDODunzps5DASLI
closed
[]
false
5,246
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
Release: 2.7.0
https://api.github.com/repos/huggingface/datasets/issues/5246/events
null
https://api.github.com/repos/huggingface/datasets/issues/5246/labels{/name}
2022-11-16T09:32:44Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5246.diff", "html_url": "https://github.com/huggingface/datasets/pull/5246", "merged_at": "2022-11-16T09:37:03Z", "patch_url": "https://github.com/huggingface/datasets/pull/5246.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5246" }
1,451,226,055
[]
https://api.github.com/repos/huggingface/datasets/issues/5246
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
null
2022-11-16T09:39:42Z
https://github.com/huggingface/datasets/pull/5246
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5246/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5245/comments
https://api.github.com/repos/huggingface/datasets/issues/5245/timeline
2022-11-28T12:53:24Z
null
completed
I_kwDODunzps5Wcvzx
closed
[ { "avatar_url": "https://avatars.githubusercontent.com/u/36760800?v=4", "events_url": "https://api.github.com/users/alvarobartt/events{/privacy}", "followers_url": "https://api.github.com/users/alvarobartt/followers", "following_url": "https://api.github.com/users/alvarobartt/following{/other_user}", "gists_url": "https://api.github.com/users/alvarobartt/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/alvarobartt", "id": 36760800, "login": "alvarobartt", "node_id": "MDQ6VXNlcjM2NzYwODAw", "organizations_url": "https://api.github.com/users/alvarobartt/orgs", "received_events_url": "https://api.github.com/users/alvarobartt/received_events", "repos_url": "https://api.github.com/users/alvarobartt/repos", "site_admin": false, "starred_url": "https://api.github.com/users/alvarobartt/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/alvarobartt/subscriptions", "type": "User", "url": "https://api.github.com/users/alvarobartt" } ]
null
5,245
{ "avatar_url": "https://avatars.githubusercontent.com/u/9079808?v=4", "events_url": "https://api.github.com/users/peregilk/events{/privacy}", "followers_url": "https://api.github.com/users/peregilk/followers", "following_url": "https://api.github.com/users/peregilk/following{/other_user}", "gists_url": "https://api.github.com/users/peregilk/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/peregilk", "id": 9079808, "login": "peregilk", "node_id": "MDQ6VXNlcjkwNzk4MDg=", "organizations_url": "https://api.github.com/users/peregilk/orgs", "received_events_url": "https://api.github.com/users/peregilk/received_events", "repos_url": "https://api.github.com/users/peregilk/repos", "site_admin": false, "starred_url": "https://api.github.com/users/peregilk/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/peregilk/subscriptions", "type": "User", "url": "https://api.github.com/users/peregilk" }
Unable to rename columns in streaming dataset
https://api.github.com/repos/huggingface/datasets/issues/5245/events
null
https://api.github.com/repos/huggingface/datasets/issues/5245/labels{/name}
2022-11-15T21:04:41Z
null
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/36760800?v=4", "events_url": "https://api.github.com/users/alvarobartt/events{/privacy}", "followers_url": "https://api.github.com/users/alvarobartt/followers", "following_url": "https://api.github.com/users/alvarobartt/following{/other_user}", "gists_url": "https://api.github.com/users/alvarobartt/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/alvarobartt", "id": 36760800, "login": "alvarobartt", "node_id": "MDQ6VXNlcjM2NzYwODAw", "organizations_url": "https://api.github.com/users/alvarobartt/orgs", "received_events_url": "https://api.github.com/users/alvarobartt/received_events", "repos_url": "https://api.github.com/users/alvarobartt/repos", "site_admin": false, "starred_url": "https://api.github.com/users/alvarobartt/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/alvarobartt/subscriptions", "type": "User", "url": "https://api.github.com/users/alvarobartt" }
null
1,450,376,433
[]
https://api.github.com/repos/huggingface/datasets/issues/5245
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
### Describe the bug Trying to rename a column in a streaming dataset destroys the features object. ### Steps to reproduce the bug The following code illustrates the error: ``` from datasets import load_dataset dataset = load_dataset('mc4', 'en', streaming=True, split='train') dataset.info.features # {'text': Value(dtype='string', id=None), 'timestamp': Value(dtype='string', id=None), 'url': Value(dtype='string', id=None)} dataset = dataset.rename_column("text", "content") dataset.info.features # This returned object is now None! ``` ### Expected behavior Renaming should only affect the renamed column; the features object should be preserved. ### Environment info datasets 2.6.1
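As an aside, a hedged workaround sketch (an assumption, not an official fix): re-declare the schema after the rename so `features` is populated again. It assumes the mc4 schema shown above and that `IterableDataset.cast` is available in the installed version.

```python
from datasets import load_dataset, Features, Value

dataset = load_dataset("mc4", "en", streaming=True, split="train")
dataset = dataset.rename_column("text", "content")

# Assumed workaround: rebuild the features mapping by hand so downstream code
# that reads dataset.features keeps working after the rename.
dataset = dataset.cast(
    Features(
        {
            "content": Value("string"),
            "timestamp": Value("string"),
            "url": Value("string"),
        }
    )
)
```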
2022-11-28T12:53:24Z
https://github.com/huggingface/datasets/issues/5245
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5245/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5244/comments
https://api.github.com/repos/huggingface/datasets/issues/5244/timeline
null
null
null
I_kwDODunzps5WbYmZ
open
[]
null
5,244
{ "avatar_url": "https://avatars.githubusercontent.com/u/48770768?v=4", "events_url": "https://api.github.com/users/Hubert-Bonisseur/events{/privacy}", "followers_url": "https://api.github.com/users/Hubert-Bonisseur/followers", "following_url": "https://api.github.com/users/Hubert-Bonisseur/following{/other_user}", "gists_url": "https://api.github.com/users/Hubert-Bonisseur/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/Hubert-Bonisseur", "id": 48770768, "login": "Hubert-Bonisseur", "node_id": "MDQ6VXNlcjQ4NzcwNzY4", "organizations_url": "https://api.github.com/users/Hubert-Bonisseur/orgs", "received_events_url": "https://api.github.com/users/Hubert-Bonisseur/received_events", "repos_url": "https://api.github.com/users/Hubert-Bonisseur/repos", "site_admin": false, "starred_url": "https://api.github.com/users/Hubert-Bonisseur/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/Hubert-Bonisseur/subscriptions", "type": "User", "url": "https://api.github.com/users/Hubert-Bonisseur" }
Allow dataset streaming from a private source when loading a dataset with a dataset loading script
https://api.github.com/repos/huggingface/datasets/issues/5244/events
null
https://api.github.com/repos/huggingface/datasets/issues/5244/labels{/name}
2022-11-15T16:02:10Z
null
false
null
null
1,450,019,225
[ { "color": "a2eeef", "default": true, "description": "New feature or request", "id": 1935892871, "name": "enhancement", "node_id": "MDU6TGFiZWwxOTM1ODkyODcx", "url": "https://api.github.com/repos/huggingface/datasets/labels/enhancement" } ]
https://api.github.com/repos/huggingface/datasets/issues/5244
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
### Feature request Add arguments to the function _get_authentication_headers_for_url_, like custom_endpoint and custom_token, in order to add flexibility when downloading files from a private source. It should also be possible to provide these arguments from the dataset loading script, perhaps by passing them to the dl_manager. ### Motivation It is possible to share a dataset hosted on another platform by writing a dataset loading script. It works perfectly for publicly available resources. For resources that require authentication, you can provide a [download_custom](https://huggingface.co/docs/datasets/package_reference/builder_classes#datasets.DownloadManager) method to the download_manager. Unfortunately, this function doesn't work with **dataset streaming**. A solution to allow dataset streaming from private sources would be a more flexible _get_authentication_headers_for_url_ function. ### Your contribution Would you be interested in this improvement? If so, I could provide a PR. I've got something working locally, but it's not very clean, so I'd need some guidance regarding integration.
2022-11-23T14:02:30Z
https://github.com/huggingface/datasets/issues/5244
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5244/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5243/comments
https://api.github.com/repos/huggingface/datasets/issues/5243/timeline
null
null
null
I_kwDODunzps5WZfr6
open
[]
null
5,243
{ "avatar_url": "https://avatars.githubusercontent.com/u/48530104?v=4", "events_url": "https://api.github.com/users/capsabogdan/events{/privacy}", "followers_url": "https://api.github.com/users/capsabogdan/followers", "following_url": "https://api.github.com/users/capsabogdan/following{/other_user}", "gists_url": "https://api.github.com/users/capsabogdan/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/capsabogdan", "id": 48530104, "login": "capsabogdan", "node_id": "MDQ6VXNlcjQ4NTMwMTA0", "organizations_url": "https://api.github.com/users/capsabogdan/orgs", "received_events_url": "https://api.github.com/users/capsabogdan/received_events", "repos_url": "https://api.github.com/users/capsabogdan/repos", "site_admin": false, "starred_url": "https://api.github.com/users/capsabogdan/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/capsabogdan/subscriptions", "type": "User", "url": "https://api.github.com/users/capsabogdan" }
Download only split data
https://api.github.com/repos/huggingface/datasets/issues/5243/events
null
https://api.github.com/repos/huggingface/datasets/issues/5243/labels{/name}
2022-11-15T10:15:54Z
null
false
null
null
1,449,523,962
[ { "color": "a2eeef", "default": true, "description": "New feature or request", "id": 1935892871, "name": "enhancement", "node_id": "MDU6TGFiZWwxOTM1ODkyODcx", "url": "https://api.github.com/repos/huggingface/datasets/labels/enhancement" } ]
https://api.github.com/repos/huggingface/datasets/issues/5243
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
### Feature request Is it possible to download only the data that I am requesting and not the entire dataset? I run out of disk space, as it seems to download the entire dataset instead of only the part needed. ``` common_voice["test"] = load_dataset("mozilla-foundation/common_voice_11_0", "en", split="test", cache_dir="cache/path...", use_auth_token=True, download_config=DownloadConfig(delete_extracted='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') ) ``` ### Motivation Efficiency improvement. ### Your contribution n/a
2024-03-06T13:28:17Z
https://github.com/huggingface/datasets/issues/5243
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5243/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5242/comments
https://api.github.com/repos/huggingface/datasets/issues/5242/timeline
null
null
null
I_kwDODunzps5WXwtG
open
[]
null
5,242
{ "avatar_url": "https://avatars.githubusercontent.com/u/82735473?v=4", "events_url": "https://api.github.com/users/scrambled2/events{/privacy}", "followers_url": "https://api.github.com/users/scrambled2/followers", "following_url": "https://api.github.com/users/scrambled2/following{/other_user}", "gists_url": "https://api.github.com/users/scrambled2/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/scrambled2", "id": 82735473, "login": "scrambled2", "node_id": "MDQ6VXNlcjgyNzM1NDcz", "organizations_url": "https://api.github.com/users/scrambled2/orgs", "received_events_url": "https://api.github.com/users/scrambled2/received_events", "repos_url": "https://api.github.com/users/scrambled2/repos", "site_admin": false, "starred_url": "https://api.github.com/users/scrambled2/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/scrambled2/subscriptions", "type": "User", "url": "https://api.github.com/users/scrambled2" }
Failed Data Processing upon upload with zip file full of images
https://api.github.com/repos/huggingface/datasets/issues/5242/events
null
https://api.github.com/repos/huggingface/datasets/issues/5242/labels{/name}
2022-11-15T02:47:52Z
null
false
null
null
1,449,069,382
[]
https://api.github.com/repos/huggingface/datasets/issues/5242
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
I went to AutoTrain and, under image classification, reached the step where it was time to prepare my dataset. Screenshot below: ![image](https://user-images.githubusercontent.com/82735473/201814099-3cc5ff8a-88dc-4f5f-8140-f19560641d83.png) I chose the method 2 option. I have a CSV file with two columns and ~23,000 files. I uploaded this and chose the image_relpath and target columns. The image uploader said that I could only upload 10,000 individual images at a time, so the second option was to zip the images up and upload a zip archive, which I did. That all uploaded. Now I have the message below. Doesn't the zip archive just get uncompressed on the Hugging Face end? What am I missing here? ![image](https://user-images.githubusercontent.com/82735473/201813838-b50dbbbc-34e8-4d73-9c07-12f9e41c62eb.png)
2022-11-15T17:59:23Z
https://github.com/huggingface/datasets/issues/5242
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5242/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5241/comments
https://api.github.com/repos/huggingface/datasets/issues/5241/timeline
2022-11-15T16:09:31Z
null
null
PR_kwDODunzps5C3MTG
closed
[]
false
5,241
{ "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" }
Support hfh rc version
https://api.github.com/repos/huggingface/datasets/issues/5241/events
null
https://api.github.com/repos/huggingface/datasets/issues/5241/labels{/name}
2022-11-14T18:05:47Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5241.diff", "html_url": "https://github.com/huggingface/datasets/pull/5241", "merged_at": "2022-11-15T16:09:31Z", "patch_url": "https://github.com/huggingface/datasets/pull/5241.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5241" }
1,448,510,407
[]
https://api.github.com/repos/huggingface/datasets/issues/5241
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
Otherwise the code doesn't work with hfh 0.11.0rc0, following #5237.
2022-11-15T16:11:30Z
https://github.com/huggingface/datasets/pull/5241
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5241/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5240/comments
https://api.github.com/repos/huggingface/datasets/issues/5240/timeline
2022-11-15T18:24:38Z
null
null
PR_kwDODunzps5C3Fe6
closed
[]
false
5,240
{ "avatar_url": "https://avatars.githubusercontent.com/u/47462742?v=4", "events_url": "https://api.github.com/users/mariosasko/events{/privacy}", "followers_url": "https://api.github.com/users/mariosasko/followers", "following_url": "https://api.github.com/users/mariosasko/following{/other_user}", "gists_url": "https://api.github.com/users/mariosasko/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mariosasko", "id": 47462742, "login": "mariosasko", "node_id": "MDQ6VXNlcjQ3NDYyNzQy", "organizations_url": "https://api.github.com/users/mariosasko/orgs", "received_events_url": "https://api.github.com/users/mariosasko/received_events", "repos_url": "https://api.github.com/users/mariosasko/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mariosasko/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mariosasko/subscriptions", "type": "User", "url": "https://api.github.com/users/mariosasko" }
Cleaner error tracebacks for dataset script errors
https://api.github.com/repos/huggingface/datasets/issues/5240/events
null
https://api.github.com/repos/huggingface/datasets/issues/5240/labels{/name}
2022-11-14T17:42:02Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5240.diff", "html_url": "https://github.com/huggingface/datasets/pull/5240", "merged_at": "2022-11-15T18:24:38Z", "patch_url": "https://github.com/huggingface/datasets/pull/5240.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5240" }
1,448,478,617
[]
https://api.github.com/repos/huggingface/datasets/issues/5240
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
Make the traceback of the errors raised in `_generate_examples` cleaner for easier debugging. Additionally, initialize the `writer` in the for-loop to avoid the `ValueError` from `ArrowWriter.finalize` raised in the `finally` block when no examples are yielded before the `_generate_examples` error. <details> <summary> The full traceback of the "SQLAlchemy ImportError" error that gets printed with these changes: </summary> ```bash ImportError Traceback (most recent call last) /usr/local/lib/python3.7/dist-packages/datasets/builder.py in _prepare_split_single(self, arg) 1759 _time = time.time() -> 1760 for _, table in generator: 1761 # Only initialize the writer when we have the first record (to avoid having to do the clean-up if an error occurs before that) 9 frames /usr/local/lib/python3.7/dist-packages/datasets/packaged_modules/sql/sql.py in _generate_tables(self) 112 sql_reader = pd.read_sql( --> 113 self.config.sql, self.config.con, chunksize=chunksize, **self.config.pd_read_sql_kwargs 114 ) /usr/local/lib/python3.7/dist-packages/pandas/io/sql.py in read_sql(sql, con, index_col, coerce_float, params, parse_dates, columns, chunksize) 598 """ --> 599 pandas_sql = pandasSQL_builder(con) 600 /usr/local/lib/python3.7/dist-packages/pandas/io/sql.py in pandasSQL_builder(con, schema, meta, is_cursor) 789 elif isinstance(con, str): --> 790 raise ImportError("Using URI string without sqlalchemy installed.") 791 else: ImportError: Using URI string without sqlalchemy installed. The above exception was the direct cause of the following exception: DatasetGenerationError Traceback (most recent call last) <ipython-input-4-5af11af4737b> in <module> ----> 1 ds = Dataset.from_sql('''SELECT * from states WHERE state=="New York";''', "sqlite:///us_covid_data.db") /usr/local/lib/python3.7/dist-packages/datasets/arrow_dataset.py in from_sql(sql, con, features, cache_dir, keep_in_memory, **kwargs) 1152 cache_dir=cache_dir, 1153 keep_in_memory=keep_in_memory, -> 1154 **kwargs, 1155 ).read() 1156 /usr/local/lib/python3.7/dist-packages/datasets/io/sql.py in read(self) 47 # try_from_hf_gcs=try_from_hf_gcs, 48 base_path=base_path, ---> 49 use_auth_token=use_auth_token, 50 ) 51 /usr/local/lib/python3.7/dist-packages/datasets/builder.py in download_and_prepare(self, output_dir, download_config, download_mode, ignore_verifications, try_from_hf_gcs, dl_manager, base_path, use_auth_token, file_format, max_shard_size, num_proc, storage_options, **download_and_prepare_kwargs) 825 verify_infos=verify_infos, 826 **prepare_split_kwargs, --> 827 **download_and_prepare_kwargs, 828 ) 829 # Sync info /usr/local/lib/python3.7/dist-packages/datasets/builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs) 912 try: 913 # Prepare split will record examples associated to the split --> 914 self._prepare_split(split_generator, **prepare_split_kwargs) 915 except OSError as e: 916 raise OSError( /usr/local/lib/python3.7/dist-packages/datasets/builder.py in _prepare_split(self, split_generator, file_format, num_proc, max_shard_size) 1652 job_id = 0 1653 for job_id, done, content in self._prepare_split_single( -> 1654 {"gen_kwargs": gen_kwargs, "job_id": job_id, **_prepare_split_args} 1655 ): 1656 if done: /usr/local/lib/python3.7/dist-packages/datasets/builder.py in _prepare_split_single(self, arg) 1789 raise DatasetGenerationError( 1790 f"An error occured while generating the dataset" -> 1791 ) from e 1792 finally: 1793 yield job_id, False, num_examples_progress_update DatasetGenerationError: An error occurred while generating the dataset ``` </details> PS: I've also considered raising the error as follows: ```python tb = sys.exc_info()[2] raise DatasetGenerationError(f"An error occurred while generating the dataset: {type(e).__name__}: {e}").with_traceback(tb) from None # this raises the DatasetGenerationError with "e"'s traceback ``` But it seems like "from e" is now the [preferred](https://docs.python.org/3/library/exceptions.html#BaseException.with_traceback) way to chain exceptions. Fix https://github.com/huggingface/datasets/issues/5186 cc @nateraw
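For readers unfamiliar with the chaining pattern mentioned in the PS, a small self-contained sketch (plain Python, not the actual `builder.py` code) of how `raise ... from e` produces the "direct cause" section shown in the traceback above:

```python
class DatasetGenerationError(Exception):
    pass

def _generate_examples():
    # Stands in for a dataset script that fails mid-generation.
    raise ImportError("Using URI string without sqlalchemy installed.")

try:
    _generate_examples()
except Exception as e:
    # "from e" attaches the original error as __cause__, which prints the
    # "The above exception was the direct cause of the following exception:" block.
    raise DatasetGenerationError("An error occurred while generating the dataset") from e
```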
2022-11-15T18:26:48Z
https://github.com/huggingface/datasets/pull/5240
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5240/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5239/comments
https://api.github.com/repos/huggingface/datasets/issues/5239/timeline
2022-12-06T15:39:09Z
null
null
PR_kwDODunzps5C2L_P
closed
[]
false
5,239
{ "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" }
Add num_proc to from_csv/generator/json/parquet/text
https://api.github.com/repos/huggingface/datasets/issues/5239/events
null
https://api.github.com/repos/huggingface/datasets/issues/5239/labels{/name}
2022-11-14T14:53:00Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5239.diff", "html_url": "https://github.com/huggingface/datasets/pull/5239", "merged_at": "2022-12-06T15:39:09Z", "patch_url": "https://github.com/huggingface/datasets/pull/5239.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5239" }
1,448,211,373
[]
https://api.github.com/repos/huggingface/datasets/issues/5239
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
Allow multiprocessing in from_* methods
2022-12-06T15:39:10Z
https://github.com/huggingface/datasets/pull/5239
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5239/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5238/comments
https://api.github.com/repos/huggingface/datasets/issues/5238/timeline
2022-11-14T15:27:35Z
null
null
PR_kwDODunzps5C2L9h
closed
[]
false
5,238
{ "avatar_url": "https://avatars.githubusercontent.com/u/47462742?v=4", "events_url": "https://api.github.com/users/mariosasko/events{/privacy}", "followers_url": "https://api.github.com/users/mariosasko/followers", "following_url": "https://api.github.com/users/mariosasko/following{/other_user}", "gists_url": "https://api.github.com/users/mariosasko/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mariosasko", "id": 47462742, "login": "mariosasko", "node_id": "MDQ6VXNlcjQ3NDYyNzQy", "organizations_url": "https://api.github.com/users/mariosasko/orgs", "received_events_url": "https://api.github.com/users/mariosasko/received_events", "repos_url": "https://api.github.com/users/mariosasko/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mariosasko/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mariosasko/subscriptions", "type": "User", "url": "https://api.github.com/users/mariosasko" }
Make `Version` hashable
https://api.github.com/repos/huggingface/datasets/issues/5238/events
null
https://api.github.com/repos/huggingface/datasets/issues/5238/labels{/name}
2022-11-14T14:52:55Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5238.diff", "html_url": "https://github.com/huggingface/datasets/pull/5238", "merged_at": "2022-11-14T15:27:35Z", "patch_url": "https://github.com/huggingface/datasets/pull/5238.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5238" }
1,448,211,251
[]
https://api.github.com/repos/huggingface/datasets/issues/5238
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
Add `__hash__` to the `Version` class to make it hashable (and remove the unneeded methods), as `Version("0.0.0")` is the default value of `BuilderConfig.version` and the default fields of a dataclass need to be hashable in Python 3.11. Fix https://github.com/huggingface/datasets/issues/5230
2022-11-14T15:30:02Z
https://github.com/huggingface/datasets/pull/5238
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5238/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5237/comments
https://api.github.com/repos/huggingface/datasets/issues/5237/timeline
2022-11-14T17:35:59Z
null
null
PR_kwDODunzps5C2KGz
closed
[]
false
5,237
{ "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" }
Encode path only for old versions of hfh
https://api.github.com/repos/huggingface/datasets/issues/5237/events
null
https://api.github.com/repos/huggingface/datasets/issues/5237/labels{/name}
2022-11-14T14:46:57Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5237.diff", "html_url": "https://github.com/huggingface/datasets/pull/5237", "merged_at": "2022-11-14T17:35:59Z", "patch_url": "https://github.com/huggingface/datasets/pull/5237.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5237" }
1,448,202,491
[]
https://api.github.com/repos/huggingface/datasets/issues/5237
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
The next version of `huggingface-hub` (0.11) does encode the `path`, and we don't want to encode it twice
2022-11-14T17:38:18Z
https://github.com/huggingface/datasets/pull/5237
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5237/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5236/comments
https://api.github.com/repos/huggingface/datasets/issues/5236/timeline
2022-11-14T16:01:48Z
null
null
PR_kwDODunzps5C2Hnj
closed
[]
false
5,236
{ "avatar_url": "https://avatars.githubusercontent.com/u/47462742?v=4", "events_url": "https://api.github.com/users/mariosasko/events{/privacy}", "followers_url": "https://api.github.com/users/mariosasko/followers", "following_url": "https://api.github.com/users/mariosasko/following{/other_user}", "gists_url": "https://api.github.com/users/mariosasko/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mariosasko", "id": 47462742, "login": "mariosasko", "node_id": "MDQ6VXNlcjQ3NDYyNzQy", "organizations_url": "https://api.github.com/users/mariosasko/orgs", "received_events_url": "https://api.github.com/users/mariosasko/received_events", "repos_url": "https://api.github.com/users/mariosasko/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mariosasko/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mariosasko/subscriptions", "type": "User", "url": "https://api.github.com/users/mariosasko" }
Handle ArrowNotImplementedError caused by try_type being Image or Audio in cast
https://api.github.com/repos/huggingface/datasets/issues/5236/events
null
https://api.github.com/repos/huggingface/datasets/issues/5236/labels{/name}
2022-11-14T14:38:59Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5236.diff", "html_url": "https://github.com/huggingface/datasets/pull/5236", "merged_at": "2022-11-14T16:01:48Z", "patch_url": "https://github.com/huggingface/datasets/pull/5236.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5236" }
1,448,190,801
[]
https://api.github.com/repos/huggingface/datasets/issues/5236
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
Handle the `ArrowNotImplementedError` thrown when `try_type` is `Image` or `Audio` and the input array cannot be converted to their storage formats. Reproducer: ```python from datasets import Dataset from PIL import Image import requests ds = Dataset.from_dict({"image": [Image.open(requests.get("https://upload.wikimedia.org/wikipedia/commons/e/e9/Felis_silvestris_silvestris_small_gradual_decrease_of_quality.png", stream=True).raw)]}) ds.map(lambda x: {"image": True}) # ArrowNotImplementedError ``` PS: This could also be fixed by raising `TypeError` in `{Image, Audio}.cast_storage` for unsupported types instead of passing the array to `array_cast.`
2022-11-14T16:04:29Z
https://github.com/huggingface/datasets/pull/5236
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5236/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5235/comments
https://api.github.com/repos/huggingface/datasets/issues/5235/timeline
2022-11-14T13:41:12Z
null
null
PR_kwDODunzps5C1pjc
closed
[]
false
5,235
{ "avatar_url": "https://avatars.githubusercontent.com/u/16348744?v=4", "events_url": "https://api.github.com/users/polinaeterna/events{/privacy}", "followers_url": "https://api.github.com/users/polinaeterna/followers", "following_url": "https://api.github.com/users/polinaeterna/following{/other_user}", "gists_url": "https://api.github.com/users/polinaeterna/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/polinaeterna", "id": 16348744, "login": "polinaeterna", "node_id": "MDQ6VXNlcjE2MzQ4NzQ0", "organizations_url": "https://api.github.com/users/polinaeterna/orgs", "received_events_url": "https://api.github.com/users/polinaeterna/received_events", "repos_url": "https://api.github.com/users/polinaeterna/repos", "site_admin": false, "starred_url": "https://api.github.com/users/polinaeterna/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/polinaeterna/subscriptions", "type": "User", "url": "https://api.github.com/users/polinaeterna" }
Pin `typer` version in tests to <0.5 to fix Windows CI
https://api.github.com/repos/huggingface/datasets/issues/5235/events
null
https://api.github.com/repos/huggingface/datasets/issues/5235/labels{/name}
2022-11-14T13:17:02Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5235.diff", "html_url": "https://github.com/huggingface/datasets/pull/5235", "merged_at": "2022-11-14T13:41:12Z", "patch_url": "https://github.com/huggingface/datasets/pull/5235.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5235" }
1,448,052,660
[]
https://api.github.com/repos/huggingface/datasets/issues/5235
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
Otherwise `click` fails on Windows: ``` Traceback (most recent call last): File "C:\hostedtoolcache\windows\Python\3.7.9\x64\lib\runpy.py", line 193, in _run_module_as_main "__main__", mod_spec) File "C:\hostedtoolcache\windows\Python\3.7.9\x64\lib\runpy.py", line 85, in _run_code exec(code, run_globals) File "C:\hostedtoolcache\windows\Python\3.7.9\x64\lib\site-packages\spacy\__main__.py", line 4, in <module> setup_cli() File "C:\hostedtoolcache\windows\Python\3.7.9\x64\lib\site-packages\spacy\cli\_util.py", line 71, in setup_cli command(prog_name=COMMAND) File "C:\hostedtoolcache\windows\Python\3.7.9\x64\lib\site-packages\click\core.py", line 829, in __call__ return self.main(*args, **kwargs) File "C:\hostedtoolcache\windows\Python\3.7.9\x64\lib\site-packages\typer\core.py", line 785, in main **extra, File "C:\hostedtoolcache\windows\Python\3.7.9\x64\lib\site-packages\typer\core.py", line 190, in _main args = click.utils._expand_args(args) AttributeError: module 'click.utils' has no attribute '_expand_args' ``` See https://github.com/tiangolo/typer/issues/427
2022-11-14T15:43:01Z
https://github.com/huggingface/datasets/pull/5235
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5235/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5234/comments
https://api.github.com/repos/huggingface/datasets/issues/5234/timeline
2022-12-07T23:46:34Z
null
null
PR_kwDODunzps5C1diq
closed
[]
false
5,234
{ "avatar_url": "https://avatars.githubusercontent.com/u/30353?v=4", "events_url": "https://api.github.com/users/vigsterkr/events{/privacy}", "followers_url": "https://api.github.com/users/vigsterkr/followers", "following_url": "https://api.github.com/users/vigsterkr/following{/other_user}", "gists_url": "https://api.github.com/users/vigsterkr/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/vigsterkr", "id": 30353, "login": "vigsterkr", "node_id": "MDQ6VXNlcjMwMzUz", "organizations_url": "https://api.github.com/users/vigsterkr/orgs", "received_events_url": "https://api.github.com/users/vigsterkr/received_events", "repos_url": "https://api.github.com/users/vigsterkr/repos", "site_admin": false, "starred_url": "https://api.github.com/users/vigsterkr/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/vigsterkr/subscriptions", "type": "User", "url": "https://api.github.com/users/vigsterkr" }
fix: dataset path should be absolute
https://api.github.com/repos/huggingface/datasets/issues/5234/events
null
https://api.github.com/repos/huggingface/datasets/issues/5234/labels{/name}
2022-11-14T12:47:40Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5234.diff", "html_url": "https://github.com/huggingface/datasets/pull/5234", "merged_at": "2022-12-07T23:46:34Z", "patch_url": "https://github.com/huggingface/datasets/pull/5234.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5234" }
1,447,999,062
[]
https://api.github.com/repos/huggingface/datasets/issues/5234
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
cache_file_name depends on the dataset's path. A simple example of how this could cause a problem: ``` import os import datasets def add_prefix(example): example["text"] = "Review: " + example["text"] return example ds = datasets.load_from_disk("a/relative/path") os.chdir("/tmp") ds_1 = ds.map(add_prefix) ``` While it may feel that the `chdir` is quite contrived, there are many scenarios in which the current working dir can/will change...
2022-12-07T23:49:22Z
https://github.com/huggingface/datasets/pull/5234
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5234/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5233/comments
https://api.github.com/repos/huggingface/datasets/issues/5233/timeline
2022-11-14T14:13:22Z
null
null
PR_kwDODunzps5C1JVh
closed
[]
false
5,233
{ "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" }
Fix shards in IterableDataset.from_generator
https://api.github.com/repos/huggingface/datasets/issues/5233/events
null
https://api.github.com/repos/huggingface/datasets/issues/5233/labels{/name}
2022-11-14T11:42:09Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5233.diff", "html_url": "https://github.com/huggingface/datasets/pull/5233", "merged_at": "2022-11-14T14:13:22Z", "patch_url": "https://github.com/huggingface/datasets/pull/5233.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5233" }
1,447,906,868
[]
https://api.github.com/repos/huggingface/datasets/issues/5233
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
Allow defining a sharded iterable dataset
2022-11-14T14:16:03Z
https://github.com/huggingface/datasets/pull/5233
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5233/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5232/comments
https://api.github.com/repos/huggingface/datasets/issues/5232/timeline
2022-11-14T08:07:59Z
null
completed
I_kwDODunzps5WNLKV
closed
[]
null
5,232
{ "avatar_url": "https://avatars.githubusercontent.com/u/10574123?v=4", "events_url": "https://api.github.com/users/vinaykakade/events{/privacy}", "followers_url": "https://api.github.com/users/vinaykakade/followers", "following_url": "https://api.github.com/users/vinaykakade/following{/other_user}", "gists_url": "https://api.github.com/users/vinaykakade/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/vinaykakade", "id": 10574123, "login": "vinaykakade", "node_id": "MDQ6VXNlcjEwNTc0MTIz", "organizations_url": "https://api.github.com/users/vinaykakade/orgs", "received_events_url": "https://api.github.com/users/vinaykakade/received_events", "repos_url": "https://api.github.com/users/vinaykakade/repos", "site_admin": false, "starred_url": "https://api.github.com/users/vinaykakade/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/vinaykakade/subscriptions", "type": "User", "url": "https://api.github.com/users/vinaykakade" }
Incompatible dill versions in datasets 2.6.1
https://api.github.com/repos/huggingface/datasets/issues/5232/events
null
https://api.github.com/repos/huggingface/datasets/issues/5232/labels{/name}
2022-11-12T06:46:23Z
null
false
null
null
1,446,294,165
[]
https://api.github.com/repos/huggingface/datasets/issues/5232
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
### Describe the bug datasets version 2.6.1 has a dependency on dill<0.3.6. This causes a conflict with dill>=0.3.6, used by the multiprocess dependency in datasets 2.6.1. This issue is already fixed in https://github.com/huggingface/datasets/pull/5166/files, but has not yet been released. Please release a new version of the datasets library to fix this. ### Steps to reproduce the bug 1. Create requirements.in with the only dependency being datasets (or datasets[s3]) 2. Run pip-compile 3. The output is as follows: ``` Could not find a version that matches dill<0.3.6,>=0.3.6 (from datasets[s3]==2.6.1->-r requirements.in (line 1)) Tried: 0.2, 0.2, 0.2.1, 0.2.1, 0.2.2, 0.2.2, 0.2.3, 0.2.3, 0.2.4, 0.2.4, 0.2.5, 0.2.5, 0.2.6, 0.2.7, 0.2.7.1, 0.2.8, 0.2.8.1, 0.2.8.2, 0.2.9, 0.3.0, 0.3.1, 0.3.1.1, 0.3.2, 0.3.3, 0.3.3, 0.3.4, 0.3.4, 0.3.5, 0.3.5, 0.3.5.1, 0.3.5.1, 0.3.6, 0.3.6 Skipped pre-versions: 0.1a1, 0.2a1, 0.2a1, 0.2b1, 0.2b1 There are incompatible versions in the resolved dependencies: dill<0.3.6 (from datasets[s3]==2.6.1->-r requirements.in (line 1)) dill>=0.3.6 (from multiprocess==0.70.14->datasets[s3]==2.6.1->-r requirements.in (line 1)) ``` ### Expected behavior pip-compile produces requirements.txt without any conflicts ### Environment info datasets version 2.6.1
2022-11-14T08:24:43Z
https://github.com/huggingface/datasets/issues/5232
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5232/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5231/comments
https://api.github.com/repos/huggingface/datasets/issues/5231/timeline
2022-11-11T18:59:50Z
null
completed
I_kwDODunzps5WLm2D
closed
[]
null
5,231
{ "avatar_url": "https://avatars.githubusercontent.com/u/99206017?v=4", "events_url": "https://api.github.com/users/plamb-viso/events{/privacy}", "followers_url": "https://api.github.com/users/plamb-viso/followers", "following_url": "https://api.github.com/users/plamb-viso/following{/other_user}", "gists_url": "https://api.github.com/users/plamb-viso/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/plamb-viso", "id": 99206017, "login": "plamb-viso", "node_id": "U_kgDOBenDgQ", "organizations_url": "https://api.github.com/users/plamb-viso/orgs", "received_events_url": "https://api.github.com/users/plamb-viso/received_events", "repos_url": "https://api.github.com/users/plamb-viso/repos", "site_admin": false, "starred_url": "https://api.github.com/users/plamb-viso/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/plamb-viso/subscriptions", "type": "User", "url": "https://api.github.com/users/plamb-viso" }
Using `set_format(type='torch', columns=columns)` makes Array2D/3D columns stop formatting correctly
https://api.github.com/repos/huggingface/datasets/issues/5231/events
null
https://api.github.com/repos/huggingface/datasets/issues/5231/labels{/name}
2022-11-11T18:54:36Z
null
false
null
null
1,445,883,267
[]
https://api.github.com/repos/huggingface/datasets/issues/5231
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
I have a Dataset with two Features defined as follows: ``` 'image': Array3D(dtype="int64", shape=(3, 224, 224)), 'bbox': Array2D(dtype="int64", shape=(512, 4)), ``` On said dataset, if I call `dataset.set_format(type='torch')` and then use the dataset in a dataloader, these columns are correctly cast to Tensors of (batch_size, 3, 224, 244) for example. However, if I call `dataset.set_format(type='torch', columns=['image', 'bbox'])`, these columns are cast to Lists of tensors and miss the batch size completely (the 3 dimension is the list length). I'm currently digging through the datasets formatting code to try and find out why, but was curious if someone knew an immediate solution for this.
2022-11-11T20:42:29Z
https://github.com/huggingface/datasets/issues/5231
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5231/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5230/comments
https://api.github.com/repos/huggingface/datasets/issues/5230/timeline
2022-11-14T15:27:37Z
null
completed
I_kwDODunzps5WKLH8
closed
[ { "avatar_url": "https://avatars.githubusercontent.com/u/47462742?v=4", "events_url": "https://api.github.com/users/mariosasko/events{/privacy}", "followers_url": "https://api.github.com/users/mariosasko/followers", "following_url": "https://api.github.com/users/mariosasko/following{/other_user}", "gists_url": "https://api.github.com/users/mariosasko/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mariosasko", "id": 47462742, "login": "mariosasko", "node_id": "MDQ6VXNlcjQ3NDYyNzQy", "organizations_url": "https://api.github.com/users/mariosasko/orgs", "received_events_url": "https://api.github.com/users/mariosasko/received_events", "repos_url": "https://api.github.com/users/mariosasko/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mariosasko/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mariosasko/subscriptions", "type": "User", "url": "https://api.github.com/users/mariosasko" } ]
null
5,230
{ "avatar_url": "https://avatars.githubusercontent.com/u/76044840?v=4", "events_url": "https://api.github.com/users/yonikremer/events{/privacy}", "followers_url": "https://api.github.com/users/yonikremer/followers", "following_url": "https://api.github.com/users/yonikremer/following{/other_user}", "gists_url": "https://api.github.com/users/yonikremer/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/yonikremer", "id": 76044840, "login": "yonikremer", "node_id": "MDQ6VXNlcjc2MDQ0ODQw", "organizations_url": "https://api.github.com/users/yonikremer/orgs", "received_events_url": "https://api.github.com/users/yonikremer/received_events", "repos_url": "https://api.github.com/users/yonikremer/repos", "site_admin": false, "starred_url": "https://api.github.com/users/yonikremer/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/yonikremer/subscriptions", "type": "User", "url": "https://api.github.com/users/yonikremer" }
dataclasses error when importing the library in python 3.11
https://api.github.com/repos/huggingface/datasets/issues/5230/events
null
https://api.github.com/repos/huggingface/datasets/issues/5230/labels{/name}
2022-11-11T13:53:49Z
null
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/47462742?v=4", "events_url": "https://api.github.com/users/mariosasko/events{/privacy}", "followers_url": "https://api.github.com/users/mariosasko/followers", "following_url": "https://api.github.com/users/mariosasko/following{/other_user}", "gists_url": "https://api.github.com/users/mariosasko/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mariosasko", "id": 47462742, "login": "mariosasko", "node_id": "MDQ6VXNlcjQ3NDYyNzQy", "organizations_url": "https://api.github.com/users/mariosasko/orgs", "received_events_url": "https://api.github.com/users/mariosasko/received_events", "repos_url": "https://api.github.com/users/mariosasko/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mariosasko/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mariosasko/subscriptions", "type": "User", "url": "https://api.github.com/users/mariosasko" }
null
1,445,507,580
[]
https://api.github.com/repos/huggingface/datasets/issues/5230
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
### Describe the bug When I import datasets using python 3.11 the dataclasses standard library raises the following error: `ValueError: mutable default <class 'datasets.utils.version.Version'> for field version is not allowed: use default_factory` When I tried to import the library using the following jupyter notebook: ``` %%bash # create python 3.11 conda env conda create --yes --quiet -n myenv -c conda-forge python=3.11 # activate is source activate myenv # install pyarrow /opt/conda/envs/myenv/bin/python -m pip install --quiet --extra-index-url https://pypi.fury.io/arrow-nightlies/ \ --prefer-binary --pre pyarrow # install datasets /opt/conda/envs/myenv/bin/python -m pip install --quiet datasets ``` ``` # create a python file that only imports datasets with open("import_datasets.py", 'w') as f: f.write("import datasets") # run it with the env !/opt/conda/envs/myenv/bin/python import_datasets.py ``` I get the following error: ``` Traceback (most recent call last): File "/kaggle/working/import_datasets.py", line 1, in <module> import datasets File "/opt/conda/envs/myenv/lib/python3.11/site-packages/datasets/__init__.py", line 45, in <module> from .builder import ArrowBasedBuilder, BeamBasedBuilder, BuilderConfig, DatasetBuilder, GeneratorBasedBuilder File "/opt/conda/envs/myenv/lib/python3.11/site-packages/datasets/builder.py", line 91, in <module> @dataclass ^^^^^^^^^ File "/opt/conda/envs/myenv/lib/python3.11/dataclasses.py", line 1221, in dataclass return wrap(cls) ^^^^^^^^^ File "/opt/conda/envs/myenv/lib/python3.11/dataclasses.py", line 1211, in wrap return _process_class(cls, init, repr, eq, order, unsafe_hash, ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/opt/conda/envs/myenv/lib/python3.11/dataclasses.py", line 959, in _process_class cls_fields.append(_get_field(cls, name, type, kw_only)) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/opt/conda/envs/myenv/lib/python3.11/dataclasses.py", line 816, in _get_field raise ValueError(f'mutable default {type(f.default)} for field ' ValueError: mutable default <class 'datasets.utils.version.Version'> for field version is not allowed: use default_factory ``` This is probably due to one of the following changes in the [dataclasses standard library](https://docs.python.org/3/library/dataclasses.html) in version 3.11: 1. Changed in version 3.11: Instead of looking for and disallowing objects of type list, dict, or set, unhashable objects are now not allowed as default values. Unhashability is used to approximate mutability. 2. fields may optionally specify a default value, using normal Python syntax: ``` @dataclass class C: a: int # 'a' has no default value b: int = 0 # assign a default value for 'b' In this example, both a and b will be included in the added __init__() method, which will be defined as: def __init__(self, a: int, b: int = 0): ``` 3. Changed in version 3.11: If a field name is already included in the __slots__ of a base class, it will not be included in the generated __slots__ to prevent [overriding them](https://docs.python.org/3/reference/datamodel.html#datamodel-note-slots). Therefore, do not use __slots__ to retrieve the field names of a dataclass. Use [fields()](https://docs.python.org/3/library/dataclasses.html#dataclasses.fields) instead. To be able to determine inherited slots, base class __slots__ may be any iterable, but not an iterator. 4. weakref_slot: If true (the default is False), add a slot named “__weakref__”, which is required to make an instance weakref-able. 
It is an error to specify weakref_slot=True without also specifying slots=True. [TypeError](https://docs.python.org/3/library/exceptions.html#TypeError) will be raised if a field without a default value follows a field with a default value. This is true whether this occurs in a single class, or as a result of class inheritance. ### Steps to reproduce the bug Steps to reproduce the behavior: 1. go to [the notebook in kaggle](https://www.kaggle.com/yonikremer/repreducing-issue) 2. run both of the cells ### Expected behavior I'm expecting no issues. This error should not occur. ### Environment info kaggle kernels, with default settings: pin to original environment, no accelerator.
2023-05-25T04:37:05Z
https://github.com/huggingface/datasets/issues/5230
{ "+1": 3, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 3, "url": "https://api.github.com/repos/huggingface/datasets/issues/5230/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5229/comments
https://api.github.com/repos/huggingface/datasets/issues/5229/timeline
2023-01-13T16:00:53Z
null
completed
I_kwDODunzps5WIswE
closed
[]
null
5,229
{ "avatar_url": "https://avatars.githubusercontent.com/u/7878215?v=4", "events_url": "https://api.github.com/users/phipsgabler/events{/privacy}", "followers_url": "https://api.github.com/users/phipsgabler/followers", "following_url": "https://api.github.com/users/phipsgabler/following{/other_user}", "gists_url": "https://api.github.com/users/phipsgabler/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/phipsgabler", "id": 7878215, "login": "phipsgabler", "node_id": "MDQ6VXNlcjc4NzgyMTU=", "organizations_url": "https://api.github.com/users/phipsgabler/orgs", "received_events_url": "https://api.github.com/users/phipsgabler/received_events", "repos_url": "https://api.github.com/users/phipsgabler/repos", "site_admin": false, "starred_url": "https://api.github.com/users/phipsgabler/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/phipsgabler/subscriptions", "type": "User", "url": "https://api.github.com/users/phipsgabler" }
Type error when calling `map` over dataset containing 0-d tensors
https://api.github.com/repos/huggingface/datasets/issues/5229/events
null
https://api.github.com/repos/huggingface/datasets/issues/5229/labels{/name}
2022-11-11T08:27:28Z
null
false
null
null
1,445,121,028
[]
https://api.github.com/repos/huggingface/datasets/issues/5229
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
### Describe the bug 0-dimensional tensors in a dataset lead to `TypeError: iteration over a 0-d array` when calling `map`. It is easy to generate such tensors by using `.with_format("...")` on the whole dataset. ### Steps to reproduce the bug ``` ds = datasets.Dataset.from_list([{"a": 1}, {"a": 1}]).with_format("torch") ds.map(None) ``` ### Expected behavior Getting back `ds` without errors. ### Environment info Python 3.10.8 datasets 2.6. torch 1.13.0
2023-01-13T16:00:53Z
https://github.com/huggingface/datasets/issues/5229
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5229/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5228/comments
https://api.github.com/repos/huggingface/datasets/issues/5228/timeline
null
null
null
I_kwDODunzps5WHVXh
open
[]
null
5,228
{ "avatar_url": "https://avatars.githubusercontent.com/u/43149077?v=4", "events_url": "https://api.github.com/users/dakinggg/events{/privacy}", "followers_url": "https://api.github.com/users/dakinggg/followers", "following_url": "https://api.github.com/users/dakinggg/following{/other_user}", "gists_url": "https://api.github.com/users/dakinggg/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/dakinggg", "id": 43149077, "login": "dakinggg", "node_id": "MDQ6VXNlcjQzMTQ5MDc3", "organizations_url": "https://api.github.com/users/dakinggg/orgs", "received_events_url": "https://api.github.com/users/dakinggg/received_events", "repos_url": "https://api.github.com/users/dakinggg/repos", "site_admin": false, "starred_url": "https://api.github.com/users/dakinggg/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/dakinggg/subscriptions", "type": "User", "url": "https://api.github.com/users/dakinggg" }
Loading a dataset from the hub fails if you happen to have a folder of the same name
https://api.github.com/repos/huggingface/datasets/issues/5228/events
null
https://api.github.com/repos/huggingface/datasets/issues/5228/labels{/name}
2022-11-11T00:51:54Z
null
false
null
null
1,444,763,105
[]
https://api.github.com/repos/huggingface/datasets/issues/5228
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
### Describe the bug I'm not 100% sure this should be considered a bug, but it was certainly annoying to figure out the cause of. And perhaps I am just missing a specific argument needed to avoid this conflict. Basically I had a situation where multiple workers were downloading different parts of the glue dataset and then training on them. Additionally, they were writing their checkpoints to a folder called `glue`. This meant that once one worker had created the `glue` folder to write checkpoints to, the next worker to try to load a glue dataset would fail as shown in the minimal repro below. I'm not sure what the solution would be since I'm not super familiar with the `datasets` code, but I would expect `load_dataset` to not crash just because i have a local folder with the same name as a dataset from the hub. ### Steps to reproduce the bug ``` In [1]: import datasets In [2]: rte = datasets.load_dataset('glue', 'rte') Downloading and preparing dataset glue/rte to /Users/danielking/.cache/huggingface/datasets/glue/rte/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad... Downloading data: 100%|█████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 697k/697k [00:00<00:00, 6.08MB/s] Dataset glue downloaded and prepared to /Users/danielking/.cache/huggingface/datasets/glue/rte/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad. Subsequent calls will reuse this data. 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 3/3 [00:00<00:00, 773.81it/s] In [3]: import os In [4]: os.mkdir('glue') In [5]: rte = datasets.load_dataset('glue', 'rte') --------------------------------------------------------------------------- EmptyDatasetError Traceback (most recent call last) <ipython-input-5-0d6b9ad8bbd0> in <cell line: 1>() ----> 1 rte = datasets.load_dataset('glue', 'rte') ~/miniconda3/envs/composer/lib/python3.9/site-packages/datasets/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, revision, use_auth_token, task, streaming, **config_kwargs) 1717 1718 # Create a dataset builder -> 1719 builder_instance = load_dataset_builder( 1720 path=path, 1721 name=name, ~/miniconda3/envs/composer/lib/python3.9/site-packages/datasets/load.py in load_dataset_builder(path, name, data_dir, data_files, cache_dir, features, download_config, download_mode, revision, use_auth_token, **config_kwargs) 1495 download_config = download_config.copy() if download_config else DownloadConfig() 1496 download_config.use_auth_token = use_auth_token -> 1497 dataset_module = dataset_module_factory( 1498 path, 1499 revision=revision, ~/miniconda3/envs/composer/lib/python3.9/site-packages/datasets/load.py in dataset_module_factory(path, revision, download_config, download_mode, dynamic_modules_path, data_dir, data_files, **download_kwargs) 1152 ).get_module() 1153 elif os.path.isdir(path): -> 1154 return LocalDatasetModuleFactoryWithoutScript( 1155 path, data_dir=data_dir, data_files=data_files, download_mode=download_mode 1156 ).get_module() ~/miniconda3/envs/composer/lib/python3.9/site-packages/datasets/load.py in get_module(self) 624 base_path = os.path.join(self.path, self.data_dir) if self.data_dir else self.path 625 patterns = ( --> 626 sanitize_patterns(self.data_files) if self.data_files is not 
None else get_data_patterns_locally(base_path) 627 ) 628 data_files = DataFilesDict.from_local_or_remote( ~/miniconda3/envs/composer/lib/python3.9/site-packages/datasets/data_files.py in get_data_patterns_locally(base_path) 458 return _get_data_files_patterns(resolver) 459 except FileNotFoundError: --> 460 raise EmptyDatasetError(f"The directory at {base_path} doesn't contain any data files") from None 461 462 EmptyDatasetError: The directory at glue doesn't contain any data files ``` ### Expected behavior Dataset is still able to be loaded from the hub even if I have a local folder with the same name. ### Environment info datasets version: 2.6.1
2023-05-03T23:23:04Z
https://github.com/huggingface/datasets/issues/5228
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5228/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5227/comments
https://api.github.com/repos/huggingface/datasets/issues/5227/timeline
2022-11-10T22:05:43Z
null
completed
I_kwDODunzps5WGyc-
closed
[]
null
5,227
{ "avatar_url": "https://avatars.githubusercontent.com/u/102275116?v=4", "events_url": "https://api.github.com/users/ScottM-wizard/events{/privacy}", "followers_url": "https://api.github.com/users/ScottM-wizard/followers", "following_url": "https://api.github.com/users/ScottM-wizard/following{/other_user}", "gists_url": "https://api.github.com/users/ScottM-wizard/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/ScottM-wizard", "id": 102275116, "login": "ScottM-wizard", "node_id": "U_kgDOBhiYLA", "organizations_url": "https://api.github.com/users/ScottM-wizard/orgs", "received_events_url": "https://api.github.com/users/ScottM-wizard/received_events", "repos_url": "https://api.github.com/users/ScottM-wizard/repos", "site_admin": false, "starred_url": "https://api.github.com/users/ScottM-wizard/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/ScottM-wizard/subscriptions", "type": "User", "url": "https://api.github.com/users/ScottM-wizard" }
datasets.data_files.EmptyDatasetError: The directory at wikisql doesn't contain any data files
https://api.github.com/repos/huggingface/datasets/issues/5227/events
null
https://api.github.com/repos/huggingface/datasets/issues/5227/labels{/name}
2022-11-10T21:57:06Z
null
false
null
null
1,444,620,094
[]
https://api.github.com/repos/huggingface/datasets/issues/5227
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
### Describe the bug From these lines: from datasets import list_datasets, load_dataset dataset = load_dataset("wikisql","binary") I get the error message: datasets.data_files.EmptyDatasetError: The directory at wikisql doesn't contain any data files And yet the 'wikisql' is reported to exist via the list_datasets(). Any help is appreciated. ### Steps to reproduce the bug From these lines: from datasets import list_datasets, load_dataset dataset = load_dataset("wikisql","binary") I get the error message: datasets.data_files.EmptyDatasetError: The directory at wikisql doesn't contain any data files And yet the 'wikisql' is reported to exist via the list_datasets(). Any help is appreciated. ### Expected behavior Dataset should load. This same code used to work. ### Environment info Mac OS
2023-10-07T05:04:41Z
https://github.com/huggingface/datasets/issues/5227
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5227/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5226/comments
https://api.github.com/repos/huggingface/datasets/issues/5226/timeline
2022-11-29T15:10:10Z
null
completed
I_kwDODunzps5WF5F8
closed
[]
null
5,226
{ "avatar_url": "https://avatars.githubusercontent.com/u/43239645?v=4", "events_url": "https://api.github.com/users/bayartsogt-ya/events{/privacy}", "followers_url": "https://api.github.com/users/bayartsogt-ya/followers", "following_url": "https://api.github.com/users/bayartsogt-ya/following{/other_user}", "gists_url": "https://api.github.com/users/bayartsogt-ya/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/bayartsogt-ya", "id": 43239645, "login": "bayartsogt-ya", "node_id": "MDQ6VXNlcjQzMjM5NjQ1", "organizations_url": "https://api.github.com/users/bayartsogt-ya/orgs", "received_events_url": "https://api.github.com/users/bayartsogt-ya/received_events", "repos_url": "https://api.github.com/users/bayartsogt-ya/repos", "site_admin": false, "starred_url": "https://api.github.com/users/bayartsogt-ya/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/bayartsogt-ya/subscriptions", "type": "User", "url": "https://api.github.com/users/bayartsogt-ya" }
Q: Memory release when removing the column?
https://api.github.com/repos/huggingface/datasets/issues/5226/events
null
https://api.github.com/repos/huggingface/datasets/issues/5226/labels{/name}
2022-11-10T18:35:27Z
null
false
null
null
1,444,385,148
[]
https://api.github.com/repos/huggingface/datasets/issues/5226
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
### Describe the bug How do I release memory when I use methods like `.remove_columns()` or `clear()` in notebooks? ```python from datasets import load_dataset common_voice = load_dataset("mozilla-foundation/common_voice_11_0", "ja", use_auth_token=True) # check memory -> RAM Used (GB): 0.704 / Total (GB) 33.670 common_voice = common_voice.remove_columns(column_names=common_voice.column_names['train']) common_voice.clear() # check memory -> RAM Used (GB): 0.705 / Total (GB) 33.670 ``` I tried `gc.collect()` but did not help ### Steps to reproduce the bug 1. load dataset 2. remove all the columns 3. check memory is reduced or not [link to reproduce](https://www.kaggle.com/code/bayartsogtya/huggingface-dataset-memory-issue/notebook?scriptVersionId=110630567) ### Expected behavior Memory released when I remove the column ### Environment info - `datasets` version: 2.1.0 - Platform: Linux-5.15.65+-x86_64-with-debian-bullseye-sid - Python version: 3.7.12 - PyArrow version: 8.0.0 - Pandas version: 1.3.5
2022-11-29T15:10:10Z
https://github.com/huggingface/datasets/issues/5226
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5226/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5225/comments
https://api.github.com/repos/huggingface/datasets/issues/5225/timeline
null
null
null
I_kwDODunzps5WFlkf
open
[]
null
5,225
{ "avatar_url": "https://avatars.githubusercontent.com/u/32437151?v=4", "events_url": "https://api.github.com/users/nateraw/events{/privacy}", "followers_url": "https://api.github.com/users/nateraw/followers", "following_url": "https://api.github.com/users/nateraw/following{/other_user}", "gists_url": "https://api.github.com/users/nateraw/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/nateraw", "id": 32437151, "login": "nateraw", "node_id": "MDQ6VXNlcjMyNDM3MTUx", "organizations_url": "https://api.github.com/users/nateraw/orgs", "received_events_url": "https://api.github.com/users/nateraw/received_events", "repos_url": "https://api.github.com/users/nateraw/repos", "site_admin": false, "starred_url": "https://api.github.com/users/nateraw/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/nateraw/subscriptions", "type": "User", "url": "https://api.github.com/users/nateraw" }
Add video feature
https://api.github.com/repos/huggingface/datasets/issues/5225/events
null
https://api.github.com/repos/huggingface/datasets/issues/5225/labels{/name}
2022-11-10T17:36:11Z
null
false
null
null
1,444,305,183
[ { "color": "a2eeef", "default": true, "description": "New feature or request", "id": 1935892871, "name": "enhancement", "node_id": "MDU6TGFiZWwxOTM1ODkyODcx", "url": "https://api.github.com/repos/huggingface/datasets/labels/enhancement" }, { "color": "008672", "default": true, "description": "Extra attention is needed", "id": 1935892884, "name": "help wanted", "node_id": "MDU6TGFiZWwxOTM1ODkyODg0", "url": "https://api.github.com/repos/huggingface/datasets/labels/help%20wanted" }, { "color": "bfdadc", "default": false, "description": "Vision datasets", "id": 3608941089, "name": "vision", "node_id": "LA_kwDODunzps7XHBIh", "url": "https://api.github.com/repos/huggingface/datasets/labels/vision" } ]
https://api.github.com/repos/huggingface/datasets/issues/5225
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
### Feature request Add a `Video` feature to the library so folks can include videos in their datasets. ### Motivation Being able to load Video data would be quite helpful. However, there are some challenges when it comes to videos: 1. Videos, unlike images, can end up being extremely large files 2. Often times when training video models, you need to do some very specific sampling. Videos might end up needing to be broken down into X number of clips used for training/inference 3. Videos have an additional audio stream, which must be accounted for 4. The feature needs to be able to encode/decode videos (with right video settings) from bytes. ### Your contribution I did work on this a while back in [this (now closed) PR](https://github.com/huggingface/datasets/pull/4532). It used a library I made called [encoded_video](https://github.com/nateraw/encoded-video), which is basically the utils from [pytorchvideo](https://github.com/facebookresearch/pytorchvideo), but without the `torch` dep. It included the ability to read/write from bytes, as we need to do here. We don't want to be using a sketchy library that I made as a dependency in this repo, though. Would love to use this issue as a place to: - brainstorm ideas on how to do this right - list ways/examples to work around it for now CC @sayakpaul @mariosasko @fcakyon
2022-12-02T15:13:15Z
https://github.com/huggingface/datasets/issues/5225
{ "+1": 3, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 3, "url": "https://api.github.com/repos/huggingface/datasets/issues/5225/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5224/comments
https://api.github.com/repos/huggingface/datasets/issues/5224/timeline
2022-11-22T11:24:19Z
null
completed
I_kwDODunzps5WDDYj
closed
[]
null
5,224
{ "avatar_url": "https://avatars.githubusercontent.com/u/45894267?v=4", "events_url": "https://api.github.com/users/uriii3/events{/privacy}", "followers_url": "https://api.github.com/users/uriii3/followers", "following_url": "https://api.github.com/users/uriii3/following{/other_user}", "gists_url": "https://api.github.com/users/uriii3/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/uriii3", "id": 45894267, "login": "uriii3", "node_id": "MDQ6VXNlcjQ1ODk0MjY3", "organizations_url": "https://api.github.com/users/uriii3/orgs", "received_events_url": "https://api.github.com/users/uriii3/received_events", "repos_url": "https://api.github.com/users/uriii3/repos", "site_admin": false, "starred_url": "https://api.github.com/users/uriii3/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/uriii3/subscriptions", "type": "User", "url": "https://api.github.com/users/uriii3" }
Seems to freeze when loading audio dataset with wav files from local folder
https://api.github.com/repos/huggingface/datasets/issues/5224/events
null
https://api.github.com/repos/huggingface/datasets/issues/5224/labels{/name}
2022-11-10T10:29:31Z
null
false
null
null
1,443,640,867
[]
https://api.github.com/repos/huggingface/datasets/issues/5224
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
### Describe the bug I'm following the instructions in [https://huggingface.co/docs/datasets/audio_load#audiofolder-with-metadata](url) to be able to load a dataset from a local folder. I have everything into a folder, into a train folder and then the audios and csv. When I try to load the dataset and run from terminal, seems to work but then freezes with no apparent reason. The metadata.csv file contains a few columns but the important ones, `file_name` with the filename and `transcription` with the transcription are okay. The audios are `.wav` files, I don't know if that might be the problem (I will proceed to try to change them all to `.mp3` and try again). ### Steps to reproduce the bug The code I'm using: ```python from datasets import load_dataset dataset = load_dataset("audiofolder", data_dir="../archive/Dataset") dataset[0]["audio"] ``` The output I obtain: ``` Resolving data files: 100%|█████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 439/439 [00:00<00:00, 311135.43it/s] Using custom data configuration default-38d4546ffd010f3e Downloading and preparing dataset audiofolder/default to /Users/mine/.cache/huggingface/datasets/audiofolder/default-38d4546ffd010f3e/0.0.0/6cbdd16f8688354c63b4e2a36e1585d05de285023ee6443ffd71c4182055c0fc... Resolving data files: 100%|█████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 439/439 [00:00<00:00, 166467.72it/s] Using custom data configuration default-38d4546ffd010f3e Resolving data files: 100%|█████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 439/439 [00:00<00:00, 187772.74it/s] Using custom data configuration default-38d4546ffd010f3e Resolving data files: 100%|██████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 439/439 [00:00<00:00, 59623.71it/s] Resolving data files: 100%|█████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 439/439 [00:00<00:00, 138090.55it/s] Resolving data files: 100%|█████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 439/439 [00:00<00:00, 106065.64it/s] Resolving data files: 100%|██████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 439/439 [00:00<00:00, 56036.38it/s] Resolving data files: 100%|██████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 439/439 [00:00<00:00, 74004.24it/s] Resolving data files: 100%|█████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 439/439 [00:00<00:00, 162343.45it/s] Resolving data files: 100%|█████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 439/439 [00:00<00:00, 101881.23it/s] Using custom data configuration default-38d4546ffd010f3e Resolving data files: 100%|██████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 439/439 [00:00<00:00, 60145.67it/s] Resolving data files: 
100%|██████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 439/439 [00:00<00:00, 80890.02it/s] Resolving data files: 100%|██████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 439/439 [00:00<00:00, 54036.67it/s] Resolving data files: 100%|██████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 439/439 [00:00<00:00, 95851.09it/s] Resolving data files: 100%|█████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 439/439 [00:00<00:00, 155897.00it/s] Resolving data files: 100%|█████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 439/439 [00:00<00:00, 137656.96it/s] Resolving data files: 100%|█████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 439/439 [00:00<00:00, 131230.81it/s] Using custom data configuration default-38d4546ffd010f3e Using custom data configuration default-38d4546ffd010f3e Using custom data configuration default-38d4546ffd010f3e Using custom data configuration default-38d4546ffd010f3e Using custom data configuration default-38d4546ffd010f3e Using custom data configuration default-38d4546ffd010f3e Using custom data configuration default-38d4546ffd010f3e Using custom data configuration default-38d4546ffd010f3e Using custom data configuration default-38d4546ffd010f3e Using custom data configuration default-38d4546ffd010f3e Using custom data configuration default-38d4546ffd010f3e Using custom data configuration default-38d4546ffd010f3e Using custom data configuration default-38d4546ffd010f3e ``` And then here it just freezes and nothing more happens. ### Expected behavior Load the dataset. ### Environment info Datasets version: datasets 2.6.1 pypi_0 pypi
2023-04-25T09:54:05Z
https://github.com/huggingface/datasets/issues/5224
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5224/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5223/comments
https://api.github.com/repos/huggingface/datasets/issues/5223/timeline
2022-11-15T17:40:21Z
null
null
PR_kwDODunzps5CjT9Z
closed
[]
false
5,223
{ "avatar_url": "https://avatars.githubusercontent.com/u/59462357?v=4", "events_url": "https://api.github.com/users/stevhliu/events{/privacy}", "followers_url": "https://api.github.com/users/stevhliu/followers", "following_url": "https://api.github.com/users/stevhliu/following{/other_user}", "gists_url": "https://api.github.com/users/stevhliu/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/stevhliu", "id": 59462357, "login": "stevhliu", "node_id": "MDQ6VXNlcjU5NDYyMzU3", "organizations_url": "https://api.github.com/users/stevhliu/orgs", "received_events_url": "https://api.github.com/users/stevhliu/received_events", "repos_url": "https://api.github.com/users/stevhliu/repos", "site_admin": false, "starred_url": "https://api.github.com/users/stevhliu/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/stevhliu/subscriptions", "type": "User", "url": "https://api.github.com/users/stevhliu" }
Add SQL guide
https://api.github.com/repos/huggingface/datasets/issues/5223/events
null
https://api.github.com/repos/huggingface/datasets/issues/5223/labels{/name}
2022-11-09T19:10:27Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5223.diff", "html_url": "https://github.com/huggingface/datasets/pull/5223", "merged_at": "2022-11-15T17:40:21Z", "patch_url": "https://github.com/huggingface/datasets/pull/5223.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5223" }
1,442,610,658
[]
https://api.github.com/repos/huggingface/datasets/issues/5223
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
This PR adapts @nateraw's awesome SQL notebook as a guide for the docs!
2022-11-15T17:40:25Z
https://github.com/huggingface/datasets/pull/5223
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5223/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5222/comments
https://api.github.com/repos/huggingface/datasets/issues/5222/timeline
2022-11-09T18:06:57Z
null
completed
I_kwDODunzps5V-Xfb
closed
[]
null
5,222
{ "avatar_url": "https://avatars.githubusercontent.com/u/10626398?v=4", "events_url": "https://api.github.com/users/ProGamerGov/events{/privacy}", "followers_url": "https://api.github.com/users/ProGamerGov/followers", "following_url": "https://api.github.com/users/ProGamerGov/following{/other_user}", "gists_url": "https://api.github.com/users/ProGamerGov/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/ProGamerGov", "id": 10626398, "login": "ProGamerGov", "node_id": "MDQ6VXNlcjEwNjI2Mzk4", "organizations_url": "https://api.github.com/users/ProGamerGov/orgs", "received_events_url": "https://api.github.com/users/ProGamerGov/received_events", "repos_url": "https://api.github.com/users/ProGamerGov/repos", "site_admin": false, "starred_url": "https://api.github.com/users/ProGamerGov/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/ProGamerGov/subscriptions", "type": "User", "url": "https://api.github.com/users/ProGamerGov" }
HuggingFace website is incorrectly reporting that my datasets are pickled
https://api.github.com/repos/huggingface/datasets/issues/5222/events
null
https://api.github.com/repos/huggingface/datasets/issues/5222/labels{/name}
2022-11-09T16:41:16Z
null
false
null
null
1,442,412,507
[]
https://api.github.com/repos/huggingface/datasets/issues/5222
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
### Describe the bug HuggingFace is incorrectly reporting that my datasets are pickled. They are not pickled, they are simple ZIP files containing PNG images. Hopefully this is the right location to report this bug. ### Steps to reproduce the bug Inspect my dataset repository here: https://huggingface.co/datasets/ProGamerGov/StableDiffusion-v1-5-Regularization-Images ### Expected behavior They should not be reported as being pickled. ### Environment info N/A
2022-11-09T18:10:46Z
https://github.com/huggingface/datasets/issues/5222
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5222/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5221/comments
https://api.github.com/repos/huggingface/datasets/issues/5221/timeline
2022-11-10T18:11:11Z
null
completed
I_kwDODunzps5V9-Pm
closed
[]
null
5,221
{ "avatar_url": "https://avatars.githubusercontent.com/u/43239645?v=4", "events_url": "https://api.github.com/users/bayartsogt-ya/events{/privacy}", "followers_url": "https://api.github.com/users/bayartsogt-ya/followers", "following_url": "https://api.github.com/users/bayartsogt-ya/following{/other_user}", "gists_url": "https://api.github.com/users/bayartsogt-ya/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/bayartsogt-ya", "id": 43239645, "login": "bayartsogt-ya", "node_id": "MDQ6VXNlcjQzMjM5NjQ1", "organizations_url": "https://api.github.com/users/bayartsogt-ya/orgs", "received_events_url": "https://api.github.com/users/bayartsogt-ya/received_events", "repos_url": "https://api.github.com/users/bayartsogt-ya/repos", "site_admin": false, "starred_url": "https://api.github.com/users/bayartsogt-ya/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/bayartsogt-ya/subscriptions", "type": "User", "url": "https://api.github.com/users/bayartsogt-ya" }
Cannot push
https://api.github.com/repos/huggingface/datasets/issues/5221/events
null
https://api.github.com/repos/huggingface/datasets/issues/5221/labels{/name}
2022-11-09T15:32:05Z
null
false
null
null
1,442,309,094
[]
https://api.github.com/repos/huggingface/datasets/issues/5221
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
### Describe the bug I am facing an issue when I try to push a tar.gz file of around 11G to the Hub. ``` (venv) ╭─laptop@laptop ~/PersonalProjects/data/ulaanbal_v0 ‹main●› ╰─$ du -sh * 4.0K README.md 13G data 516K test.jsonl 18M train.jsonl 4.0K ulaanbal_v0.py 11G ulaanbal_v0.tar.gz 452K validation.jsonl (venv) ╭─laptop@laptop~/PersonalProjects/data/ulaanbal_v0 ‹main●› ╰─$ git add ulaanbal_v0.tar.gz && git commit -m 'large version' (venv) ╭─laptop@laptop ~/PersonalProjects/data/ulaanbal_v0 ‹main●› ╰─$ git push EOFoading LFS objects: 0% (0/1), 0 B | 0 B/s Uploading LFS objects: 0% (0/1), 0 B | 0 B/s, done. error: failed to push some refs to 'https://huggingface.co/datasets/bayartsogt/ulaanbal_v0' ``` I have already tried pushing a small version of this and it was working fine. So my guess is that it is probably because of the big file. I ran the following before the commit: ``` ╰─$ git lfs install ╰─$ huggingface-cli lfs-enable-largefiles . ``` ### Steps to reproduce the bug Create a private dataset on huggingface and push a 12G tar.gz file ### Expected behavior To be pushed with no issue ### Environment info - `datasets` version: 2.6.1 - Platform: Darwin-21.6.0-x86_64-i386-64bit - Python version: 3.7.11 - PyArrow version: 10.0.0 - Pandas version: 1.3.5
2022-11-10T18:11:21Z
https://github.com/huggingface/datasets/issues/5221
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5221/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5220/comments
https://api.github.com/repos/huggingface/datasets/issues/5220/timeline
2022-11-10T16:12:26Z
null
completed
I_kwDODunzps5V7g15
closed
[]
null
5,220
{ "avatar_url": "https://avatars.githubusercontent.com/u/48946947?v=4", "events_url": "https://api.github.com/users/sanderland/events{/privacy}", "followers_url": "https://api.github.com/users/sanderland/followers", "following_url": "https://api.github.com/users/sanderland/following{/other_user}", "gists_url": "https://api.github.com/users/sanderland/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/sanderland", "id": 48946947, "login": "sanderland", "node_id": "MDQ6VXNlcjQ4OTQ2OTQ3", "organizations_url": "https://api.github.com/users/sanderland/orgs", "received_events_url": "https://api.github.com/users/sanderland/received_events", "repos_url": "https://api.github.com/users/sanderland/repos", "site_admin": false, "starred_url": "https://api.github.com/users/sanderland/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/sanderland/subscriptions", "type": "User", "url": "https://api.github.com/users/sanderland" }
Implicit type conversion of lists in to_pandas
https://api.github.com/repos/huggingface/datasets/issues/5220/events
null
https://api.github.com/repos/huggingface/datasets/issues/5220/labels{/name}
2022-11-09T08:40:18Z
null
false
null
null
1,441,664,377
[]
https://api.github.com/repos/huggingface/datasets/issues/5220
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
### Describe the bug ``` ds = Dataset.from_list([{'a':[1,2,3]}]) ds.to_pandas().a.values[0] ``` Results in `array([1, 2, 3])` -- a rather unexpected conversion of types, which made downstream tools that expect lists unhappy. ### Steps to reproduce the bug See snippet ### Expected behavior Keep the original type ### Environment info datasets 2.6.1 python 3.8.10
2022-11-10T16:12:26Z
https://github.com/huggingface/datasets/issues/5220
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5220/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5219/comments
https://api.github.com/repos/huggingface/datasets/issues/5219/timeline
null
null
null
I_kwDODunzps5V59Hm
open
[]
null
5,219
{ "avatar_url": "https://avatars.githubusercontent.com/u/23002137?v=4", "events_url": "https://api.github.com/users/reichenbch/events{/privacy}", "followers_url": "https://api.github.com/users/reichenbch/followers", "following_url": "https://api.github.com/users/reichenbch/following{/other_user}", "gists_url": "https://api.github.com/users/reichenbch/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/reichenbch", "id": 23002137, "login": "reichenbch", "node_id": "MDQ6VXNlcjIzMDAyMTM3", "organizations_url": "https://api.github.com/users/reichenbch/orgs", "received_events_url": "https://api.github.com/users/reichenbch/received_events", "repos_url": "https://api.github.com/users/reichenbch/repos", "site_admin": false, "starred_url": "https://api.github.com/users/reichenbch/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/reichenbch/subscriptions", "type": "User", "url": "https://api.github.com/users/reichenbch" }
Delta Tables usage using Datasets Library
https://api.github.com/repos/huggingface/datasets/issues/5219/events
null
https://api.github.com/repos/huggingface/datasets/issues/5219/labels{/name}
2022-11-09T02:43:56Z
null
false
null
null
1,441,255,910
[ { "color": "a2eeef", "default": true, "description": "New feature or request", "id": 1935892871, "name": "enhancement", "node_id": "MDU6TGFiZWwxOTM1ODkyODcx", "url": "https://api.github.com/repos/huggingface/datasets/labels/enhancement" } ]
https://api.github.com/repos/huggingface/datasets/issues/5219
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
### Feature request Adding compatibility of Datasets library with Delta Format. Elevating the utilities of Datasets library from Machine Learning Scope to Data Engineering Scope as well. ### Motivation We know the datasets library can absorb csv, json, parquet, etc. file formats, but it would be great if the Datasets library could work with Delta Tables (with delta format), as it has features such as time travel, layout optimization, and query performance, and aids in Data Engineering. This will help and enhance the Datasets library from Machine Learning utility to Data Engineering utilities and expand horizons thereafter. I am totally using the Datasets library in all my use cases, and as my role expands so does the work; compatibility with the Datasets library is something I don't want to lose. ### Your contribution Would love to work on this feature, even if this has to be picked up from scratch, including design paradigms and patterns. I have a basic idea about Delta Live Tables and would brush up on it easily for this feature.
2023-03-02T19:29:12Z
https://github.com/huggingface/datasets/issues/5219
{ "+1": 1, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 1, "url": "https://api.github.com/repos/huggingface/datasets/issues/5219/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5218/comments
https://api.github.com/repos/huggingface/datasets/issues/5218/timeline
2022-11-09T02:42:36Z
null
completed
I_kwDODunzps5V58sy
closed
[]
null
5,218
{ "avatar_url": "https://avatars.githubusercontent.com/u/103188035?v=4", "events_url": "https://api.github.com/users/rcv-koo/events{/privacy}", "followers_url": "https://api.github.com/users/rcv-koo/followers", "following_url": "https://api.github.com/users/rcv-koo/following{/other_user}", "gists_url": "https://api.github.com/users/rcv-koo/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/rcv-koo", "id": 103188035, "login": "rcv-koo", "node_id": "U_kgDOBiaGQw", "organizations_url": "https://api.github.com/users/rcv-koo/orgs", "received_events_url": "https://api.github.com/users/rcv-koo/received_events", "repos_url": "https://api.github.com/users/rcv-koo/repos", "site_admin": false, "starred_url": "https://api.github.com/users/rcv-koo/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/rcv-koo/subscriptions", "type": "User", "url": "https://api.github.com/users/rcv-koo" }
Delta Tables usage using Datasets Library
https://api.github.com/repos/huggingface/datasets/issues/5218/events
null
https://api.github.com/repos/huggingface/datasets/issues/5218/labels{/name}
2022-11-09T02:42:18Z
null
false
null
null
1,441,254,194
[ { "color": "a2eeef", "default": true, "description": "New feature or request", "id": 1935892871, "name": "enhancement", "node_id": "MDU6TGFiZWwxOTM1ODkyODcx", "url": "https://api.github.com/repos/huggingface/datasets/labels/enhancement" } ]
https://api.github.com/repos/huggingface/datasets/issues/5218
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
### Feature request Adding compatibility of Datasets library with Delta Format. Elevating the utilities of Datasets library from Machine Learning Scope to Data Engineering Scope as well. ### Motivation We know the datasets library can absorb csv, json, parquet, etc. file formats, but it would be great if the Datasets library could work with Delta Tables (with delta format), as it has features such as time travel, layout optimization, and query performance, and aids in Data Engineering. This will help and enhance the Datasets library from Machine Learning utility to Data Engineering utilities and expand horizons thereafter. I am totally using the Datasets library in all my use cases, and as my role expands so does the work; compatibility with the Datasets library is something I don't want to lose. ### Your contribution Would love to work on this feature, even if this has to be picked up from scratch, including design paradigms and patterns. I have a basic idea about Delta Live Tables and would brush up on it easily for this feature.
2022-11-09T02:42:36Z
https://github.com/huggingface/datasets/issues/5218
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5218/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5217/comments
https://api.github.com/repos/huggingface/datasets/issues/5217/timeline
2022-11-10T01:36:09Z
null
null
PR_kwDODunzps5CetXs
closed
[]
false
5,217
{ "avatar_url": "https://avatars.githubusercontent.com/u/22957388?v=4", "events_url": "https://api.github.com/users/sayakpaul/events{/privacy}", "followers_url": "https://api.github.com/users/sayakpaul/followers", "following_url": "https://api.github.com/users/sayakpaul/following{/other_user}", "gists_url": "https://api.github.com/users/sayakpaul/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/sayakpaul", "id": 22957388, "login": "sayakpaul", "node_id": "MDQ6VXNlcjIyOTU3Mzg4", "organizations_url": "https://api.github.com/users/sayakpaul/orgs", "received_events_url": "https://api.github.com/users/sayakpaul/received_events", "repos_url": "https://api.github.com/users/sayakpaul/repos", "site_admin": false, "starred_url": "https://api.github.com/users/sayakpaul/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/sayakpaul/subscriptions", "type": "User", "url": "https://api.github.com/users/sayakpaul" }
Reword E2E training and inference tips in the vision guides
https://api.github.com/repos/huggingface/datasets/issues/5217/events
null
https://api.github.com/repos/huggingface/datasets/issues/5217/labels{/name}
2022-11-09T02:40:01Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5217.diff", "html_url": "https://github.com/huggingface/datasets/pull/5217", "merged_at": "2022-11-10T01:36:08Z", "patch_url": "https://github.com/huggingface/datasets/pull/5217.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5217" }
1,441,252,740
[]
https://api.github.com/repos/huggingface/datasets/issues/5217
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
Reference: https://github.com/huggingface/datasets/pull/5188#discussion_r1012148730
2022-11-10T01:38:09Z
https://github.com/huggingface/datasets/pull/5217
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5217/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5216/comments
https://api.github.com/repos/huggingface/datasets/issues/5216/timeline
null
null
null
I_kwDODunzps5V5I4b
open
[]
null
5,216
{ "avatar_url": "https://avatars.githubusercontent.com/u/12739718?v=4", "events_url": "https://api.github.com/users/amobash2/events{/privacy}", "followers_url": "https://api.github.com/users/amobash2/followers", "following_url": "https://api.github.com/users/amobash2/following{/other_user}", "gists_url": "https://api.github.com/users/amobash2/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/amobash2", "id": 12739718, "login": "amobash2", "node_id": "MDQ6VXNlcjEyNzM5NzE4", "organizations_url": "https://api.github.com/users/amobash2/orgs", "received_events_url": "https://api.github.com/users/amobash2/received_events", "repos_url": "https://api.github.com/users/amobash2/repos", "site_admin": false, "starred_url": "https://api.github.com/users/amobash2/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/amobash2/subscriptions", "type": "User", "url": "https://api.github.com/users/amobash2" }
save_elasticsearch_index
https://api.github.com/repos/huggingface/datasets/issues/5216/events
null
https://api.github.com/repos/huggingface/datasets/issues/5216/labels{/name}
2022-11-08T23:06:52Z
null
false
null
null
1,441,041,947
[]
https://api.github.com/repos/huggingface/datasets/issues/5216
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
Hi, I am new to Dataset and Elasticsearch. I was wondering whether there is any approach, equivalent to save_faiss_index, to save an Elasticsearch index locally for later use, to remove the need to re-index a dataset?
2022-11-09T13:16:45Z
https://github.com/huggingface/datasets/issues/5216
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5216/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5214/comments
https://api.github.com/repos/huggingface/datasets/issues/5214/timeline
2022-11-08T15:39:57Z
null
null
PR_kwDODunzps5CbmWE
closed
[]
false
5,214
{ "avatar_url": "https://avatars.githubusercontent.com/u/11827707?v=4", "events_url": "https://api.github.com/users/mishig25/events{/privacy}", "followers_url": "https://api.github.com/users/mishig25/followers", "following_url": "https://api.github.com/users/mishig25/following{/other_user}", "gists_url": "https://api.github.com/users/mishig25/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mishig25", "id": 11827707, "login": "mishig25", "node_id": "MDQ6VXNlcjExODI3NzA3", "organizations_url": "https://api.github.com/users/mishig25/orgs", "received_events_url": "https://api.github.com/users/mishig25/received_events", "repos_url": "https://api.github.com/users/mishig25/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mishig25/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mishig25/subscriptions", "type": "User", "url": "https://api.github.com/users/mishig25" }
Update github pr docs actions
https://api.github.com/repos/huggingface/datasets/issues/5214/events
null
https://api.github.com/repos/huggingface/datasets/issues/5214/labels{/name}
2022-11-08T14:43:37Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5214.diff", "html_url": "https://github.com/huggingface/datasets/pull/5214", "merged_at": "2022-11-08T15:39:57Z", "patch_url": "https://github.com/huggingface/datasets/pull/5214.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5214" }
1,440,334,978
[]
https://api.github.com/repos/huggingface/datasets/issues/5214
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
null
2022-11-08T15:39:58Z
https://github.com/huggingface/datasets/pull/5214
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5214/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5213/comments
https://api.github.com/repos/huggingface/datasets/issues/5213/timeline
2022-12-02T16:44:07Z
null
null
PR_kwDODunzps5CalQ_
closed
[ { "avatar_url": "https://avatars.githubusercontent.com/u/16348744?v=4", "events_url": "https://api.github.com/users/polinaeterna/events{/privacy}", "followers_url": "https://api.github.com/users/polinaeterna/followers", "following_url": "https://api.github.com/users/polinaeterna/following{/other_user}", "gists_url": "https://api.github.com/users/polinaeterna/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/polinaeterna", "id": 16348744, "login": "polinaeterna", "node_id": "MDQ6VXNlcjE2MzQ4NzQ0", "organizations_url": "https://api.github.com/users/polinaeterna/orgs", "received_events_url": "https://api.github.com/users/polinaeterna/received_events", "repos_url": "https://api.github.com/users/polinaeterna/repos", "site_admin": false, "starred_url": "https://api.github.com/users/polinaeterna/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/polinaeterna/subscriptions", "type": "User", "url": "https://api.github.com/users/polinaeterna" } ]
true
5,213
{ "avatar_url": "https://avatars.githubusercontent.com/u/16348744?v=4", "events_url": "https://api.github.com/users/polinaeterna/events{/privacy}", "followers_url": "https://api.github.com/users/polinaeterna/followers", "following_url": "https://api.github.com/users/polinaeterna/following{/other_user}", "gists_url": "https://api.github.com/users/polinaeterna/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/polinaeterna", "id": 16348744, "login": "polinaeterna", "node_id": "MDQ6VXNlcjE2MzQ4NzQ0", "organizations_url": "https://api.github.com/users/polinaeterna/orgs", "received_events_url": "https://api.github.com/users/polinaeterna/received_events", "repos_url": "https://api.github.com/users/polinaeterna/repos", "site_admin": false, "starred_url": "https://api.github.com/users/polinaeterna/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/polinaeterna/subscriptions", "type": "User", "url": "https://api.github.com/users/polinaeterna" }
Add support for different configs with `push_to_hub`
https://api.github.com/repos/huggingface/datasets/issues/5213/events
null
https://api.github.com/repos/huggingface/datasets/issues/5213/labels{/name}
2022-11-08T11:45:47Z
null
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/16348744?v=4", "events_url": "https://api.github.com/users/polinaeterna/events{/privacy}", "followers_url": "https://api.github.com/users/polinaeterna/followers", "following_url": "https://api.github.com/users/polinaeterna/following{/other_user}", "gists_url": "https://api.github.com/users/polinaeterna/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/polinaeterna", "id": 16348744, "login": "polinaeterna", "node_id": "MDQ6VXNlcjE2MzQ4NzQ0", "organizations_url": "https://api.github.com/users/polinaeterna/orgs", "received_events_url": "https://api.github.com/users/polinaeterna/received_events", "repos_url": "https://api.github.com/users/polinaeterna/repos", "site_admin": false, "starred_url": "https://api.github.com/users/polinaeterna/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/polinaeterna/subscriptions", "type": "User", "url": "https://api.github.com/users/polinaeterna" }
{ "diff_url": "https://github.com/huggingface/datasets/pull/5213.diff", "html_url": "https://github.com/huggingface/datasets/pull/5213", "merged_at": null, "patch_url": "https://github.com/huggingface/datasets/pull/5213.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5213" }
1,440,037,534
[ { "color": "a2eeef", "default": true, "description": "New feature or request", "id": 1935892871, "name": "enhancement", "node_id": "MDU6TGFiZWwxOTM1ODkyODcx", "url": "https://api.github.com/repos/huggingface/datasets/labels/enhancement" } ]
https://api.github.com/repos/huggingface/datasets/issues/5213
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
will solve #5151 @lhoestq @albertvillanova @mariosasko This is still a super draft so please ignore code issues but I want to discuss some conceptually important things. I suggest a way to do `.push_to_hub("repo_id", "config_name")` with pushing parquet files to directories named as `config_name` (inside `data/` dir as it is now), for example: ``` data |__config-v1 train-00000-00002-...-.parquet train-00001-00002-...-.parquet ... |__config-v2 .... ``` When loading a dataset, I parse these configs from repository data files (only for `"data/{split}-[0-9][0-9][0-9][0-9][0-9]-of-[0-9][0-9][0-9][0-9][0-9]*.*"` pattern that is used for parquet datasets pushed with `.push_to_hub`). Therefore, - when user tries to load a dataset that has configs parsed from data files dir names without providing a config (like `load_dataset("repo")` instead of `load_dataset("repo", "config-v1")`) - raise error and asks for config - to be aligned with how it works in datasets with scripts. - for backward compatibility: if user tries to `.push_to_hub(""repo", "config_name")` to an existing parquet repo with no configurations (all parquet files are directly in `data/` dir) - raise error. My initial idea was to raise a warning and move these files to another dir with name (config) like "default" or smth but in a PR and suggest user to merge it on the Hub. But there is no support for renaming (moving) files via `HfApi` yet so it would require deleting and pushing again if I understand it right. This parsing approach can be extended to other Hub packaged modules, and to local packaged modules and other data files patterns (except for cases when splits are in dir names `KEYWORDS_IN_DIR_NAME_BASE_PATTERNS` because we allow for arbitrary depth of directory hierarchy). Do you think it's reasonable? Not sure how to provide flexibility (and backward compatibility) to not parsing configs and load all the data in a single config as it is now. I also thought about getting information about configs from Readme.md `dataset_info` ([example](https://huggingface.co/datasets/polinaeterna/test_push_two_configs/blob/main/README.md)). But that way we are dependent on if it exists. It is created automatically with `.push_to_hub` but what if it is accidentally deleted or smth). Also, what I don't like is that this parsing is a part of Module/DataFiles logic, not Builder's one, which is not aligned with datasets with custom scripts. But I don't know to implement the second approach in current library's logic. What do you think about this all? Am I missing smth? TODO: - [ ] save cache in the same dir for configs of the same datasets - [ ] fix verification errors - [ ] correctly update `dataset_infos.json` too - [ ] ...
2022-12-02T16:48:23Z
https://github.com/huggingface/datasets/pull/5213
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5213/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5212/comments
https://api.github.com/repos/huggingface/datasets/issues/5212/timeline
2022-11-15T06:32:26Z
null
null
PR_kwDODunzps5CZPI2
closed
[]
false
5,212
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
Fix CI require_beam maximum compatible dill version
https://api.github.com/repos/huggingface/datasets/issues/5212/events
null
https://api.github.com/repos/huggingface/datasets/issues/5212/labels{/name}
2022-11-08T07:30:01Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5212.diff", "html_url": "https://github.com/huggingface/datasets/pull/5212", "merged_at": "2022-11-15T06:32:26Z", "patch_url": "https://github.com/huggingface/datasets/pull/5212.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5212" }
1,439,642,483
[]
https://api.github.com/repos/huggingface/datasets/issues/5212
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
A previous commit to main branch introduced an additional requirement on maximum compatible `dill` version with `apache-beam` in our CI `require_beam`: - d7c942228b8dcf4de64b00a3053dce59b335f618 - ec222b220b79f10c8d7b015769f0999b15959feb This PR fixes the maximum compatible `dill` version with `apache-beam`, which is <0.3.2 (and not 0.3.6): https://github.com/apache/beam/blob/v2.42.0/sdks/python/setup.py#L219
2022-11-15T06:32:27Z
https://github.com/huggingface/datasets/pull/5212
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5212/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5211/comments
https://api.github.com/repos/huggingface/datasets/issues/5211/timeline
2022-11-29T15:54:17Z
null
null
PR_kwDODunzps5CVgBx
closed
[]
false
5,211
{ "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" }
Update Overview.ipynb google colab
https://api.github.com/repos/huggingface/datasets/issues/5211/events
null
https://api.github.com/repos/huggingface/datasets/issues/5211/labels{/name}
2022-11-07T15:23:52Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5211.diff", "html_url": "https://github.com/huggingface/datasets/pull/5211", "merged_at": "2022-11-29T15:54:17Z", "patch_url": "https://github.com/huggingface/datasets/pull/5211.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5211" }
1,438,544,617
[]
https://api.github.com/repos/huggingface/datasets/issues/5211
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
- removed metrics stuff - added image example - added audio example (with ffmpeg instructions) - updated the "add a new dataset" section
2022-11-29T15:59:48Z
https://github.com/huggingface/datasets/pull/5211
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5211/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5210/comments
https://api.github.com/repos/huggingface/datasets/issues/5210/timeline
2022-11-24T11:26:16Z
null
null
PR_kwDODunzps5CVUzx
closed
[]
false
5,210
{ "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" }
Tweak readme
https://api.github.com/repos/huggingface/datasets/issues/5210/events
null
https://api.github.com/repos/huggingface/datasets/issues/5210/labels{/name}
2022-11-07T14:51:23Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5210.diff", "html_url": "https://github.com/huggingface/datasets/pull/5210", "merged_at": "2022-11-24T11:26:16Z", "patch_url": "https://github.com/huggingface/datasets/pull/5210.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5210" }
1,438,492,507
[]
https://api.github.com/repos/huggingface/datasets/issues/5210
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
Tweaked some paragraphs mentioning the modalities we support + added a paragraph on security
2022-11-24T11:35:07Z
https://github.com/huggingface/datasets/pull/5210
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5210/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5209/comments
https://api.github.com/repos/huggingface/datasets/issues/5209/timeline
2023-07-21T14:36:01Z
null
completed
I_kwDODunzps5Vu7--
closed
[]
null
5,209
{ "avatar_url": "https://avatars.githubusercontent.com/u/53175384?v=4", "events_url": "https://api.github.com/users/merveenoyan/events{/privacy}", "followers_url": "https://api.github.com/users/merveenoyan/followers", "following_url": "https://api.github.com/users/merveenoyan/following{/other_user}", "gists_url": "https://api.github.com/users/merveenoyan/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/merveenoyan", "id": 53175384, "login": "merveenoyan", "node_id": "MDQ6VXNlcjUzMTc1Mzg0", "organizations_url": "https://api.github.com/users/merveenoyan/orgs", "received_events_url": "https://api.github.com/users/merveenoyan/received_events", "repos_url": "https://api.github.com/users/merveenoyan/repos", "site_admin": false, "starred_url": "https://api.github.com/users/merveenoyan/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/merveenoyan/subscriptions", "type": "User", "url": "https://api.github.com/users/merveenoyan" }
Implement ability to define splits in metadata section of dataset card
https://api.github.com/repos/huggingface/datasets/issues/5209/events
null
https://api.github.com/repos/huggingface/datasets/issues/5209/labels{/name}
2022-11-07T13:27:16Z
null
false
null
null
1,438,367,678
[ { "color": "a2eeef", "default": true, "description": "New feature or request", "id": 1935892871, "name": "enhancement", "node_id": "MDU6TGFiZWwxOTM1ODkyODcx", "url": "https://api.github.com/repos/huggingface/datasets/labels/enhancement" } ]
https://api.github.com/repos/huggingface/datasets/issues/5209
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
### Feature request If you go here: https://huggingface.co/datasets/inria-soda/tabular-benchmark/tree/main you will see a bunch of folders that contain various CSV files. I’d like the dataset viewer to show these files instead of only one dataset like it currently does. (and also people to be able to load them as splits instead of loading through `data_files`) e.g. GLUE has various splits on the viewer, but it’s overkill to ask people to implement a loading script, so it would be better to let them define these in the README file instead. Also pinging @polinaeterna @lhoestq @adrinjalali
2023-07-21T14:36:02Z
https://github.com/huggingface/datasets/issues/5209
{ "+1": 1, "-1": 0, "confused": 0, "eyes": 0, "heart": 2, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 3, "url": "https://api.github.com/repos/huggingface/datasets/issues/5209/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5208/comments
https://api.github.com/repos/huggingface/datasets/issues/5208/timeline
2022-11-08T06:49:17Z
null
null
PR_kwDODunzps5CTyxu
closed
[]
false
5,208
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
Refactor CI hub fixtures to use monkeypatch instead of patch
https://api.github.com/repos/huggingface/datasets/issues/5208/events
null
https://api.github.com/repos/huggingface/datasets/issues/5208/labels{/name}
2022-11-07T09:25:05Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5208.diff", "html_url": "https://github.com/huggingface/datasets/pull/5208", "merged_at": "2022-11-08T06:49:17Z", "patch_url": "https://github.com/huggingface/datasets/pull/5208.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5208" }
1,438,035,707
[]
https://api.github.com/repos/huggingface/datasets/issues/5208
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
Minor refactoring of CI to use `pytest` `monkeypatch` instead of `unittest` `patch`.
2022-11-08T06:51:20Z
https://github.com/huggingface/datasets/pull/5208
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5208/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5207/comments
https://api.github.com/repos/huggingface/datasets/issues/5207/timeline
null
null
null
I_kwDODunzps5Vs_rK
open
[]
null
5,207
{ "avatar_url": "https://avatars.githubusercontent.com/u/82404?v=4", "events_url": "https://api.github.com/users/leemgs/events{/privacy}", "followers_url": "https://api.github.com/users/leemgs/followers", "following_url": "https://api.github.com/users/leemgs/following{/other_user}", "gists_url": "https://api.github.com/users/leemgs/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/leemgs", "id": 82404, "login": "leemgs", "node_id": "MDQ6VXNlcjgyNDA0", "organizations_url": "https://api.github.com/users/leemgs/orgs", "received_events_url": "https://api.github.com/users/leemgs/received_events", "repos_url": "https://api.github.com/users/leemgs/repos", "site_admin": false, "starred_url": "https://api.github.com/users/leemgs/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/leemgs/subscriptions", "type": "User", "url": "https://api.github.com/users/leemgs" }
Connection error of the HuggingFace's dataset Hub due to SSLError with proxy
https://api.github.com/repos/huggingface/datasets/issues/5207/events
null
https://api.github.com/repos/huggingface/datasets/issues/5207/labels{/name}
2022-11-07T06:56:23Z
null
false
null
null
1,437,858,506
[]
https://api.github.com/repos/huggingface/datasets/issues/5207
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
### Describe the bug It's weird. I could not normally connect the dataset Hub of HuggingFace due to a SSLError in my office. Even when I try to connect using my company's proxy address (e.g., http_proxy and https_proxy), I'm getting the SSLError issue. What should I do to download the datanet stored in HuggingFace normally? I welcome any comments. I think those comments will be helpful to me. * Dataset address - https://huggingface.co/datasets/moyix/debian_csrc/viewer/moyix--debian_csrc * Log message ``` ............ OMISSION .............. Traceback (most recent call last): File "/data/home/geunsik-lim/qtlab/./transformers/examples/pytorch/language-modeling/run_clm.py", line 587, in <module> main() File "/data/home/geunsik-lim/qtlab/./transformers/examples/pytorch/language-modeling/run_clm.py", line 278, in main raw_datasets = load_dataset( File "/home/geunsik-lim/anaconda3/envs/deepspeed/lib/python3.10/site-packages/datasets/load.py", line 1719, in load_dataset builder_instance = load_dataset_builder( File "/home/geunsik-lim/anaconda3/envs/deepspeed/lib/python3.10/site-packages/datasets/load.py", line 1497, in load_dataset_builder dataset_module = dataset_module_factory( File "/home/geunsik-lim/anaconda3/envs/deepspeed/lib/python3.10/site-packages/datasets/load.py", line 1222, in dataset_module_factory raise e1 from None File "/home/geunsik-lim/anaconda3/envs/deepspeed/lib/python3.10/site-packages/datasets/load.py", line 1179, in dataset_module_factory raise ConnectionError(f"Couldn't reach '{path}' on the Hub ({type(e).__name__})") ConnectionError: Couldn't reach 'moyix/debian_csrc' on the Hub (SSLError) [2022-11-07 15:23:38,476] [INFO] [launch.py:318:sigkill_handler] Killing subprocess 6760 [2022-11-07 15:23:38,476] [ERROR] [launch.py:324:sigkill_handler] ['/home/geunsik-lim/anaconda3/envs/deepspeed/bin/python', '-u', './transformers/examples/pytorch/language-modeling/run_clm.py', '--local_rank=0', '--model_name_or_path=Salesforce/codegen-350M-multi', '--per_device_train_batch_size=1', '--learning_rate', '2e-5', '--num_train_epochs', '1', '--output_dir=./codegen-350M-finetuned', '--overwrite_output_dir', '--dataset_name', 'moyix/debian_csrc', '--cache_dir', '/data/home/geunsik-lim/.cache', '--tokenizer_name', 'Salesforce/codegen-350M-multi', '--block_size', '2048', '--gradient_accumulation_steps', '32', '--do_train', '--fp16', '--deepspeed', 'ds_config_zero2.json'] exits with return code = 1 real 0m7.742s user 0m4.930s ``` ### Steps to reproduce the bug Steps to reproduce this behavior. 
``` (deepspeed) geunsik-lim@ai02:~/qtlab$ ./test_debian_csrc_dataset.py Traceback (most recent call last): File "/data/home/geunsik-lim/qtlab/./test_debian_csrc_dataset.py", line 6, in <module> dataset = load_dataset("moyix/debian_csrc") File "/home/geunsik-lim/anaconda3/envs/deepspeed/lib/python3.10/site-packages/datasets/load.py", line 1719, in load_dataset builder_instance = load_dataset_builder( File "/home/geunsik-lim/anaconda3/envs/deepspeed/lib/python3.10/site-packages/datasets/load.py", line 1497, in load_dataset_builder dataset_module = dataset_module_factory( File "/home/geunsik-lim/anaconda3/envs/deepspeed/lib/python3.10/site-packages/datasets/load.py", line 1222, in dataset_module_factory raise e1 from None File "/home/geunsik-lim/anaconda3/envs/deepspeed/lib/python3.10/site-packages/datasets/load.py", line 1179, in dataset_module_factory raise ConnectionError(f"Couldn't reach '{path}' on the Hub ({type(e).__name__})") ConnectionError: Couldn't reach 'moyix/debian_csrc' on the Hub (SSLError) (deepspeed) geunsik-lim@ai02:~/qtlab$ (deepspeed) geunsik-lim@ai02:~/qtlab$ (deepspeed) geunsik-lim@ai02:~/qtlab$ (deepspeed) geunsik-lim@ai02:~/qtlab$ cat ./test_debian_csrc_dataset.py #!/usr/bin/env python from datasets import load_dataset dataset = load_dataset("moyix/debian_csrc") ``` 1. Adde proxy address of a company in /etc/profile 2. Download dataset with load_dataset() function of datasets package that is provided by HuggingFace. 3. In this case, the address would be "moyix--debian_csrc". 4. I get the "`ConnectionError: Couldn't reach 'moyix/debian_csrc' on the Hub (SSLError`)" error message. ### Expected behavior * error message: ConnectionError: Couldn't reach 'moyix/debian_csrc' on the Hub (SSLError) ### Environment info * software version information: ``` (deepspeed) geunsik-lim@ai02:~$ (deepspeed) geunsik-lim@ai02:~$ conda list -f pytorch # packages in environment at /home/geunsik-lim/anaconda3/envs/deepspeed: # # Name Version Build Channel pytorch 1.13.0 py3.10_cuda11.7_cudnn8.5.0_0 pytorch (deepspeed) geunsik-lim@ai02:~$ conda list -f python # packages in environment at /home/geunsik-lim/anaconda3/envs/deepspeed: # # Name Version Build Channel python 3.10.6 haa1d7c7_1 (deepspeed) geunsik-lim@ai02:~$ conda list -f datasets # packages in environment at /home/geunsik-lim/anaconda3/envs/deepspeed: # # Name Version Build Channel datasets 2.6.1 py_0 huggingface (deepspeed) geunsik-lim@ai02:~$ uname -a Linux ai02 5.4.0-131-generic #147-Ubuntu SMP Fri Oct 14 17:07:22 UTC 2022 x86_64 x86_64 x86_64 GNU/Linux (deepspeed) geunsik-lim@ai02:~$ cat /etc/lsb-release DISTRIB_ID=Ubuntu DISTRIB_RELEASE=20.04 DISTRIB_CODENAME=focal DISTRIB_DESCRIPTION="Ubuntu 20.04.5 LTS" ```
2024-02-28T02:48:23Z
https://github.com/huggingface/datasets/issues/5207
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5207/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5206/comments
https://api.github.com/repos/huggingface/datasets/issues/5206/timeline
2022-11-06T00:05:59Z
null
completed
I_kwDODunzps5VqkvW
closed
[]
null
5,206
{ "avatar_url": "https://avatars.githubusercontent.com/u/16692099?v=4", "events_url": "https://api.github.com/users/bilelomrani1/events{/privacy}", "followers_url": "https://api.github.com/users/bilelomrani1/followers", "following_url": "https://api.github.com/users/bilelomrani1/following{/other_user}", "gists_url": "https://api.github.com/users/bilelomrani1/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/bilelomrani1", "id": 16692099, "login": "bilelomrani1", "node_id": "MDQ6VXNlcjE2NjkyMDk5", "organizations_url": "https://api.github.com/users/bilelomrani1/orgs", "received_events_url": "https://api.github.com/users/bilelomrani1/received_events", "repos_url": "https://api.github.com/users/bilelomrani1/repos", "site_admin": false, "starred_url": "https://api.github.com/users/bilelomrani1/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/bilelomrani1/subscriptions", "type": "User", "url": "https://api.github.com/users/bilelomrani1" }
Use logging instead of printing to console
https://api.github.com/repos/huggingface/datasets/issues/5206/events
null
https://api.github.com/repos/huggingface/datasets/issues/5206/labels{/name}
2022-11-05T23:48:02Z
null
false
null
null
1,437,223,894
[]
https://api.github.com/repos/huggingface/datasets/issues/5206
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
### Describe the bug Some logs ([here](https://github.com/huggingface/datasets/blob/4a6e1fe2735505efc7e3a3dbd3e1835da0702575/src/datasets/builder.py#L778), [here](https://github.com/huggingface/datasets/blob/4a6e1fe2735505efc7e3a3dbd3e1835da0702575/src/datasets/builder.py#L786), and [here](https://github.com/huggingface/datasets/blob/4a6e1fe2735505efc7e3a3dbd3e1835da0702575/src/datasets/builder.py#L830)) generated by the `DatasetBuilder` are printed to the console instead of passed to `datasets` logger. ### Steps to reproduce the bug ```python >> import datasets >> datasets.load_dataset("some-dataset") Downloading and preparing dataset csv/data to <path>... Downloading data files: 100%|██████████████████████████████████████████████████████████████████████████| 3/3 [00:00<00:00, 7729.06it/s] Extracting data files: 100%|████████████████████████████████████████████████████████████████████████████| 3/3 [00:00<00:00, 527.23it/s] Dataset csv downloaded and prepared to <path>. Subsequent calls will reuse this data. ``` ### Expected behavior The logs should not be printed to the console directly but passed to the logger so that the user can redirect them wherever he wants. ### Environment info - `datasets` version: 2.6.1 - Platform: macOS-13.0-x86_64-i386-64bit - Python version: 3.9.15 - PyArrow version: 10.0.0 - Pandas version: 1.5.1
2022-11-06T00:06:00Z
https://github.com/huggingface/datasets/issues/5206
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5206/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5205/comments
https://api.github.com/repos/huggingface/datasets/issues/5205/timeline
2022-11-07T16:20:24Z
null
null
PR_kwDODunzps5CRO33
closed
[]
false
5,205
{ "avatar_url": "https://avatars.githubusercontent.com/u/36760800?v=4", "events_url": "https://api.github.com/users/alvarobartt/events{/privacy}", "followers_url": "https://api.github.com/users/alvarobartt/followers", "following_url": "https://api.github.com/users/alvarobartt/following{/other_user}", "gists_url": "https://api.github.com/users/alvarobartt/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/alvarobartt", "id": 36760800, "login": "alvarobartt", "node_id": "MDQ6VXNlcjM2NzYwODAw", "organizations_url": "https://api.github.com/users/alvarobartt/orgs", "received_events_url": "https://api.github.com/users/alvarobartt/received_events", "repos_url": "https://api.github.com/users/alvarobartt/repos", "site_admin": false, "starred_url": "https://api.github.com/users/alvarobartt/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/alvarobartt/subscriptions", "type": "User", "url": "https://api.github.com/users/alvarobartt" }
Add missing `DownloadConfig.use_auth_token` value
https://api.github.com/repos/huggingface/datasets/issues/5205/events
null
https://api.github.com/repos/huggingface/datasets/issues/5205/labels{/name}
2022-11-05T23:36:36Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5205.diff", "html_url": "https://github.com/huggingface/datasets/pull/5205", "merged_at": "2022-11-07T16:20:24Z", "patch_url": "https://github.com/huggingface/datasets/pull/5205.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5205" }
1,437,221,987
[]
https://api.github.com/repos/huggingface/datasets/issues/5205
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
This PR solves https://github.com/huggingface/datasets/issues/5204 Now the `token` is propagated so that the `DownloadConfig.use_auth_token` value is set before trying to download private files from existing datasets on the Hub.
2022-11-08T08:13:00Z
https://github.com/huggingface/datasets/pull/5205
{ "+1": 1, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 1, "url": "https://api.github.com/repos/huggingface/datasets/issues/5205/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5204/comments
https://api.github.com/repos/huggingface/datasets/issues/5204/timeline
2022-11-08T10:12:08Z
null
completed
I_kwDODunzps5VqkGL
closed
[ { "avatar_url": "https://avatars.githubusercontent.com/u/36760800?v=4", "events_url": "https://api.github.com/users/alvarobartt/events{/privacy}", "followers_url": "https://api.github.com/users/alvarobartt/followers", "following_url": "https://api.github.com/users/alvarobartt/following{/other_user}", "gists_url": "https://api.github.com/users/alvarobartt/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/alvarobartt", "id": 36760800, "login": "alvarobartt", "node_id": "MDQ6VXNlcjM2NzYwODAw", "organizations_url": "https://api.github.com/users/alvarobartt/orgs", "received_events_url": "https://api.github.com/users/alvarobartt/received_events", "repos_url": "https://api.github.com/users/alvarobartt/repos", "site_admin": false, "starred_url": "https://api.github.com/users/alvarobartt/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/alvarobartt/subscriptions", "type": "User", "url": "https://api.github.com/users/alvarobartt" } ]
null
5,204
{ "avatar_url": "https://avatars.githubusercontent.com/u/36760800?v=4", "events_url": "https://api.github.com/users/alvarobartt/events{/privacy}", "followers_url": "https://api.github.com/users/alvarobartt/followers", "following_url": "https://api.github.com/users/alvarobartt/following{/other_user}", "gists_url": "https://api.github.com/users/alvarobartt/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/alvarobartt", "id": 36760800, "login": "alvarobartt", "node_id": "MDQ6VXNlcjM2NzYwODAw", "organizations_url": "https://api.github.com/users/alvarobartt/orgs", "received_events_url": "https://api.github.com/users/alvarobartt/received_events", "repos_url": "https://api.github.com/users/alvarobartt/repos", "site_admin": false, "starred_url": "https://api.github.com/users/alvarobartt/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/alvarobartt/subscriptions", "type": "User", "url": "https://api.github.com/users/alvarobartt" }
`push_to_hub` not propagating `token` through `DownloadConfig`
https://api.github.com/repos/huggingface/datasets/issues/5204/events
null
https://api.github.com/repos/huggingface/datasets/issues/5204/labels{/name}
2022-11-05T23:32:20Z
null
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/36760800?v=4", "events_url": "https://api.github.com/users/alvarobartt/events{/privacy}", "followers_url": "https://api.github.com/users/alvarobartt/followers", "following_url": "https://api.github.com/users/alvarobartt/following{/other_user}", "gists_url": "https://api.github.com/users/alvarobartt/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/alvarobartt", "id": 36760800, "login": "alvarobartt", "node_id": "MDQ6VXNlcjM2NzYwODAw", "organizations_url": "https://api.github.com/users/alvarobartt/orgs", "received_events_url": "https://api.github.com/users/alvarobartt/received_events", "repos_url": "https://api.github.com/users/alvarobartt/repos", "site_admin": false, "starred_url": "https://api.github.com/users/alvarobartt/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/alvarobartt/subscriptions", "type": "User", "url": "https://api.github.com/users/alvarobartt" }
null
1,437,221,259
[]
https://api.github.com/repos/huggingface/datasets/issues/5204
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
### Describe the bug When trying to upload a new 🤗 Dataset to the Hub via Python, and providing the `token` as a parameter to the `Dataset.push_to_hub` function, it just works for the first time, assuming that the dataset didn't exist before. But when trying to run `Dataset.push_to_hub` again over the same dataset, instead of updating it, it throws a `ConnectionError` when trying to retrieve the `README.md` that may contain some metadata about the dataset, so as to also update it, but since the `token` is not propagated, the `DownloadConfig` provided to the `datasets.utils.file_utils.get_from_cache` function doesn't contain the `use_auth_token` value set to `token`, it's just using the default one which is None/False. So on, when uploading a dataset via Python with `push_to_hub` with the `token` as a parameter with the HuggingFace API Token as value, it can just be uploaded when the dataset is new, otherwise it fails with to `ConnectionError` due to the `token` not being propagated as `use_auth_token`. ### Steps to reproduce the bug Let's create a new dataset in our HF account via Python as: ```python from datasets import Dataset data = {"a": [1, 2, 3], "b": [4, 5, 6]} ds = Dataset.from_dict(data) ds.push_to_hub(repo_id=<HF_USERNAME>/<HF_DATASET>, private=private, token=<HF_TOKEN_HERE>) ``` When we create the `Dataset` for the first time it works and there are no issues, but when trying to actually upload a new version of the same dataset (same name under the same username), we encounter the following issue: ```python from datasets import Dataset data = {"a": [1, 2, 3], "b": [4, 5, 6]} ds = Dataset.from_dict(data) ds.push_to_hub(repo_id=<HF_USERNAME>/<HF_DATASET>, private=private, token=<HF_TOKEN_HERE>) >>> ConnectionError: Couldn't reach https://huggingface.co/datasets/alvarobartt/demo/resolve/main/README.md (ConnectionError('Unauthorized for URL https://huggingface.co/datasets/<HF_USERNAME>/<HF_DATASET>/resolve/main/README.md. Please use the parameter `use_auth_token=True` after logging in with `huggingface-cli login`')) ``` ### Expected behavior Ideally, the `token` parameter provided to `push_to_hub` should be propagated and used to download the `README.md` when trying to update a `Dataset`, instead of throwing that exception, so that the authentication can be done directly through code without running `huggingface-cli login`as mentioned at https://huggingface.co/docs/datasets/upload_dataset#upload-with-python. ### Environment info - `datasets` version: 2.6.1 - Platform: macOS-13.0-arm64-arm-64bit - Python version: 3.10.8 - PyArrow version: 10.0.0 - Pandas version: 1.5.1
2022-11-08T10:12:09Z
https://github.com/huggingface/datasets/issues/5204
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5204/reactions" }
false
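A minimal workaround sketch for the `push_to_hub` issue above, not the library's fix: persisting the token programmatically (the equivalent of `huggingface-cli login`) so that the internal `README.md` download is authenticated even though the `token` argument is not propagated. It assumes `huggingface_hub`'s `HfFolder` helper; the repo and token values are the same placeholders used in the report.

```python
from huggingface_hub import HfFolder
from datasets import Dataset

# Assumed workaround: store the token the same way `huggingface-cli login` does,
# so internal downloads (e.g. the existing README.md) are authenticated.
HfFolder.save_token("<HF_TOKEN_HERE>")

data = {"a": [1, 2, 3], "b": [4, 5, 6]}
ds = Dataset.from_dict(data)
ds.push_to_hub("<HF_USERNAME>/<HF_DATASET>", private=True)
```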
https://api.github.com/repos/huggingface/datasets/issues/5203/comments
https://api.github.com/repos/huggingface/datasets/issues/5203/timeline
2022-11-07T18:40:19Z
null
null
PR_kwDODunzps5CPlnW
closed
[]
false
5,203
{ "avatar_url": "https://avatars.githubusercontent.com/u/59462357?v=4", "events_url": "https://api.github.com/users/stevhliu/events{/privacy}", "followers_url": "https://api.github.com/users/stevhliu/followers", "following_url": "https://api.github.com/users/stevhliu/following{/other_user}", "gists_url": "https://api.github.com/users/stevhliu/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/stevhliu", "id": 59462357, "login": "stevhliu", "node_id": "MDQ6VXNlcjU5NDYyMzU3", "organizations_url": "https://api.github.com/users/stevhliu/orgs", "received_events_url": "https://api.github.com/users/stevhliu/received_events", "repos_url": "https://api.github.com/users/stevhliu/repos", "site_admin": false, "starred_url": "https://api.github.com/users/stevhliu/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/stevhliu/subscriptions", "type": "User", "url": "https://api.github.com/users/stevhliu" }
Update canonical links to Hub links
https://api.github.com/repos/huggingface/datasets/issues/5203/events
null
https://api.github.com/repos/huggingface/datasets/issues/5203/labels{/name}
2022-11-04T22:50:50Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5203.diff", "html_url": "https://github.com/huggingface/datasets/pull/5203", "merged_at": "2022-11-07T18:40:19Z", "patch_url": "https://github.com/huggingface/datasets/pull/5203.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5203" }
1,436,710,518
[]
https://api.github.com/repos/huggingface/datasets/issues/5203
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
This PR updates some of the canonical dataset links to their corresponding links on the Hub; closes #5200.
2022-11-07T18:43:05Z
https://github.com/huggingface/datasets/pull/5203
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5203/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5202/comments
https://api.github.com/repos/huggingface/datasets/issues/5202/timeline
2023-02-16T09:11:10Z
null
completed
I_kwDODunzps5VleIK
closed
[ { "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" } ]
null
5,202
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
CI fails after bulk edit of canonical datasets
https://api.github.com/repos/huggingface/datasets/issues/5202/events
null
https://api.github.com/repos/huggingface/datasets/issues/5202/labels{/name}
2022-11-04T10:51:20Z
null
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
null
1,435,886,090
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
https://api.github.com/repos/huggingface/datasets/issues/5202
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
``` ______ test_get_dataset_config_info[paws-labeled_final-expected_splits2] _______ [gw0] linux -- Python 3.7.15 /opt/hostedtoolcache/Python/3.7.15/x64/bin/python path = 'paws', config_name = 'labeled_final' expected_splits = ['train', 'test', 'validation'] @pytest.mark.parametrize( "path, config_name, expected_splits", [ ("squad", "plain_text", ["train", "validation"]), ("dalle-mini/wit", "dalle-mini--wit", ["train"]), ("paws", "labeled_final", ["train", "test", "validation"]), ], ) def test_get_dataset_config_info(path, config_name, expected_splits): info = get_dataset_config_info(path, config_name=config_name) assert info.config_name == config_name > assert list(info.splits.keys()) == expected_splits E AssertionError: assert ['test', 'tra... 'validation'] == ['train', 'te... 'validation'] E At index 0 diff: 'test' != 'train' E Full diff: E - ['train', 'test', 'validation'] E + ['test', 'train', 'validation'] tests/test_inspect.py:45: AssertionError _ test_get_dataset_info[paws-expected_configs2-expected_splits_in_first_config2] _ [gw0] linux -- Python 3.7.15 /opt/hostedtoolcache/Python/3.7.15/x64/bin/python path = 'paws' expected_configs = ['labeled_final', 'labeled_swap', 'unlabeled_final'] expected_splits_in_first_config = ['train', 'test', 'validation'] @pytest.mark.parametrize( "path, expected_configs, expected_splits_in_first_config", [ ("squad", ["plain_text"], ["train", "validation"]), ("dalle-mini/wit", ["dalle-mini--wit"], ["train"]), ("paws", ["labeled_final", "labeled_swap", "unlabeled_final"], ["train", "test", "validation"]), ], ) def test_get_dataset_info(path, expected_configs, expected_splits_in_first_config): infos = get_dataset_infos(path) assert list(infos.keys()) == expected_configs expected_config = expected_configs[0] assert expected_config in infos info = infos[expected_config] assert info.config_name == expected_config > assert list(info.splits.keys()) == expected_splits_in_first_config E AssertionError: assert ['test', 'tra... 'validation'] == ['train', 'te... 'validation'] E At index 0 diff: 'test' != 'train' E Full diff: E - ['train', 'test', 'validation'] E + ['test', 'train', 'validation'] tests/test_inspect.py:90: AssertionError ______ test_get_dataset_split_names[paws-labeled_final-expected_splits2] _______ [gw0] linux -- Python 3.7.15 /opt/hostedtoolcache/Python/3.7.15/x64/bin/python path = 'paws', expected_config = 'labeled_final' expected_splits = ['train', 'test', 'validation'] @pytest.mark.parametrize( "path, expected_config, expected_splits", [ ("squad", "plain_text", ["train", "validation"]), ("dalle-mini/wit", "dalle-mini--wit", ["train"]), ("paws", "labeled_final", ["train", "test", "validation"]), ], ) def test_get_dataset_split_names(path, expected_config, expected_splits): infos = get_dataset_infos(path) assert expected_config in infos info = infos[expected_config] assert info.config_name == expected_config > assert list(info.splits.keys()) == expected_splits E AssertionError: assert ['test', 'tra... 'validation'] == ['train', 'te... 'validation'] E At index 0 diff: 'test' != 'train' E Full diff: E - ['train', 'test', 'validation'] E + ['test', 'train', 'validation'] ```
2023-02-16T09:11:10Z
https://github.com/huggingface/datasets/issues/5202
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5202/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5201/comments
https://api.github.com/repos/huggingface/datasets/issues/5201/timeline
2022-11-04T14:45:09Z
null
null
PR_kwDODunzps5CM0zn
closed
[]
false
5,201
{ "avatar_url": "https://avatars.githubusercontent.com/u/16348744?v=4", "events_url": "https://api.github.com/users/polinaeterna/events{/privacy}", "followers_url": "https://api.github.com/users/polinaeterna/followers", "following_url": "https://api.github.com/users/polinaeterna/following{/other_user}", "gists_url": "https://api.github.com/users/polinaeterna/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/polinaeterna", "id": 16348744, "login": "polinaeterna", "node_id": "MDQ6VXNlcjE2MzQ4NzQ0", "organizations_url": "https://api.github.com/users/polinaeterna/orgs", "received_events_url": "https://api.github.com/users/polinaeterna/received_events", "repos_url": "https://api.github.com/users/polinaeterna/repos", "site_admin": false, "starred_url": "https://api.github.com/users/polinaeterna/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/polinaeterna/subscriptions", "type": "User", "url": "https://api.github.com/users/polinaeterna" }
Do not sort splits in dataset info
https://api.github.com/repos/huggingface/datasets/issues/5201/events
null
https://api.github.com/repos/huggingface/datasets/issues/5201/labels{/name}
2022-11-04T10:47:21Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5201.diff", "html_url": "https://github.com/huggingface/datasets/pull/5201", "merged_at": "2022-11-04T14:45:09Z", "patch_url": "https://github.com/huggingface/datasets/pull/5201.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5201" }
1,435,881,554
[]
https://api.github.com/repos/huggingface/datasets/issues/5201
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
I suggest not sorting splits by their names in dataset_info in README so that they are displayed in the order specified in the loading script. Otherwise the `test` split is displayed first (see this repo: https://huggingface.co/datasets/paws). What do you think? However, I added sorting in the tests to fix CI (for the same dataset).
2022-11-04T14:47:37Z
https://github.com/huggingface/datasets/pull/5201
{ "+1": 1, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 1, "url": "https://api.github.com/repos/huggingface/datasets/issues/5201/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5200/comments
https://api.github.com/repos/huggingface/datasets/issues/5200/timeline
2022-11-07T18:40:20Z
null
completed
I_kwDODunzps5VlQ0H
closed
[]
null
5,200
{ "avatar_url": "https://avatars.githubusercontent.com/u/16348744?v=4", "events_url": "https://api.github.com/users/polinaeterna/events{/privacy}", "followers_url": "https://api.github.com/users/polinaeterna/followers", "following_url": "https://api.github.com/users/polinaeterna/following{/other_user}", "gists_url": "https://api.github.com/users/polinaeterna/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/polinaeterna", "id": 16348744, "login": "polinaeterna", "node_id": "MDQ6VXNlcjE2MzQ4NzQ0", "organizations_url": "https://api.github.com/users/polinaeterna/orgs", "received_events_url": "https://api.github.com/users/polinaeterna/received_events", "repos_url": "https://api.github.com/users/polinaeterna/repos", "site_admin": false, "starred_url": "https://api.github.com/users/polinaeterna/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/polinaeterna/subscriptions", "type": "User", "url": "https://api.github.com/users/polinaeterna" }
Some links to canonical datasets in the docs are outdated
https://api.github.com/repos/huggingface/datasets/issues/5200/events
null
https://api.github.com/repos/huggingface/datasets/issues/5200/labels{/name}
2022-11-04T10:06:21Z
null
false
null
null
1,435,831,559
[ { "color": "0075ca", "default": true, "description": "Improvements or additions to documentation", "id": 1935892861, "name": "documentation", "node_id": "MDU6TGFiZWwxOTM1ODkyODYx", "url": "https://api.github.com/repos/huggingface/datasets/labels/documentation" } ]
https://api.github.com/repos/huggingface/datasets/issues/5200
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
As we don't have canonical datasets in the GitHub repo anymore, some old links to them don't work. I don't know how many of them there are; I found a link to SuperGLUE here: https://huggingface.co/docs/datasets/dataset_script#multiple-configurations, and there are probably more. These links should be replaced by links to the corresponding datasets on the Hub.
2022-11-07T18:40:20Z
https://github.com/huggingface/datasets/issues/5200
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5200/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5199/comments
https://api.github.com/repos/huggingface/datasets/issues/5199/timeline
2022-11-04T13:59:47Z
null
null
PR_kwDODunzps5CJSv1
closed
[]
false
5,199
{ "avatar_url": "https://avatars.githubusercontent.com/u/47462742?v=4", "events_url": "https://api.github.com/users/mariosasko/events{/privacy}", "followers_url": "https://api.github.com/users/mariosasko/followers", "following_url": "https://api.github.com/users/mariosasko/following{/other_user}", "gists_url": "https://api.github.com/users/mariosasko/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mariosasko", "id": 47462742, "login": "mariosasko", "node_id": "MDQ6VXNlcjQ3NDYyNzQy", "organizations_url": "https://api.github.com/users/mariosasko/orgs", "received_events_url": "https://api.github.com/users/mariosasko/received_events", "repos_url": "https://api.github.com/users/mariosasko/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mariosasko/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mariosasko/subscriptions", "type": "User", "url": "https://api.github.com/users/mariosasko" }
Deprecate dummy data generation command
https://api.github.com/repos/huggingface/datasets/issues/5199/events
null
https://api.github.com/repos/huggingface/datasets/issues/5199/labels{/name}
2022-11-03T15:05:54Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5199.diff", "html_url": "https://github.com/huggingface/datasets/pull/5199", "merged_at": "2022-11-04T13:59:47Z", "patch_url": "https://github.com/huggingface/datasets/pull/5199.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5199" }
1,434,818,836
[]
https://api.github.com/repos/huggingface/datasets/issues/5199
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
Deprecate the `dummy_data` CLI command.
2022-11-04T14:01:50Z
https://github.com/huggingface/datasets/pull/5199
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5199/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5198/comments
https://api.github.com/repos/huggingface/datasets/issues/5198/timeline
2022-11-04T12:46:01Z
null
null
PR_kwDODunzps5CI49J
closed
[ { "avatar_url": "https://avatars.githubusercontent.com/u/16348744?v=4", "events_url": "https://api.github.com/users/polinaeterna/events{/privacy}", "followers_url": "https://api.github.com/users/polinaeterna/followers", "following_url": "https://api.github.com/users/polinaeterna/following{/other_user}", "gists_url": "https://api.github.com/users/polinaeterna/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/polinaeterna", "id": 16348744, "login": "polinaeterna", "node_id": "MDQ6VXNlcjE2MzQ4NzQ0", "organizations_url": "https://api.github.com/users/polinaeterna/orgs", "received_events_url": "https://api.github.com/users/polinaeterna/received_events", "repos_url": "https://api.github.com/users/polinaeterna/repos", "site_admin": false, "starred_url": "https://api.github.com/users/polinaeterna/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/polinaeterna/subscriptions", "type": "User", "url": "https://api.github.com/users/polinaeterna" } ]
false
5,198
{ "avatar_url": "https://avatars.githubusercontent.com/u/16348744?v=4", "events_url": "https://api.github.com/users/polinaeterna/events{/privacy}", "followers_url": "https://api.github.com/users/polinaeterna/followers", "following_url": "https://api.github.com/users/polinaeterna/following{/other_user}", "gists_url": "https://api.github.com/users/polinaeterna/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/polinaeterna", "id": 16348744, "login": "polinaeterna", "node_id": "MDQ6VXNlcjE2MzQ4NzQ0", "organizations_url": "https://api.github.com/users/polinaeterna/orgs", "received_events_url": "https://api.github.com/users/polinaeterna/received_events", "repos_url": "https://api.github.com/users/polinaeterna/repos", "site_admin": false, "starred_url": "https://api.github.com/users/polinaeterna/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/polinaeterna/subscriptions", "type": "User", "url": "https://api.github.com/users/polinaeterna" }
Add note about the name of a dataset script
https://api.github.com/repos/huggingface/datasets/issues/5198/events
null
https://api.github.com/repos/huggingface/datasets/issues/5198/labels{/name}
2022-11-03T13:51:32Z
null
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/16348744?v=4", "events_url": "https://api.github.com/users/polinaeterna/events{/privacy}", "followers_url": "https://api.github.com/users/polinaeterna/followers", "following_url": "https://api.github.com/users/polinaeterna/following{/other_user}", "gists_url": "https://api.github.com/users/polinaeterna/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/polinaeterna", "id": 16348744, "login": "polinaeterna", "node_id": "MDQ6VXNlcjE2MzQ4NzQ0", "organizations_url": "https://api.github.com/users/polinaeterna/orgs", "received_events_url": "https://api.github.com/users/polinaeterna/received_events", "repos_url": "https://api.github.com/users/polinaeterna/repos", "site_admin": false, "starred_url": "https://api.github.com/users/polinaeterna/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/polinaeterna/subscriptions", "type": "User", "url": "https://api.github.com/users/polinaeterna" }
{ "diff_url": "https://github.com/huggingface/datasets/pull/5198.diff", "html_url": "https://github.com/huggingface/datasets/pull/5198", "merged_at": "2022-11-04T12:46:01Z", "patch_url": "https://github.com/huggingface/datasets/pull/5198.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5198" }
1,434,699,165
[]
https://api.github.com/repos/huggingface/datasets/issues/5198
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
Add a note that a dataset script should have the same name as its repo/dir; a bit related to this issue: https://github.com/huggingface/datasets/issues/5193. Also fixed two minor issues in the audio docs (broken links).
2022-11-04T12:47:59Z
https://github.com/huggingface/datasets/pull/5198
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5198/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5197/comments
https://api.github.com/repos/huggingface/datasets/issues/5197/timeline
null
null
null
PR_kwDODunzps5CI0Ac
open
[]
false
5,197
{ "avatar_url": "https://avatars.githubusercontent.com/u/728699?v=4", "events_url": "https://api.github.com/users/reyoung/events{/privacy}", "followers_url": "https://api.github.com/users/reyoung/followers", "following_url": "https://api.github.com/users/reyoung/following{/other_user}", "gists_url": "https://api.github.com/users/reyoung/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/reyoung", "id": 728699, "login": "reyoung", "node_id": "MDQ6VXNlcjcyODY5OQ==", "organizations_url": "https://api.github.com/users/reyoung/orgs", "received_events_url": "https://api.github.com/users/reyoung/received_events", "repos_url": "https://api.github.com/users/reyoung/repos", "site_admin": false, "starred_url": "https://api.github.com/users/reyoung/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/reyoung/subscriptions", "type": "User", "url": "https://api.github.com/users/reyoung" }
[zstd] Use max window log size
https://api.github.com/repos/huggingface/datasets/issues/5197/events
null
https://api.github.com/repos/huggingface/datasets/issues/5197/labels{/name}
2022-11-03T13:35:58Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5197.diff", "html_url": "https://github.com/huggingface/datasets/pull/5197", "merged_at": null, "patch_url": "https://github.com/huggingface/datasets/pull/5197.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5197" }
1,434,676,150
[]
https://api.github.com/repos/huggingface/datasets/issues/5197
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
ZstdDecompressor has a parameter `max_window_size` to limit the maximum memory usage when decompressing zstd files. The default `max_window_size` is not enough when files are compressed with the `zstd --ultra` flag. This PR changes `max_window_size` to zstd's maximum window size. Note that `zstd.WINDOWLOG_MAX` is the log2 value of the max window size.
2022-11-03T13:45:19Z
https://github.com/huggingface/datasets/pull/5197
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5197/reactions" }
true
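An illustrative sketch of the decompression change described in PR #5197 above, assuming the `zstandard` Python package; the file names are placeholders. Since `WINDOWLOG_MAX` is a log2 value, the byte limit passed to the decompressor is `2 ** WINDOWLOG_MAX`, which lets archives produced with `zstd --ultra` be read.

```python
import zstandard as zstd

# WINDOWLOG_MAX is the log2 of the largest supported window,
# so the actual byte limit is 2 ** WINDOWLOG_MAX.
max_window_size = 2 ** zstd.WINDOWLOG_MAX

# Placeholder file names; stream-decompress while allowing the largest window.
with open("data.jsonl.zst", "rb") as compressed, open("data.jsonl", "wb") as output:
    dctx = zstd.ZstdDecompressor(max_window_size=max_window_size)
    dctx.copy_stream(compressed, output)
```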
https://api.github.com/repos/huggingface/datasets/issues/5196/comments
https://api.github.com/repos/huggingface/datasets/issues/5196/timeline
2022-11-09T07:15:12Z
null
null
PR_kwDODunzps5CH439
closed
[]
false
5,196
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
Use hfh hf_hub_url function
https://api.github.com/repos/huggingface/datasets/issues/5196/events
null
https://api.github.com/repos/huggingface/datasets/issues/5196/labels{/name}
2022-11-03T10:08:09Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5196.diff", "html_url": "https://github.com/huggingface/datasets/pull/5196", "merged_at": "2022-11-09T07:15:12Z", "patch_url": "https://github.com/huggingface/datasets/pull/5196.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5196" }
1,434,401,646
[]
https://api.github.com/repos/huggingface/datasets/issues/5196
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
Small refactoring to use `hf_hub_url` function from `huggingface_hub`. This PR also creates the `hub` module that will contain all `huggingface_hub` functionalities relevant to `datasets`. This is a necessary stage before implementing the use of the `hfh` caching system (which uses its `hf_hub_url` under the hood). EDIT: ~~Finally, we use our `config.HUB_DATASETS_URL` when using `hfh.hf_hub_url`~~ There is a breaking change: the `hfh` `hf_hub_url` function uses - `hfh` `HUGGINGFACE_CO_URL_TEMPLATE` URL template, different from the `datasets` `config.HUB_DATASETS_URL` - also, `hfh` `DEFAULT_REVISION`, instead of `datasets` `config.HUB_DEFAULT_VERSION`
2022-12-06T11:38:17Z
https://github.com/huggingface/datasets/pull/5196
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5196/reactions" }
true
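For reference, a hedged sketch of how the `huggingface_hub` `hf_hub_url` helper adopted in PR #5196 above is typically called; the repo id and filename are made-up placeholders, and, as the PR notes, the URL template and default revision come from `huggingface_hub` itself rather than from `datasets.config`.

```python
from huggingface_hub import hf_hub_url

# Hypothetical example: build the resolve URL for a file in a dataset repo.
url = hf_hub_url(
    repo_id="user/my-dataset",   # placeholder repo
    filename="data/train.csv",   # placeholder file
    repo_type="dataset",
    revision="main",
)
print(url)  # e.g. https://huggingface.co/datasets/user/my-dataset/resolve/main/data/train.csv
```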
https://api.github.com/repos/huggingface/datasets/issues/5195/comments
https://api.github.com/repos/huggingface/datasets/issues/5195/timeline
2023-04-04T15:10:33Z
null
null
PR_kwDODunzps5CHhF2
closed
[]
false
5,195
{ "avatar_url": "https://avatars.githubusercontent.com/u/11827707?v=4", "events_url": "https://api.github.com/users/mishig25/events{/privacy}", "followers_url": "https://api.github.com/users/mishig25/followers", "following_url": "https://api.github.com/users/mishig25/following{/other_user}", "gists_url": "https://api.github.com/users/mishig25/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mishig25", "id": 11827707, "login": "mishig25", "node_id": "MDQ6VXNlcjExODI3NzA3", "organizations_url": "https://api.github.com/users/mishig25/orgs", "received_events_url": "https://api.github.com/users/mishig25/received_events", "repos_url": "https://api.github.com/users/mishig25/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mishig25/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mishig25/subscriptions", "type": "User", "url": "https://api.github.com/users/mishig25" }
[wip testing docs]
https://api.github.com/repos/huggingface/datasets/issues/5195/events
null
https://api.github.com/repos/huggingface/datasets/issues/5195/labels{/name}
2022-11-03T08:37:34Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5195.diff", "html_url": "https://github.com/huggingface/datasets/pull/5195", "merged_at": null, "patch_url": "https://github.com/huggingface/datasets/pull/5195.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5195" }
1,434,290,689
[]
https://api.github.com/repos/huggingface/datasets/issues/5195
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
null
2023-04-04T15:10:37Z
https://github.com/huggingface/datasets/pull/5195
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5195/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5194/comments
https://api.github.com/repos/huggingface/datasets/issues/5194/timeline
2022-11-03T13:29:21Z
null
null
PR_kwDODunzps5CHPNY
closed
[]
false
5,194
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
Fix docs about dataset_info in YAML
https://api.github.com/repos/huggingface/datasets/issues/5194/events
null
https://api.github.com/repos/huggingface/datasets/issues/5194/labels{/name}
2022-11-03T07:10:23Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5194.diff", "html_url": "https://github.com/huggingface/datasets/pull/5194", "merged_at": "2022-11-03T13:29:21Z", "patch_url": "https://github.com/huggingface/datasets/pull/5194.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5194" }
1,434,206,951
[]
https://api.github.com/repos/huggingface/datasets/issues/5194
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
This PR fixes some misalignment in the docs after we transferred the dataset_info from `dataset_infos.json` to YAML in the dataset card: - #4926 Related to: - #5193
2022-11-03T13:31:27Z
https://github.com/huggingface/datasets/pull/5194
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5194/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5193/comments
https://api.github.com/repos/huggingface/datasets/issues/5193/timeline
2022-11-03T13:35:44Z
null
completed
I_kwDODunzps5Vd1SE
closed
[]
null
5,193
{ "avatar_url": "https://avatars.githubusercontent.com/u/20109584?v=4", "events_url": "https://api.github.com/users/lambda-science/events{/privacy}", "followers_url": "https://api.github.com/users/lambda-science/followers", "following_url": "https://api.github.com/users/lambda-science/following{/other_user}", "gists_url": "https://api.github.com/users/lambda-science/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lambda-science", "id": 20109584, "login": "lambda-science", "node_id": "MDQ6VXNlcjIwMTA5NTg0", "organizations_url": "https://api.github.com/users/lambda-science/orgs", "received_events_url": "https://api.github.com/users/lambda-science/received_events", "repos_url": "https://api.github.com/users/lambda-science/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lambda-science/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lambda-science/subscriptions", "type": "User", "url": "https://api.github.com/users/lambda-science" }
"One or several metadata. were found, but not in the same directory or in a parent directory"
https://api.github.com/repos/huggingface/datasets/issues/5193/events
null
https://api.github.com/repos/huggingface/datasets/issues/5193/labels{/name}
2022-11-02T22:46:25Z
null
false
null
null
1,433,883,780
[]
https://api.github.com/repos/huggingface/datasets/issues/5193
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
### Describe the bug When loading my own dataset, on loading it I get an error. Here is my dataset link: https://huggingface.co/datasets/corentinm7/MyoQuant-SDH-Data And the error after loading with: ```python from datasets import load_dataset load_dataset("corentinm7/MyoQuant-SDH-Data") ``` ```python Downloading readme: 100%|████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 3.34k/3.34k [00:00<00:00, 4.45MB/s] Using custom data configuration SDH_16k-53e7301a92ab0025 Downloading and preparing dataset None/SDH_16k to /home/corentin/.cache/huggingface/datasets/corentinm7___imagefolder/SDH_16k-53e7301a92ab0025/0.0.0/37fbb85cc714a338bea574ac6c7d0b5be5aff46c1862c1989b20e0771199e93f... Downloading data: 100%|██████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 3.28M/3.28M [00:00<00:00, 4.31MB/s] Downloading data files: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 1/1 [00:01<00:00, 1.75s/it] Downloading data: 100%|██████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 1.13G/1.13G [00:15<00:00, 74.3MB/s] Downloading data files: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 1/1 [00:16<00:00, 16.09s/it] Extracting data files: 100%|████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 1/1 [00:13<00:00, 13.16s/it] Traceback (most recent call last): File "<stdin>", line 1, in <module> File "/home/corentin/code-project/hugging_face_play/.venv/lib/python3.10/site-packages/datasets/load.py", line 1742, in load_dataset builder_instance.download_and_prepare( File "/home/corentin/code-project/hugging_face_play/.venv/lib/python3.10/site-packages/datasets/builder.py", line 814, in download_and_prepare self._download_and_prepare( File "/home/corentin/code-project/hugging_face_play/.venv/lib/python3.10/site-packages/datasets/builder.py", line 1423, in _download_and_prepare super()._download_and_prepare( File "/home/corentin/code-project/hugging_face_play/.venv/lib/python3.10/site-packages/datasets/builder.py", line 905, in _download_and_prepare self._prepare_split(split_generator, **prepare_split_kwargs) File "/home/corentin/code-project/hugging_face_play/.venv/lib/python3.10/site-packages/datasets/builder.py", line 1374, in _prepare_split for key, record in logging.tqdm( File "/home/corentin/code-project/hugging_face_play/.venv/lib/python3.10/site-packages/tqdm/std.py", line 1195, in __iter__ for obj in iterable: File "/home/corentin/code-project/hugging_face_play/.venv/lib/python3.10/site-packages/datasets/packaged_modules/folder_based_builder/folder_based_builder.py", line 394, in _generate_examples raise ValueError( ValueError: One or several metadata. 
were found, but not in the same directory or in a parent directory of /home/corentin/.cache/huggingface/datasets/downloads/extracted/60c4aa8d4da3065bb3d310de4373dffd73bd4dc331aedcb4ee867febe4fdb7cd/validation/sick/2_CG_SDH_TAM_Bin1cKO_ko_pla_4_1640.tif. ``` However the test command is working fine. ```datasets-cli test hugging_face_play/ds_test/SDH_16k.py --save_info --all_configs --force_redownload``` ``` Using custom data configuration SDH_16k Testing builder 'SDH_16k' (1/1) Downloading and preparing dataset sdh_16k/SDH_16k to /home/corentin/.cache/huggingface/datasets/sdh_16k/SDH_16k/1.0.0/21b584239a638aeeda33cba1ac2ca4869d48e4b4f20fb22274d5a5ddc487659d... Downloading data: 100%|██████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 1.13G/1.13G [00:14<00:00, 76.5MB/s] Downloading data files: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 1/1 [00:15<00:00, 15.66s/it] Downloading data: 100%|██████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 3.28M/3.28M [00:02<00:00, 1.44MB/s] Downloading data files: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 1/1 [00:03<00:00, 3.21s/it] Downloading data files: 100%|████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 1/1 [00:00<00:00, 11586.48it/s] Extracting data files: 100%|████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 1/1 [00:13<00:00, 13.42s/it] Dataset sdh_16k downloaded and prepared to /home/corentin/.cache/huggingface/datasets/sdh_16k/SDH_16k/1.0.0/21b584239a638aeeda33cba1ac2ca4869d48e4b4f20fb22274d5a5ddc487659d. Subsequent calls will reuse this data. 100%|██████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 3/3 [00:00<00:00, 605.27it/s] Dataset card saved at hugging_face_play/ds_test/README.md Test successful. ``` ### Steps to reproduce the bug Simply run on python ```python from datasets import load_dataset load_dataset("corentinm7/MyoQuant-SDH-Data") ``` ### Expected behavior As the test command worked, this error should not appear ### Environment info - `datasets` version: 2.6.1 - Platform: Linux-5.10.16.3-microsoft-standard-WSL2-x86_64-with-glibc2.31 - Python version: 3.10.6 - PyArrow version: 10.0.0 - Pandas version: 1.5.1
2022-11-03T13:39:16Z
https://github.com/huggingface/datasets/issues/5193
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5193/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5192/comments
https://api.github.com/repos/huggingface/datasets/issues/5192/timeline
2022-11-15T16:31:07Z
null
null
PR_kwDODunzps5CD2BQ
closed
[ { "avatar_url": "https://avatars.githubusercontent.com/u/16348744?v=4", "events_url": "https://api.github.com/users/polinaeterna/events{/privacy}", "followers_url": "https://api.github.com/users/polinaeterna/followers", "following_url": "https://api.github.com/users/polinaeterna/following{/other_user}", "gists_url": "https://api.github.com/users/polinaeterna/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/polinaeterna", "id": 16348744, "login": "polinaeterna", "node_id": "MDQ6VXNlcjE2MzQ4NzQ0", "organizations_url": "https://api.github.com/users/polinaeterna/orgs", "received_events_url": "https://api.github.com/users/polinaeterna/received_events", "repos_url": "https://api.github.com/users/polinaeterna/repos", "site_admin": false, "starred_url": "https://api.github.com/users/polinaeterna/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/polinaeterna/subscriptions", "type": "User", "url": "https://api.github.com/users/polinaeterna" } ]
false
5,192
{ "avatar_url": "https://avatars.githubusercontent.com/u/16348744?v=4", "events_url": "https://api.github.com/users/polinaeterna/events{/privacy}", "followers_url": "https://api.github.com/users/polinaeterna/followers", "following_url": "https://api.github.com/users/polinaeterna/following{/other_user}", "gists_url": "https://api.github.com/users/polinaeterna/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/polinaeterna", "id": 16348744, "login": "polinaeterna", "node_id": "MDQ6VXNlcjE2MzQ4NzQ0", "organizations_url": "https://api.github.com/users/polinaeterna/orgs", "received_events_url": "https://api.github.com/users/polinaeterna/received_events", "repos_url": "https://api.github.com/users/polinaeterna/repos", "site_admin": false, "starred_url": "https://api.github.com/users/polinaeterna/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/polinaeterna/subscriptions", "type": "User", "url": "https://api.github.com/users/polinaeterna" }
Drop labels in Image and Audio folders if files are on different levels in directory or if there is only one label
https://api.github.com/repos/huggingface/datasets/issues/5192/events
null
https://api.github.com/repos/huggingface/datasets/issues/5192/labels{/name}
2022-11-02T14:01:41Z
null
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/16348744?v=4", "events_url": "https://api.github.com/users/polinaeterna/events{/privacy}", "followers_url": "https://api.github.com/users/polinaeterna/followers", "following_url": "https://api.github.com/users/polinaeterna/following{/other_user}", "gists_url": "https://api.github.com/users/polinaeterna/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/polinaeterna", "id": 16348744, "login": "polinaeterna", "node_id": "MDQ6VXNlcjE2MzQ4NzQ0", "organizations_url": "https://api.github.com/users/polinaeterna/orgs", "received_events_url": "https://api.github.com/users/polinaeterna/received_events", "repos_url": "https://api.github.com/users/polinaeterna/repos", "site_admin": false, "starred_url": "https://api.github.com/users/polinaeterna/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/polinaeterna/subscriptions", "type": "User", "url": "https://api.github.com/users/polinaeterna" }
{ "diff_url": "https://github.com/huggingface/datasets/pull/5192.diff", "html_url": "https://github.com/huggingface/datasets/pull/5192", "merged_at": "2022-11-15T16:31:07Z", "patch_url": "https://github.com/huggingface/datasets/pull/5192.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5192" }
1,433,199,790
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
https://api.github.com/repos/huggingface/datasets/issues/5192
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
Will close https://github.com/huggingface/datasets/issues/5153 Drop labels by default (`drop_labels=None`) when: * there are files on different levels of the directory hierarchy (checked via their path depth) * all files are in the same directory (= only one label was inferred) The first case fixes layouts like this: ``` repo image3.jpg image4.jpg data image1.jpg image2.jpg ``` The second case fixes layouts like this: ``` repo image1.jpg image2.jpg image3.jpg ``` This is mostly to fix the viewer for people who just drop images in the Hub interface into the root dir. I added tests for both cases on local and remote files. **I also changed the data files for the old drop_labels test** (`test_generate_examples_drop_labels`). The files I provide to `test_generate_examples_drop_labels` now have a "canonical" classification structure (two dirs) in order not to change the logic of the test (i.e., not to check the two cases addressed in this PR).
2022-11-15T16:32:53Z
https://github.com/huggingface/datasets/pull/5192
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5192/reactions" }
true
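A toy, simplified sketch of the heuristic described in PR #5192 above (not the actual implementation): labels would be dropped when files sit at different directory depths, or when only a single label directory can be inferred.

```python
from pathlib import PurePath

def should_drop_labels(files):
    """Simplified stand-in for the drop_labels=None heuristic described above."""
    depths = {len(PurePath(f).parts) for f in files}          # path depth per file
    labels = {PurePath(f).parent.name for f in files}         # inferred label = parent dir name
    return len(depths) > 1 or len(labels) <= 1

print(should_drop_labels(["image3.jpg", "data/image1.jpg"]))  # True: mixed depths
print(should_drop_labels(["image1.jpg", "image2.jpg"]))       # True: only the root "label"
print(should_drop_labels(["cat/1.jpg", "dog/2.jpg"]))         # False: classic classification layout
```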
https://api.github.com/repos/huggingface/datasets/issues/5191/comments
https://api.github.com/repos/huggingface/datasets/issues/5191/timeline
2022-11-02T17:18:42Z
null
null
PR_kwDODunzps5CD0Qp
closed
[]
false
5,191
{ "avatar_url": "https://avatars.githubusercontent.com/u/47462742?v=4", "events_url": "https://api.github.com/users/mariosasko/events{/privacy}", "followers_url": "https://api.github.com/users/mariosasko/followers", "following_url": "https://api.github.com/users/mariosasko/following{/other_user}", "gists_url": "https://api.github.com/users/mariosasko/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mariosasko", "id": 47462742, "login": "mariosasko", "node_id": "MDQ6VXNlcjQ3NDYyNzQy", "organizations_url": "https://api.github.com/users/mariosasko/orgs", "received_events_url": "https://api.github.com/users/mariosasko/received_events", "repos_url": "https://api.github.com/users/mariosasko/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mariosasko/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mariosasko/subscriptions", "type": "User", "url": "https://api.github.com/users/mariosasko" }
Make torch.Tensor and spacy models cacheable
https://api.github.com/repos/huggingface/datasets/issues/5191/events
null
https://api.github.com/repos/huggingface/datasets/issues/5191/labels{/name}
2022-11-02T13:56:18Z
null
false
null
{ "diff_url": "https://github.com/huggingface/datasets/pull/5191.diff", "html_url": "https://github.com/huggingface/datasets/pull/5191", "merged_at": "2022-11-02T17:18:42Z", "patch_url": "https://github.com/huggingface/datasets/pull/5191.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5191" }
1,433,191,658
[]
https://api.github.com/repos/huggingface/datasets/issues/5191
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
Override `Pickler.save` to implement deterministic, lazily registered reduction functions (inspired by https://github.com/uqfoundation/dill/blob/master/dill/_dill.py#L343) for `torch.Tensor` and spaCy models. Fix https://github.com/huggingface/datasets/issues/5170, fix https://github.com/huggingface/datasets/issues/3178
2022-11-02T17:20:48Z
https://github.com/huggingface/datasets/pull/5191
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5191/reactions" }
true
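A rough, hypothetical sketch of the idea behind PR #5191 above, not the merged implementation: reducing a `torch.Tensor` through its NumPy data so that pickling it (and therefore hashing transforms that capture tensors) becomes deterministic. The PR registers such reducers lazily on `datasets`' own `Pickler`; plain `copyreg` is used here only for illustration.

```python
import copyreg
import torch

def _reduce_tensor(tensor: torch.Tensor):
    # Reduce via the underlying data so equal tensors pickle to equal bytes.
    return torch.from_numpy, (tensor.detach().cpu().numpy(),)

# Illustration only: register globally (the actual fix scopes this to datasets' Pickler).
copyreg.pickle(torch.Tensor, _reduce_tensor)
```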
https://api.github.com/repos/huggingface/datasets/issues/5190/comments
https://api.github.com/repos/huggingface/datasets/issues/5190/timeline
2022-11-02T12:55:02Z
null
completed
I_kwDODunzps5VahFi
closed
[]
null
5,190
{ "avatar_url": "https://avatars.githubusercontent.com/u/26859204?v=4", "events_url": "https://api.github.com/users/lewtun/events{/privacy}", "followers_url": "https://api.github.com/users/lewtun/followers", "following_url": "https://api.github.com/users/lewtun/following{/other_user}", "gists_url": "https://api.github.com/users/lewtun/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lewtun", "id": 26859204, "login": "lewtun", "node_id": "MDQ6VXNlcjI2ODU5MjA0", "organizations_url": "https://api.github.com/users/lewtun/orgs", "received_events_url": "https://api.github.com/users/lewtun/received_events", "repos_url": "https://api.github.com/users/lewtun/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lewtun/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lewtun/subscriptions", "type": "User", "url": "https://api.github.com/users/lewtun" }
`path` is `None` when downloading a custom audio dataset from the Hub
https://api.github.com/repos/huggingface/datasets/issues/5190/events
null
https://api.github.com/repos/huggingface/datasets/issues/5190/labels{/name}
2022-11-02T11:51:25Z
null
false
null
null
1,433,014,626
[]
https://api.github.com/repos/huggingface/datasets/issues/5190
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
### Describe the bug I've created an [audio dataset](https://huggingface.co/datasets/lewtun/audio-test-push) using the `audiofolder` feature described in the [docs](https://huggingface.co/docs/datasets/audio_dataset#audiofolder) and then pushed it to the Hub. Locally, I can see the `audio.path` feature is of the expected form `path/to/data_dir`, but when I download the dataset from the Hub, I see `audio.path` is `None`. Here's an example: ```python from datasets import load_dataset ds = load_dataset("lewtun/audio-test-push") ds["train"][0] # { # "audio": { # "path": None, <-- Is this expected? # "array": array( # [ # 3.97140226e-07, # 7.30310290e-07, # 7.56406735e-07, # ..., # -1.19636677e-01, # -1.16811886e-01, # -1.12441722e-01, # ] # ), # "sampling_rate": 44100, # }, # "song_id": 0, # "genre_id": 0, # "genre": "Electronic", # } ``` Is this expected behaviour? If yes, feel free to close this issue as it's not a true bug then :) ### Steps to reproduce the bug 1. Create an audio dataset with the `audiofolder` feature 2. Push the dataset to the Hub with `push_to_hub()` 3. Download the Hub dataset and inspect the `audio.path` feature ### Expected behavior `audio.path` points to the file associated with the audio data ### Environment info - `datasets` version: 2.6.2.dev0 - Platform: macOS-10.16-x86_64-i386-64bit - Python version: 3.8.13 - PyArrow version: 9.0.0 - Pandas version: 1.5.1
2022-11-02T12:55:02Z
https://github.com/huggingface/datasets/issues/5190
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5190/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5189/comments
https://api.github.com/repos/huggingface/datasets/issues/5189/timeline
null
null
null
I_kwDODunzps5VZlJ3
open
[ { "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" } ]
null
5,189
{ "avatar_url": "https://avatars.githubusercontent.com/u/53175384?v=4", "events_url": "https://api.github.com/users/merveenoyan/events{/privacy}", "followers_url": "https://api.github.com/users/merveenoyan/followers", "following_url": "https://api.github.com/users/merveenoyan/following{/other_user}", "gists_url": "https://api.github.com/users/merveenoyan/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/merveenoyan", "id": 53175384, "login": "merveenoyan", "node_id": "MDQ6VXNlcjUzMTc1Mzg0", "organizations_url": "https://api.github.com/users/merveenoyan/orgs", "received_events_url": "https://api.github.com/users/merveenoyan/received_events", "repos_url": "https://api.github.com/users/merveenoyan/repos", "site_admin": false, "starred_url": "https://api.github.com/users/merveenoyan/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/merveenoyan/subscriptions", "type": "User", "url": "https://api.github.com/users/merveenoyan" }
Reduce friction in tabular dataset workflow by eliminating having splits when dataset is loaded
https://api.github.com/repos/huggingface/datasets/issues/5189/events
null
https://api.github.com/repos/huggingface/datasets/issues/5189/labels{/name}
2022-11-02T09:15:02Z
null
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" }
null
1,432,769,143
[ { "color": "a2eeef", "default": true, "description": "New feature or request", "id": 1935892871, "name": "enhancement", "node_id": "MDU6TGFiZWwxOTM1ODkyODcx", "url": "https://api.github.com/repos/huggingface/datasets/labels/enhancement" } ]
https://api.github.com/repos/huggingface/datasets/issues/5189
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
### Feature request Sorry for the cryptic name, but I'd like to explain using the code itself. When I want to load a specific dataset from a repository (for instance, this: https://huggingface.co/datasets/inria-soda/tabular-benchmark) ```python from datasets import load_dataset dataset = load_dataset("inria-soda/tabular-benchmark", data_files=["reg_cat/house_sales.csv"], streaming=True) print(next(iter(dataset["train"]))) ``` The `datasets` library is essentially designed for people who'd like to use benchmark datasets on various modalities to fine-tune their models, and these benchmark datasets usually have pre-defined train and test splits. However, for tabular workflows, having train and test splits usually ends up with the model overfitting to the validation split, so users prefer validation techniques like `StratifiedKFoldCrossValidation`, or `GridSearchCrossValidation` when they tune hyperparameters; the common behavior is therefore to create their own splits. Even [in this paper](https://hal.archives-ouvertes.fr/hal-03723551) a benchmark is introduced, but the split is done by the authors. It's a bit confusing for the average tabular user to try and load a dataset and see `"train"`, so it would be nice if we did not load the dataset into a split called `train` by default. ```diff from datasets import load_dataset dataset = load_dataset("inria-soda/tabular-benchmark", data_files=["reg_cat/house_sales.csv"], streaming=True) -print(next(iter(dataset["train"]))) +print(next(iter(dataset))) ``` ### Motivation I explained it above 😅 ### Your contribution I think this is quite a big change that seems small (e.g. how to determine which datasets should not be loaded into a train split?), so it's best if we discuss first!
2022-12-06T12:13:17Z
https://github.com/huggingface/datasets/issues/5189
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5189/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5188/comments
https://api.github.com/repos/huggingface/datasets/issues/5188/timeline
2022-11-04T18:23:34Z
null
null
PR_kwDODunzps5CBaoQ
closed
[ { "avatar_url": "https://avatars.githubusercontent.com/u/22957388?v=4", "events_url": "https://api.github.com/users/sayakpaul/events{/privacy}", "followers_url": "https://api.github.com/users/sayakpaul/followers", "following_url": "https://api.github.com/users/sayakpaul/following{/other_user}", "gists_url": "https://api.github.com/users/sayakpaul/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/sayakpaul", "id": 22957388, "login": "sayakpaul", "node_id": "MDQ6VXNlcjIyOTU3Mzg4", "organizations_url": "https://api.github.com/users/sayakpaul/orgs", "received_events_url": "https://api.github.com/users/sayakpaul/received_events", "repos_url": "https://api.github.com/users/sayakpaul/repos", "site_admin": false, "starred_url": "https://api.github.com/users/sayakpaul/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/sayakpaul/subscriptions", "type": "User", "url": "https://api.github.com/users/sayakpaul" } ]
false
5,188
{ "avatar_url": "https://avatars.githubusercontent.com/u/22957388?v=4", "events_url": "https://api.github.com/users/sayakpaul/events{/privacy}", "followers_url": "https://api.github.com/users/sayakpaul/followers", "following_url": "https://api.github.com/users/sayakpaul/following{/other_user}", "gists_url": "https://api.github.com/users/sayakpaul/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/sayakpaul", "id": 22957388, "login": "sayakpaul", "node_id": "MDQ6VXNlcjIyOTU3Mzg4", "organizations_url": "https://api.github.com/users/sayakpaul/orgs", "received_events_url": "https://api.github.com/users/sayakpaul/received_events", "repos_url": "https://api.github.com/users/sayakpaul/repos", "site_admin": false, "starred_url": "https://api.github.com/users/sayakpaul/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/sayakpaul/subscriptions", "type": "User", "url": "https://api.github.com/users/sayakpaul" }
add: segmentation guide.
https://api.github.com/repos/huggingface/datasets/issues/5188/events
null
https://api.github.com/repos/huggingface/datasets/issues/5188/labels{/name}
2022-11-02T04:34:36Z
null
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/22957388?v=4", "events_url": "https://api.github.com/users/sayakpaul/events{/privacy}", "followers_url": "https://api.github.com/users/sayakpaul/followers", "following_url": "https://api.github.com/users/sayakpaul/following{/other_user}", "gists_url": "https://api.github.com/users/sayakpaul/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/sayakpaul", "id": 22957388, "login": "sayakpaul", "node_id": "MDQ6VXNlcjIyOTU3Mzg4", "organizations_url": "https://api.github.com/users/sayakpaul/orgs", "received_events_url": "https://api.github.com/users/sayakpaul/received_events", "repos_url": "https://api.github.com/users/sayakpaul/repos", "site_admin": false, "starred_url": "https://api.github.com/users/sayakpaul/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/sayakpaul/subscriptions", "type": "User", "url": "https://api.github.com/users/sayakpaul" }
{ "diff_url": "https://github.com/huggingface/datasets/pull/5188.diff", "html_url": "https://github.com/huggingface/datasets/pull/5188", "merged_at": "2022-11-04T18:23:34Z", "patch_url": "https://github.com/huggingface/datasets/pull/5188.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5188" }
1,432,477,139
[ { "color": "0075ca", "default": true, "description": "Improvements or additions to documentation", "id": 1935892861, "name": "documentation", "node_id": "MDU6TGFiZWwxOTM1ODkyODYx", "url": "https://api.github.com/repos/huggingface/datasets/labels/documentation" } ]
https://api.github.com/repos/huggingface/datasets/issues/5188
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
Closes #5181 I have opened a PR on Hub (https://huggingface.co/datasets/huggingface/documentation-images/discussions/5) to include the images in our central Hub repository. Once the PR is merged I will edit the image links. I have also prepared a [Colab Notebook](https://colab.research.google.com/drive/1BMDCfOTBnyshoME5RSxn5iQy-TWeFbOA?usp=sharing) in case anyone wants to play. - [x] Replace the image links
2022-11-04T18:25:57Z
https://github.com/huggingface/datasets/pull/5188
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5188/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5187/comments
https://api.github.com/repos/huggingface/datasets/issues/5187/timeline
2022-11-03T01:49:56Z
null
null
PR_kwDODunzps5CBE08
closed
[ { "avatar_url": "https://avatars.githubusercontent.com/u/22957388?v=4", "events_url": "https://api.github.com/users/sayakpaul/events{/privacy}", "followers_url": "https://api.github.com/users/sayakpaul/followers", "following_url": "https://api.github.com/users/sayakpaul/following{/other_user}", "gists_url": "https://api.github.com/users/sayakpaul/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/sayakpaul", "id": 22957388, "login": "sayakpaul", "node_id": "MDQ6VXNlcjIyOTU3Mzg4", "organizations_url": "https://api.github.com/users/sayakpaul/orgs", "received_events_url": "https://api.github.com/users/sayakpaul/received_events", "repos_url": "https://api.github.com/users/sayakpaul/repos", "site_admin": false, "starred_url": "https://api.github.com/users/sayakpaul/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/sayakpaul/subscriptions", "type": "User", "url": "https://api.github.com/users/sayakpaul" } ]
false
5,187
{ "avatar_url": "https://avatars.githubusercontent.com/u/22957388?v=4", "events_url": "https://api.github.com/users/sayakpaul/events{/privacy}", "followers_url": "https://api.github.com/users/sayakpaul/followers", "following_url": "https://api.github.com/users/sayakpaul/following{/other_user}", "gists_url": "https://api.github.com/users/sayakpaul/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/sayakpaul", "id": 22957388, "login": "sayakpaul", "node_id": "MDQ6VXNlcjIyOTU3Mzg4", "organizations_url": "https://api.github.com/users/sayakpaul/orgs", "received_events_url": "https://api.github.com/users/sayakpaul/received_events", "repos_url": "https://api.github.com/users/sayakpaul/repos", "site_admin": false, "starred_url": "https://api.github.com/users/sayakpaul/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/sayakpaul/subscriptions", "type": "User", "url": "https://api.github.com/users/sayakpaul" }
chore: add notebook links to img cls and obj det.
https://api.github.com/repos/huggingface/datasets/issues/5187/events
null
https://api.github.com/repos/huggingface/datasets/issues/5187/labels{/name}
2022-11-02T02:30:09Z
null
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/22957388?v=4", "events_url": "https://api.github.com/users/sayakpaul/events{/privacy}", "followers_url": "https://api.github.com/users/sayakpaul/followers", "following_url": "https://api.github.com/users/sayakpaul/following{/other_user}", "gists_url": "https://api.github.com/users/sayakpaul/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/sayakpaul", "id": 22957388, "login": "sayakpaul", "node_id": "MDQ6VXNlcjIyOTU3Mzg4", "organizations_url": "https://api.github.com/users/sayakpaul/orgs", "received_events_url": "https://api.github.com/users/sayakpaul/received_events", "repos_url": "https://api.github.com/users/sayakpaul/repos", "site_admin": false, "starred_url": "https://api.github.com/users/sayakpaul/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/sayakpaul/subscriptions", "type": "User", "url": "https://api.github.com/users/sayakpaul" }
{ "diff_url": "https://github.com/huggingface/datasets/pull/5187.diff", "html_url": "https://github.com/huggingface/datasets/pull/5187", "merged_at": "2022-11-03T01:49:56Z", "patch_url": "https://github.com/huggingface/datasets/pull/5187.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/5187" }
1,432,375,375
[ { "color": "a2eeef", "default": true, "description": "New feature or request", "id": 1935892871, "name": "enhancement", "node_id": "MDU6TGFiZWwxOTM1ODkyODcx", "url": "https://api.github.com/repos/huggingface/datasets/labels/enhancement" } ]
https://api.github.com/repos/huggingface/datasets/issues/5187
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
Closes https://github.com/huggingface/datasets/issues/5182
2022-11-03T01:52:24Z
https://github.com/huggingface/datasets/pull/5187
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5187/reactions" }
true
https://api.github.com/repos/huggingface/datasets/issues/5186/comments
https://api.github.com/repos/huggingface/datasets/issues/5186/timeline
2022-11-15T18:24:39Z
null
completed
I_kwDODunzps5VW0XT
closed
[ { "avatar_url": "https://avatars.githubusercontent.com/u/47462742?v=4", "events_url": "https://api.github.com/users/mariosasko/events{/privacy}", "followers_url": "https://api.github.com/users/mariosasko/followers", "following_url": "https://api.github.com/users/mariosasko/following{/other_user}", "gists_url": "https://api.github.com/users/mariosasko/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mariosasko", "id": 47462742, "login": "mariosasko", "node_id": "MDQ6VXNlcjQ3NDYyNzQy", "organizations_url": "https://api.github.com/users/mariosasko/orgs", "received_events_url": "https://api.github.com/users/mariosasko/received_events", "repos_url": "https://api.github.com/users/mariosasko/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mariosasko/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mariosasko/subscriptions", "type": "User", "url": "https://api.github.com/users/mariosasko" } ]
null
5,186
{ "avatar_url": "https://avatars.githubusercontent.com/u/32437151?v=4", "events_url": "https://api.github.com/users/nateraw/events{/privacy}", "followers_url": "https://api.github.com/users/nateraw/followers", "following_url": "https://api.github.com/users/nateraw/following{/other_user}", "gists_url": "https://api.github.com/users/nateraw/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/nateraw", "id": 32437151, "login": "nateraw", "node_id": "MDQ6VXNlcjMyNDM3MTUx", "organizations_url": "https://api.github.com/users/nateraw/orgs", "received_events_url": "https://api.github.com/users/nateraw/received_events", "repos_url": "https://api.github.com/users/nateraw/repos", "site_admin": false, "starred_url": "https://api.github.com/users/nateraw/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/nateraw/subscriptions", "type": "User", "url": "https://api.github.com/users/nateraw" }
Incorrect error message when Dataset.from_sql fails and sqlalchemy not installed
https://api.github.com/repos/huggingface/datasets/issues/5186/events
null
https://api.github.com/repos/huggingface/datasets/issues/5186/labels{/name}
2022-11-01T20:25:51Z
null
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/47462742?v=4", "events_url": "https://api.github.com/users/mariosasko/events{/privacy}", "followers_url": "https://api.github.com/users/mariosasko/followers", "following_url": "https://api.github.com/users/mariosasko/following{/other_user}", "gists_url": "https://api.github.com/users/mariosasko/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mariosasko", "id": 47462742, "login": "mariosasko", "node_id": "MDQ6VXNlcjQ3NDYyNzQy", "organizations_url": "https://api.github.com/users/mariosasko/orgs", "received_events_url": "https://api.github.com/users/mariosasko/received_events", "repos_url": "https://api.github.com/users/mariosasko/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mariosasko/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mariosasko/subscriptions", "type": "User", "url": "https://api.github.com/users/mariosasko" }
null
1,432,045,011
[]
https://api.github.com/repos/huggingface/datasets/issues/5186
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
### Describe the bug When calling `Dataset.from_sql` (in my case, with sqlite3), it fails with a message ```ValueError: Please pass `features` or at least one example when writing data``` when I don't have `sqlalchemy` installed. ### Steps to reproduce the bug Make a new sqlite db with `sqlite3` and `pandas` from a remote [URL](https://raw.githubusercontent.com/nytimes/covid-19-data/master/us-states.csv). ```python import sqlite3 import pandas as pd from datasets import Dataset conn = sqlite3.connect('us_covid_data.db') df = pd.read_csv('https://raw.githubusercontent.com/nytimes/covid-19-data/master/us-states.csv') df.to_sql('states', conn, if_exists='replace') ``` Then if you try to query this DB like this: ```python ds = Dataset.from_sql('''SELECT * from states WHERE state=="New York";''', "sqlite:///us_covid_data.db") ``` You run into the error I described above: ```ValueError: Please pass `features` or at least one example when writing data``` However, if you try to pass features, as the error suggests, then you get an error that tells you the underlying problem... ```python from datasets import Dataset, Features, Value features = Features({ 'date': Value('date32'), 'label': Value('string'), 'fips': Value('int32'), 'cases': Value('int32'), 'deaths': Value('int32') }) ds = Dataset.from_sql( '''SELECT * from states WHERE state=="New York";''', "sqlite:///us_covid_data.db", features=features ) ``` Which results in the actual underlying error: `ImportError: Using URI string without sqlalchemy installed.` ### Expected behavior Instead of `ValueError` about needing to pass features, we should provide the actual underlying error about not having SQLAlchemy installed when it isn't found in the environment. ### Environment info - `datasets` version: 2.6.1 - Platform: macOS-10.16-x86_64-i386-64bit - Python version: 3.8.10 - PyArrow version: 10.0.0 - Pandas version: 1.2.5
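[Editor's note: the sketch below is not the actual `datasets` internals; it only illustrates the kind of early dependency check the issue asks for. The helper name `_require_sqlalchemy_for_uri` is invented for illustration.]

```python
import importlib.util


def _require_sqlalchemy_for_uri(con) -> None:
    # Hypothetical guard: if a URI string is passed but sqlalchemy is not installed,
    # surface the real cause up front instead of letting the user hit an unrelated
    # ValueError about missing features later in the writer.
    if isinstance(con, str) and importlib.util.find_spec("sqlalchemy") is None:
        raise ImportError(
            "Using a URI string for `con` requires sqlalchemy: pip install sqlalchemy"
        )


# Example: this would raise ImportError immediately in an env without sqlalchemy.
_require_sqlalchemy_for_uri("sqlite:///us_covid_data.db")
```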
2022-11-15T18:24:39Z
https://github.com/huggingface/datasets/issues/5186
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5186/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5185/comments
https://api.github.com/repos/huggingface/datasets/issues/5185/timeline
null
null
null
I_kwDODunzps5VWupr
open
[]
null
5,185
{ "avatar_url": "https://avatars.githubusercontent.com/u/48946947?v=4", "events_url": "https://api.github.com/users/sanderland/events{/privacy}", "followers_url": "https://api.github.com/users/sanderland/followers", "following_url": "https://api.github.com/users/sanderland/following{/other_user}", "gists_url": "https://api.github.com/users/sanderland/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/sanderland", "id": 48946947, "login": "sanderland", "node_id": "MDQ6VXNlcjQ4OTQ2OTQ3", "organizations_url": "https://api.github.com/users/sanderland/orgs", "received_events_url": "https://api.github.com/users/sanderland/received_events", "repos_url": "https://api.github.com/users/sanderland/repos", "site_admin": false, "starred_url": "https://api.github.com/users/sanderland/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/sanderland/subscriptions", "type": "User", "url": "https://api.github.com/users/sanderland" }
Allow passing a subset of output features to Dataset.map
https://api.github.com/repos/huggingface/datasets/issues/5185/events
null
https://api.github.com/repos/huggingface/datasets/issues/5185/labels{/name}
2022-11-01T20:07:20Z
null
false
null
null
1,432,021,611
[ { "color": "a2eeef", "default": true, "description": "New feature or request", "id": 1935892871, "name": "enhancement", "node_id": "MDU6TGFiZWwxOTM1ODkyODcx", "url": "https://api.github.com/repos/huggingface/datasets/labels/enhancement" } ]
https://api.github.com/repos/huggingface/datasets/issues/5185
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
CONTRIBUTOR
### Feature request Currently, map does one of two things to the features (if I'm not mistaken): * when you do not pass features, types are assumed to be equal to the input if they can be cast, and inferred otherwise * when you pass a full specification of features, the output features are set to this. However, sometimes you want to pass just some of the output types, particularly when the first of these modes infers an incorrect type. This currently crashes. ### Motivation To give a little background: this problem appears when converting labels to ids, where the labels happen to be floats rather than strings. Consider the following use of map to convert from float to int: ```python data = Dataset.from_dict({'y':[1.0,2.0,3.0]}) mapped = data.map(lambda r: {'y': int(r['y'])}) mapped['y'] # is floats, not ints ``` The result is a float again, since after the mapping operation the old datatypes are forced back onto the data. Passing `features=Features({"y": Value(dtype="int64")})` to map works in principle, but extending it a little, e.g. ```python def format_data(r): return {**tokenizer(r["text"]), "y": int(r["y"])} data = Dataset.from_dict({"y": [1.0, 2.0, 3.0], "text": ["one", "two", "three"]}) mapped = data.map( format_data, features=Features({'y': Value(dtype="int64")}), remove_columns=["text"], ) ``` results in a crash in the dataset internals, as it expects either all or none of the output features to be specified. Of course one can pass a full feature specification, but this becomes tokenizer-specific and very awkward. ### Your contribution I've looked at `write_batch`, and particularly `col_type = features[col] if features else None`; checking for `col in features` there makes it fail elsewhere, and the structure makes it hard to understand how and why. I do not think I will have the time myself to get to the bottom of this anytime soon.
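[Editor's note: until partial output features are supported, one workaround with the existing API is to map first and then cast only the offending column. The sketch below assumes the mapped values are integral floats, so the cast to int64 is exact.]

```python
from datasets import Dataset, Value

data = Dataset.from_dict({"y": [1.0, 2.0, 3.0]})

# Map without passing features (types are inferred / carried over), then fix only
# the column whose dtype came out wrong, without spelling out every other feature.
mapped = data.map(lambda r: {"y": int(r["y"])})
mapped = mapped.cast_column("y", Value("int64"))
print(mapped.features["y"])  # Value(dtype='int64', id=None)
```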
2022-11-01T20:07:34Z
https://github.com/huggingface/datasets/issues/5185
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5185/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5183/comments
https://api.github.com/repos/huggingface/datasets/issues/5183/timeline
2022-11-02T11:57:50Z
null
completed
I_kwDODunzps5VUbTS
closed
[]
null
5,183
{ "avatar_url": "https://avatars.githubusercontent.com/u/112555442?v=4", "events_url": "https://api.github.com/users/Taghreed7878/events{/privacy}", "followers_url": "https://api.github.com/users/Taghreed7878/followers", "following_url": "https://api.github.com/users/Taghreed7878/following{/other_user}", "gists_url": "https://api.github.com/users/Taghreed7878/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/Taghreed7878", "id": 112555442, "login": "Taghreed7878", "node_id": "U_kgDOBrV1sg", "organizations_url": "https://api.github.com/users/Taghreed7878/orgs", "received_events_url": "https://api.github.com/users/Taghreed7878/received_events", "repos_url": "https://api.github.com/users/Taghreed7878/repos", "site_admin": false, "starred_url": "https://api.github.com/users/Taghreed7878/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/Taghreed7878/subscriptions", "type": "User", "url": "https://api.github.com/users/Taghreed7878" }
Loading an external dataset in a format similar to conll2003
https://api.github.com/repos/huggingface/datasets/issues/5183/events
null
https://api.github.com/repos/huggingface/datasets/issues/5183/labels{/name}
2022-11-01T13:18:29Z
null
false
null
null
1,431,418,066
[]
https://api.github.com/repos/huggingface/datasets/issues/5183
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
NONE
I'm trying to load a custom dataset in a Dataset object, it's similar to conll2003 but with 2 columns only (word entity), I used the following script:

```python
features = datasets.Features(
    {"tokens": datasets.Sequence(datasets.Value("string")),
     "ner_tags": datasets.Sequence(
         datasets.features.ClassLabel(
             names=["B-PER", .... etc.]))}
)

from datasets import Dataset

INPUT_COLUMNS = "tokens ner_tags".split(" ")

def read_conll(file):
    # all_labels = []
    example = {col: [] for col in INPUT_COLUMNS}
    idx = 0
    with open(file) as f:
        for line in f:
            if line:
                if line.startswith("-DOCSTART-") and example["tokens"] != []:
                    print(idx, example)
                    yield idx, example
                    idx += 1
                    example = {col: [] for col in INPUT_COLUMNS}
                elif line == "\n" or (line.startswith("-DOCSTART-") and example["tokens"] == []):
                    continue
                else:
                    row_cols = line.split(" ")
                    for i, col in enumerate(example):
                        example[col] = row_cols[i].rstrip()

dset = Dataset.from_generator(read_conll, gen_kwargs={"file": "/content/new_train.txt"}, features = features)
```

The following error happened:

```
/usr/local/lib/python3.7/dist-packages/datasets/utils/py_utils.py in <genexpr>(.0)
    285     for key in unique_values(itertools.chain(*dicts)):  # set merge all keys
    286         # Will raise KeyError if the dict don't have the same keys
--> 287         yield key, tuple(d[key] for d in dicts)
    288
TypeError: tuple indices must be integers or slices, not str
```

What does this mean and what should I modify?
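[Editor's note: the sketch below is one possible fix, not an official answer, and it assumes the same two-column file layout as above. It addresses the two likely problems: `Dataset.from_generator` expects the generator to yield plain example dicts (yielding `(idx, example)` tuples is what produces the `TypeError`), and each token/tag should be appended to the running lists instead of overwriting them.]

```python
from datasets import Dataset

INPUT_COLUMNS = ["tokens", "ner_tags"]

def read_conll(file):
    # Accumulate one example per -DOCSTART- block, mirroring the structure above.
    example = {col: [] for col in INPUT_COLUMNS}
    with open(file) as f:
        for line in f:
            line = line.rstrip("\n")
            if line.startswith("-DOCSTART-"):
                if example["tokens"]:
                    yield example  # yield the dict itself, not (idx, example)
                    example = {col: [] for col in INPUT_COLUMNS}
            elif not line:
                continue  # blank separator lines
            else:
                word, tag = line.split(" ")[:2]
                example["tokens"].append(word)
                example["ner_tags"].append(tag)
    if example["tokens"]:
        yield example

# dset = Dataset.from_generator(read_conll, gen_kwargs={"file": "/content/new_train.txt"}, features=features)
# (`features` defined as in the issue; ClassLabel accepts the string tags and encodes them.)
```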
2022-11-02T11:57:50Z
https://github.com/huggingface/datasets/issues/5183
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5183/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5182/comments
https://api.github.com/repos/huggingface/datasets/issues/5182/timeline
2022-11-03T01:49:57Z
null
completed
I_kwDODunzps5VS8cr
closed
[ { "avatar_url": "https://avatars.githubusercontent.com/u/22957388?v=4", "events_url": "https://api.github.com/users/sayakpaul/events{/privacy}", "followers_url": "https://api.github.com/users/sayakpaul/followers", "following_url": "https://api.github.com/users/sayakpaul/following{/other_user}", "gists_url": "https://api.github.com/users/sayakpaul/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/sayakpaul", "id": 22957388, "login": "sayakpaul", "node_id": "MDQ6VXNlcjIyOTU3Mzg4", "organizations_url": "https://api.github.com/users/sayakpaul/orgs", "received_events_url": "https://api.github.com/users/sayakpaul/received_events", "repos_url": "https://api.github.com/users/sayakpaul/repos", "site_admin": false, "starred_url": "https://api.github.com/users/sayakpaul/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/sayakpaul/subscriptions", "type": "User", "url": "https://api.github.com/users/sayakpaul" } ]
null
5,182
{ "avatar_url": "https://avatars.githubusercontent.com/u/22957388?v=4", "events_url": "https://api.github.com/users/sayakpaul/events{/privacy}", "followers_url": "https://api.github.com/users/sayakpaul/followers", "following_url": "https://api.github.com/users/sayakpaul/following{/other_user}", "gists_url": "https://api.github.com/users/sayakpaul/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/sayakpaul", "id": 22957388, "login": "sayakpaul", "node_id": "MDQ6VXNlcjIyOTU3Mzg4", "organizations_url": "https://api.github.com/users/sayakpaul/orgs", "received_events_url": "https://api.github.com/users/sayakpaul/received_events", "repos_url": "https://api.github.com/users/sayakpaul/repos", "site_admin": false, "starred_url": "https://api.github.com/users/sayakpaul/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/sayakpaul/subscriptions", "type": "User", "url": "https://api.github.com/users/sayakpaul" }
Add notebook / other resource links to the task-specific data loading guides
https://api.github.com/repos/huggingface/datasets/issues/5182/events
null
https://api.github.com/repos/huggingface/datasets/issues/5182/labels{/name}
2022-11-01T07:57:26Z
null
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/22957388?v=4", "events_url": "https://api.github.com/users/sayakpaul/events{/privacy}", "followers_url": "https://api.github.com/users/sayakpaul/followers", "following_url": "https://api.github.com/users/sayakpaul/following{/other_user}", "gists_url": "https://api.github.com/users/sayakpaul/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/sayakpaul", "id": 22957388, "login": "sayakpaul", "node_id": "MDQ6VXNlcjIyOTU3Mzg4", "organizations_url": "https://api.github.com/users/sayakpaul/orgs", "received_events_url": "https://api.github.com/users/sayakpaul/received_events", "repos_url": "https://api.github.com/users/sayakpaul/repos", "site_admin": false, "starred_url": "https://api.github.com/users/sayakpaul/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/sayakpaul/subscriptions", "type": "User", "url": "https://api.github.com/users/sayakpaul" }
null
1,431,029,547
[ { "color": "a2eeef", "default": true, "description": "New feature or request", "id": 1935892871, "name": "enhancement", "node_id": "MDU6TGFiZWwxOTM1ODkyODcx", "url": "https://api.github.com/repos/huggingface/datasets/labels/enhancement" } ]
https://api.github.com/repos/huggingface/datasets/issues/5182
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
Does it make sense to include links to notebooks / scripts that show how to use a dataset for training / fine-tuning a model? For example, here in [https://huggingface.co/docs/datasets/image_classification] we could include a mention of https://github.com/huggingface/notebooks/blob/main/examples/image_classification.ipynb. Applies to https://huggingface.co/docs/datasets/object_detection as well. Cc: @osanseviero @nateraw
2022-11-03T01:49:57Z
https://github.com/huggingface/datasets/issues/5182
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5182/reactions" }
false
https://api.github.com/repos/huggingface/datasets/issues/5181/comments
https://api.github.com/repos/huggingface/datasets/issues/5181/timeline
2022-11-04T18:23:36Z
null
completed
I_kwDODunzps5VS72e
closed
[ { "avatar_url": "https://avatars.githubusercontent.com/u/22957388?v=4", "events_url": "https://api.github.com/users/sayakpaul/events{/privacy}", "followers_url": "https://api.github.com/users/sayakpaul/followers", "following_url": "https://api.github.com/users/sayakpaul/following{/other_user}", "gists_url": "https://api.github.com/users/sayakpaul/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/sayakpaul", "id": 22957388, "login": "sayakpaul", "node_id": "MDQ6VXNlcjIyOTU3Mzg4", "organizations_url": "https://api.github.com/users/sayakpaul/orgs", "received_events_url": "https://api.github.com/users/sayakpaul/received_events", "repos_url": "https://api.github.com/users/sayakpaul/repos", "site_admin": false, "starred_url": "https://api.github.com/users/sayakpaul/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/sayakpaul/subscriptions", "type": "User", "url": "https://api.github.com/users/sayakpaul" } ]
null
5,181
{ "avatar_url": "https://avatars.githubusercontent.com/u/22957388?v=4", "events_url": "https://api.github.com/users/sayakpaul/events{/privacy}", "followers_url": "https://api.github.com/users/sayakpaul/followers", "following_url": "https://api.github.com/users/sayakpaul/following{/other_user}", "gists_url": "https://api.github.com/users/sayakpaul/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/sayakpaul", "id": 22957388, "login": "sayakpaul", "node_id": "MDQ6VXNlcjIyOTU3Mzg4", "organizations_url": "https://api.github.com/users/sayakpaul/orgs", "received_events_url": "https://api.github.com/users/sayakpaul/received_events", "repos_url": "https://api.github.com/users/sayakpaul/repos", "site_admin": false, "starred_url": "https://api.github.com/users/sayakpaul/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/sayakpaul/subscriptions", "type": "User", "url": "https://api.github.com/users/sayakpaul" }
Add a guide for semantic segmentation
https://api.github.com/repos/huggingface/datasets/issues/5181/events
null
https://api.github.com/repos/huggingface/datasets/issues/5181/labels{/name}
2022-11-01T07:54:50Z
null
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/22957388?v=4", "events_url": "https://api.github.com/users/sayakpaul/events{/privacy}", "followers_url": "https://api.github.com/users/sayakpaul/followers", "following_url": "https://api.github.com/users/sayakpaul/following{/other_user}", "gists_url": "https://api.github.com/users/sayakpaul/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/sayakpaul", "id": 22957388, "login": "sayakpaul", "node_id": "MDQ6VXNlcjIyOTU3Mzg4", "organizations_url": "https://api.github.com/users/sayakpaul/orgs", "received_events_url": "https://api.github.com/users/sayakpaul/received_events", "repos_url": "https://api.github.com/users/sayakpaul/repos", "site_admin": false, "starred_url": "https://api.github.com/users/sayakpaul/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/sayakpaul/subscriptions", "type": "User", "url": "https://api.github.com/users/sayakpaul" }
null
1,431,027,102
[ { "color": "0075ca", "default": true, "description": "Improvements or additions to documentation", "id": 1935892861, "name": "documentation", "node_id": "MDU6TGFiZWwxOTM1ODkyODYx", "url": "https://api.github.com/repos/huggingface/datasets/labels/documentation" } ]
https://api.github.com/repos/huggingface/datasets/issues/5181
[ "", "" ]
https://api.github.com/repos/huggingface/datasets
MEMBER
Currently, we have these guides for object detection and image classification: * https://huggingface.co/docs/datasets/object_detection * https://huggingface.co/docs/datasets/image_classification I am proposing adding a similar guide for semantic segmentation. I am happy to contribute a PR for it. Cc: @osanseviero @nateraw
2022-11-04T18:23:36Z
https://github.com/huggingface/datasets/issues/5181
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/5181/reactions" }
false