Stern5497 committed on
Commit
b61b56c
·
1 Parent(s): 42ac335

Update querie.py

Browse files
Files changed (1) hide show
  1. querie.py +4 -4
querie.py CHANGED
@@ -33,9 +33,9 @@ _LICENSE = ""
33
  # The HuggingFace Datasets library doesn't host the datasets but only points to the original files.
34
  # This can be an arbitrary nested dict/list of URLs (see below in `_split_generators` method)
35
  _URLS = {
36
- "german": "https://huggingface.co/datasets/Stern5497/querie/resolve/main/queries_de",
37
- "italian": "https://huggingface.co/datasets/Stern5497/querie/resolve/main/queries_it",
38
- "french": "https://huggingface.co/datasets/Stern5497/querie/resolve/main/queries_fr",
39
 
40
  }
41
 
@@ -87,7 +87,7 @@ class querie(datasets.GeneratorBasedBuilder):
87
  # It can accept any type or nested list/dict and will give back the same structure with the url replaced with path to local files.
88
  # By default the archives will be extracted and a path to a cached folder where they are extracted is returned instead of the archive
89
  urls = get_url(self.config.name)
90
- filepath = dl_manager.download(os.path.join(urls, ".jsonl"))
91
 
92
  return [
93
  datasets.SplitGenerator(
 
33
  # The HuggingFace Datasets library doesn't host the datasets but only points to the original files.
34
  # This can be an arbitrary nested dict/list of URLs (see below in `_split_generators` method)
35
  _URLS = {
36
+ "german": "https://huggingface.co/datasets/Stern5497/querie/resolve/main/queries_de.jsonl",
37
+ "italian": "https://huggingface.co/datasets/Stern5497/querie/resolve/main/queries_it.jsonl",
38
+ "french": "https://huggingface.co/datasets/Stern5497/querie/resolve/main/queries_fr.jsonl",
39
 
40
  }
41
 
 
87
  # It can accept any type or nested list/dict and will give back the same structure with the url replaced with path to local files.
88
  # By default the archives will be extracted and a path to a cached folder where they are extracted is returned instead of the archive
89
  urls = get_url(self.config.name)
90
+ filepath = dl_manager.download(urls)
91
 
92
  return [
93
  datasets.SplitGenerator(