Column schema (field types and observed value ranges):

| Column           | Type          | Length / distinct values |
|------------------|---------------|--------------------------|
| repo             | stringlengths | 7–55                     |
| path             | stringlengths | 4–223                    |
| func_name        | stringlengths | 1–134                    |
| original_string  | stringlengths | 75–104k                  |
| language         | stringclasses | 1 value                  |
| code             | stringlengths | 75–104k                  |
| code_tokens      | listlengths   | 19–28.4k                 |
| docstring        | stringlengths | 1–46.9k                  |
| docstring_tokens | listlengths   | 1–1.97k                  |
| sha              | stringlengths | 40                       |
| url              | stringlengths | 87–315                   |
| partition        | stringclasses | 1 value                  |
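Each row below pairs one function from the pinned tensorflow/datasets commit with its docstring and token lists. As a minimal sketch of how such records could be consumed, assuming they have been exported as JSON Lines with the fields above (the file name is hypothetical):

```python
import json

# Iterate a hypothetical "code_docstring.jsonl" export, one record per line,
# with the schema fields listed above.
with open("code_docstring.jsonl") as f:
    for line in f:
        record = json.loads(line)
        # Each record pairs a function's source with its docstring.
        print(record["repo"], record["path"], record["func_name"])
        print(record["docstring"][:80])  # first 80 characters
```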
repo: tensorflow/datasets
path: tensorflow_datasets/image/sun.py
func_name: _decode_image
original_string: def _decode_image(fobj, session, filename): """Reads and decodes an image from a file object as a Numpy array. The SUN dataset contains images in several formats (despite the fact that all of them have .jpg extension). Some of them are: - BMP (RGB) - PNG (grayscale, RGBA, RGB interlaced) - JPEG (RGB)...
language: python
code: (identical to original_string)
code_tokens: [ "def", "_decode_image", "(", "fobj", ",", "session", ",", "filename", ")", ":", "buf", "=", "fobj", ".", "read", "(", ")", "image", "=", "tfds", ".", "core", ".", "lazy_imports", ".", "cv2", ".", "imdecode", "(", "np", ".", "fromstring", "(", "buf", ...
docstring: Reads and decodes an image from a file object as a Numpy array. The SUN dataset contains images in several formats (despite the fact that all of them have .jpg extension). Some of them are: - BMP (RGB) - PNG (grayscale, RGBA, RGB interlaced) - JPEG (RGB) - GIF (1-frame RGB) Since TFDS assumes tha...
docstring_tokens: [ "Reads", "and", "decodes", "an", "image", "from", "a", "file", "object", "as", "a", "Numpy", "array", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/sun.py#L65-L102
partition: train
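The code_tokens above show the heart of the decode step: cv2.imdecode over the raw file bytes. Below is a minimal sketch of just that step, with np.frombuffer substituted for the deprecated np.fromstring; the original's GIF handling and TF session re-encoding are not reproduced.

```python
import cv2  # the lazy-imported dependency visible in code_tokens above
import numpy as np

def decode_image_bytes(buf):
    """Decode raw BMP/PNG/JPEG bytes into an RGB numpy array (sketch only)."""
    # cv2 returns channels in BGR order, so flip to RGB at the end.
    image = cv2.imdecode(np.frombuffer(buf, dtype=np.uint8), cv2.IMREAD_COLOR)
    return image[..., ::-1]
```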
repo: tensorflow/datasets
path: tensorflow_datasets/image/sun.py
func_name: _process_image_file
original_string: def _process_image_file(fobj, session, filename): """Process image files from the dataset.""" # We need to read the image files and convert them to JPEG, since some files # actually contain GIF, PNG or BMP data (despite having a .jpg extension) and # some encoding options that will make TF crash in general. i...
language: python
code: (identical to original_string)
code_tokens: [ "def", "_process_image_file", "(", "fobj", ",", "session", ",", "filename", ")", ":", "# We need to read the image files and convert them to JPEG, since some files", "# actually contain GIF, PNG or BMP data (despite having a .jpg extension) and", "# some encoding options that will make TF cr...
docstring: Process image files from the dataset.
docstring_tokens: [ "Process", "image", "files", "from", "the", "dataset", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/sun.py#L113-L119
partition: train

repo: tensorflow/datasets
path: tensorflow_datasets/image/sun.py
func_name: Sun397._generate_examples
original_string: def _generate_examples(self, archive): """Yields examples.""" prefix_len = len("SUN397") with tf.Graph().as_default(): with utils.nogpu_session() as sess: for filepath, fobj in archive: if (filepath.endswith(".jpg") and filepath not in _SUN397_IGNORE_IMAGES): ...
language: python
code: (identical to original_string)
code_tokens: [ "def", "_generate_examples", "(", "self", ",", "archive", ")", ":", "prefix_len", "=", "len", "(", "\"SUN397\"", ")", "with", "tf", ".", "Graph", "(", ")", ".", "as_default", "(", ")", ":", "with", "utils", ".", "nogpu_session", "(", ")", "as", "sess",...
docstring: Yields examples.
docstring_tokens: [ "Yields", "examples", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/sun.py#L157-L176
partition: train
repo: tensorflow/datasets
path: tensorflow_datasets/translate/wmt.py
func_name: _parse_parallel_sentences
original_string: def _parse_parallel_sentences(f1, f2): """Returns examples from parallel SGML or text files, which may be gzipped.""" def _parse_text(path): """Returns the sentences from a single text file, which may be gzipped.""" split_path = path.split(".") if split_path[-1] == "gz": lang = split_path[-2] ...
language: python
code: (identical to original_string)
code_tokens: [ "def", "_parse_parallel_sentences", "(", "f1", ",", "f2", ")", ":", "def", "_parse_text", "(", "path", ")", ":", "\"\"\"Returns the sentences from a single text file, which may be gzipped.\"\"\"", "split_path", "=", "path", ".", "split", "(", "\".\"", ")", "if", "spli...
docstring: Returns examples from parallel SGML or text files, which may be gzipped.
docstring_tokens: [ "Returns", "examples", "from", "parallel", "SGML", "or", "text", "files", "which", "may", "be", "gzipped", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/translate/wmt.py#L761-L820
partition: train

repo: tensorflow/datasets
path: tensorflow_datasets/translate/wmt.py
func_name: _parse_tmx
original_string: def _parse_tmx(path): """Generates examples from TMX file.""" def _get_tuv_lang(tuv): for k, v in tuv.items(): if k.endswith("}lang"): return v raise AssertionError("Language not found in `tuv` attributes.") def _get_tuv_seg(tuv): segs = tuv.findall("seg") assert len(segs) == 1, "In...
language: python
code: (identical to original_string)
code_tokens: [ "def", "_parse_tmx", "(", "path", ")", ":", "def", "_get_tuv_lang", "(", "tuv", ")", ":", "for", "k", ",", "v", "in", "tuv", ".", "items", "(", ")", ":", "if", "k", ".", "endswith", "(", "\"}lang\"", ")", ":", "return", "v", "raise", "AssertionErro...
docstring: Generates examples from TMX file.
docstring_tokens: [ "Generates", "examples", "from", "TMX", "file", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/translate/wmt.py#L838-L858
partition: train

repo: tensorflow/datasets
path: tensorflow_datasets/translate/wmt.py
func_name: _parse_tsv
original_string: def _parse_tsv(path, language_pair=None): """Generates examples from TSV file.""" if language_pair is None: lang_match = re.match(r".*\.([a-z][a-z])-([a-z][a-z])\.tsv", path) assert lang_match is not None, "Invalid TSV filename: %s" % path l1, l2 = lang_match.groups() else: l1, l2 = language_pair ...
language: python
code: (identical to original_string)
code_tokens: [ "def", "_parse_tsv", "(", "path", ",", "language_pair", "=", "None", ")", ":", "if", "language_pair", "is", "None", ":", "lang_match", "=", "re", ".", "match", "(", "r\".*\\.([a-z][a-z])-([a-z][a-z])\\.tsv\"", ",", "path", ")", "assert", "lang_match", "is", "n...
docstring: Generates examples from TSV file.
docstring_tokens: [ "Generates", "examples", "from", "TSV", "file", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/translate/wmt.py#L861-L881
partition: train
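The regex in _parse_tsv above (like the similar one in _parse_wikiheadlines below) recovers the language pair from the file name. A small self-contained check, with a made-up file name:

```python
import re

path = "training/news-commentary.de-en.tsv"  # illustrative, not a real path
lang_match = re.match(r".*\.([a-z][a-z])-([a-z][a-z])\.tsv", path)
assert lang_match is not None, "Invalid TSV filename: %s" % path
l1, l2 = lang_match.groups()
print(l1, l2)  # de en
```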
repo: tensorflow/datasets
path: tensorflow_datasets/translate/wmt.py
func_name: _parse_wikiheadlines
original_string: def _parse_wikiheadlines(path): """Generates examples from Wikiheadlines dataset file.""" lang_match = re.match(r".*\.([a-z][a-z])-([a-z][a-z])$", path) assert lang_match is not None, "Invalid Wikiheadlines filename: %s" % path l1, l2 = lang_match.groups() with tf.io.gfile.GFile(path) as f: for line in f:...
language: python
code: (identical to original_string)
code_tokens: [ "def", "_parse_wikiheadlines", "(", "path", ")", ":", "lang_match", "=", "re", ".", "match", "(", "r\".*\\.([a-z][a-z])-([a-z][a-z])$\"", ",", "path", ")", "assert", "lang_match", "is", "not", "None", ",", "\"Invalid Wikiheadlines filename: %s\"", "%", "path", "l1",...
docstring: Generates examples from Wikiheadlines dataset file.
docstring_tokens: [ "Generates", "examples", "from", "Wikiheadlines", "dataset", "file", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/translate/wmt.py#L884-L895
partition: train

repo: tensorflow/datasets
path: tensorflow_datasets/translate/wmt.py
func_name: _parse_czeng
original_string: def _parse_czeng(*paths, **kwargs): """Generates examples from CzEng v1.6, with optional filtering for v1.7.""" filter_path = kwargs.get("filter_path", None) if filter_path: re_block = re.compile(r"^[^-]+-b(\d+)-\d\d[tde]") with tf.io.gfile.GFile(filter_path) as f: bad_blocks = { blk for b...
language: python
code: (identical to original_string)
code_tokens: [ "def", "_parse_czeng", "(", "*", "paths", ",", "*", "*", "kwargs", ")", ":", "filter_path", "=", "kwargs", ".", "get", "(", "\"filter_path\"", ",", "None", ")", "if", "filter_path", ":", "re_block", "=", "re", ".", "compile", "(", "r\"^[^-]+-b(\\d+)-\\d\\d...
docstring: Generates examples from CzEng v1.6, with optional filtering for v1.7.
docstring_tokens: [ "Generates", "examples", "from", "CzEng", "v1", ".", "6", "with", "optional", "filtering", "for", "v1", ".", "7", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/translate/wmt.py#L898-L927
partition: train

repo: tensorflow/datasets
path: tensorflow_datasets/translate/wmt.py
func_name: SubDataset._inject_language
original_string: def _inject_language(self, src, strings): """Injects languages into (potentially) template strings.""" if src not in self.sources: raise ValueError("Invalid source for '{0}': {1}".format(self.name, src)) def _format_string(s): if "{0}" in s and "{1}" and "{src}" in s: return s.format(*so...
language: python
code: (identical to original_string)
code_tokens: [ "def", "_inject_language", "(", "self", ",", "src", ",", "strings", ")", ":", "if", "src", "not", "in", "self", ".", "sources", ":", "raise", "ValueError", "(", "\"Invalid source for '{0}': {1}\"", ".", "format", "(", "self", ".", "name", ",", "src", ")", ...
docstring: Injects languages into (potentially) template strings.
docstring_tokens: [ "Injects", "languages", "into", "(", "potentially", ")", "template", "strings", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/translate/wmt.py#L97-L110
partition: train

repo: tensorflow/datasets
path: tensorflow_datasets/translate/wmt.py
func_name: WmtTranslate.subsets
original_string: def subsets(self): """Subsets that make up each split of the dataset for the language pair.""" source, target = self.builder_config.language_pair filtered_subsets = {} for split, ss_names in self._subsets.items(): filtered_subsets[split] = [] for ss_name in ss_names: ds = DATASET_MAP...
language: python
code: (identical to original_string)
code_tokens: [ "def", "subsets", "(", "self", ")", ":", "source", ",", "target", "=", "self", ".", "builder_config", ".", "language_pair", "filtered_subsets", "=", "{", "}", "for", "split", ",", "ss_names", "in", "self", ".", "_subsets", ".", "items", "(", ")", ":", ...
docstring: Subsets that make up each split of the dataset for the language pair.
docstring_tokens: [ "Subsets", "that", "make", "up", "each", "split", "of", "the", "dataset", "for", "the", "language", "pair", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/translate/wmt.py#L615-L630
partition: train

repo: tensorflow/datasets
path: tensorflow_datasets/translate/wmt.py
func_name: WmtTranslate._generate_examples
original_string: def _generate_examples(self, split_subsets, extraction_map): """Returns the examples in the raw (text) form.""" source, _ = self.builder_config.language_pair def _get_local_paths(ds, extract_dirs): rel_paths = ds.get_path(source) if len(extract_dirs) == 1: extract_dirs = extract_dirs * ...
language: python
code: (identical to original_string)
code_tokens: [ "def", "_generate_examples", "(", "self", ",", "split_subsets", ",", "extraction_map", ")", ":", "source", ",", "_", "=", "self", ".", "builder_config", ".", "language_pair", "def", "_get_local_paths", "(", "ds", ",", "extract_dirs", ")", ":", "rel_paths", "="...
docstring: Returns the examples in the raw (text) form.
docstring_tokens: [ "Returns", "the", "examples", "in", "the", "raw", "(", "text", ")", "form", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/translate/wmt.py#L703-L758
partition: train
repo: tensorflow/datasets
path: tensorflow_datasets/core/registered.py
func_name: builder
original_string: def builder(name, **builder_init_kwargs): """Fetches a `tfds.core.DatasetBuilder` by string name. Args: name: `str`, the registered name of the `DatasetBuilder` (the snake case version of the class name). This can be either `"dataset_name"` or `"dataset_name/config_name"` for datasets with `Builder...
language: python
code: (identical to original_string)
code_tokens: [ "def", "builder", "(", "name", ",", "*", "*", "builder_init_kwargs", ")", ":", "name", ",", "builder_kwargs", "=", "_dataset_name_and_kwargs_from_name_str", "(", "name", ")", "builder_kwargs", ".", "update", "(", "builder_init_kwargs", ")", "if", "name", "in", "...
docstring: Fetches a `tfds.core.DatasetBuilder` by string name. Args: name: `str`, the registered name of the `DatasetBuilder` (the snake case version of the class name). This can be either `"dataset_name"` or `"dataset_name/config_name"` for datasets with `BuilderConfig`s. As a convenience, this string m...
docstring_tokens: [ "Fetches", "a", "tfds", ".", "core", ".", "DatasetBuilder", "by", "string", "name", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/registered.py#L137-L172
partition: train

repo: tensorflow/datasets
path: tensorflow_datasets/core/registered.py
func_name: load
original_string: def load(name, split=None, data_dir=None, batch_size=1, download=True, as_supervised=False, with_info=False, builder_kwargs=None, download_and_prepare_kwargs=None, as_dataset_kwargs=None, try_gcs=False): """Loads the named datas...
language: python
code: (identical to original_string)
code_tokens: [ "def", "load", "(", "name", ",", "split", "=", "None", ",", "data_dir", "=", "None", ",", "batch_size", "=", "1", ",", "download", "=", "True", ",", "as_supervised", "=", "False", ",", "with_info", "=", "False", ",", "builder_kwargs", "=", "None", ",",...
docstring: Loads the named dataset into a `tf.data.Dataset`. If `split=None` (the default), returns all splits for the dataset. Otherwise, returns the specified split. `load` is a convenience method that fetches the `tfds.core.DatasetBuilder` by string name, optionally calls `DatasetBuilder.download_and_prepare` (if `...
docstring_tokens: [ "Loads", "the", "named", "dataset", "into", "a", "tf", ".", "data", ".", "Dataset", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/registered.py#L176-L297
partition: train

repo: tensorflow/datasets
path: tensorflow_datasets/core/registered.py
func_name: _dataset_name_and_kwargs_from_name_str
original_string: def _dataset_name_and_kwargs_from_name_str(name_str): """Extract kwargs from name str.""" res = _NAME_REG.match(name_str) if not res: raise ValueError(_NAME_STR_ERR.format(name_str)) name = res.group("dataset_name") kwargs = _kwargs_str_to_kwargs(res.group("kwargs")) try: for attr in ["config", "ver...
language: python
code: (identical to original_string)
code_tokens: [ "def", "_dataset_name_and_kwargs_from_name_str", "(", "name_str", ")", ":", "res", "=", "_NAME_REG", ".", "match", "(", "name_str", ")", "if", "not", "res", ":", "raise", "ValueError", "(", "_NAME_STR_ERR", ".", "format", "(", "name_str", ")", ")", "name", "...
docstring: Extract kwargs from name str.
docstring_tokens: [ "Extract", "kwargs", "from", "name", "str", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/registered.py#L311-L329
partition: train

repo: tensorflow/datasets
path: tensorflow_datasets/core/registered.py
func_name: _cast_to_pod
original_string: def _cast_to_pod(val): """Try cast to int, float, bool, str, in that order.""" bools = {"True": True, "False": False} if val in bools: return bools[val] try: return int(val) except ValueError: try: return float(val) except ValueError: return tf.compat.as_text(val)
language: python
code: (identical to original_string)
code_tokens: [ "def", "_cast_to_pod", "(", "val", ")", ":", "bools", "=", "{", "\"True\"", ":", "True", ",", "\"False\"", ":", "False", "}", "if", "val", "in", "bools", ":", "return", "bools", "[", "val", "]", "try", ":", "return", "int", "(", "val", ")", "except...
docstring: Try cast to int, float, bool, str, in that order.
docstring_tokens: [ "Try", "cast", "to", "int", "float", "bool", "str", "in", "that", "order", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/registered.py#L343-L354
partition: train
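The _cast_to_pod record above includes the complete function, so its behavior can be shown standalone. In this sketch plain str replaces tf.compat.as_text so it runs without TensorFlow:

```python
def cast_to_pod(val):
    """Try casting to bool, int, float, then str, mirroring _cast_to_pod."""
    bools = {"True": True, "False": False}
    if val in bools:
        return bools[val]
    try:
        return int(val)
    except ValueError:
        try:
            return float(val)
        except ValueError:
            return str(val)  # the original falls back to tf.compat.as_text

assert cast_to_pod("2") == 2
assert cast_to_pod("0.5") == 0.5
assert cast_to_pod("True") is True
assert cast_to_pod("mnist") == "mnist"
```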
repo: tensorflow/datasets
path: tensorflow_datasets/core/lazy_imports.py
func_name: _try_import
original_string: def _try_import(module_name): """Try importing a module, with an informative error message on failure.""" try: mod = importlib.import_module(module_name) return mod except ImportError: err_msg = ("Tried importing %s but failed. See setup.py extras_require. " "The dataset you are trying ...
language: python
code: (identical to original_string)
code_tokens: [ "def", "_try_import", "(", "module_name", ")", ":", "try", ":", "mod", "=", "importlib", ".", "import_module", "(", "module_name", ")", "return", "mod", "except", "ImportError", ":", "err_msg", "=", "(", "\"Tried importing %s but failed. See setup.py extras_require. \...
docstring: Try importing a module, with an informative error message on failure.
docstring_tokens: [ "Try", "importing", "a", "module", "with", "an", "informative", "error", "message", "on", "failure", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/lazy_imports.py#L27-L36
partition: train
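_try_import wraps importlib so that optional dataset dependencies fail with a pointer to the extras rather than a bare ImportError. A minimal sketch of the same pattern; the error wording is paraphrased from the record above:

```python
import importlib

def try_import(module_name):
    """Import a module by name, with a friendlier message on failure."""
    try:
        return importlib.import_module(module_name)
    except ImportError:
        raise ImportError(
            "Tried importing %s but failed. It may be an optional dependency "
            "that is not installed." % module_name)

json_mod = try_import("json")  # resolved at call time, not at import time
```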
repo: tensorflow/datasets
path: tensorflow_datasets/core/features/sequence_feature.py
func_name: np_to_list
original_string: def np_to_list(elem): """Returns list from list, tuple or ndarray.""" if isinstance(elem, list): return elem elif isinstance(elem, tuple): return list(elem) elif isinstance(elem, np.ndarray): return list(elem) else: raise ValueError( 'Input elements of a sequence should be either a num...
language: python
code: (identical to original_string)
code_tokens: [ "def", "np_to_list", "(", "elem", ")", ":", "if", "isinstance", "(", "elem", ",", "list", ")", ":", "return", "elem", "elif", "isinstance", "(", "elem", ",", "tuple", ")", ":", "return", "list", "(", "elem", ")", "elif", "isinstance", "(", "elem", ",...
docstring: Returns list from list, tuple or ndarray.
docstring_tokens: [ "Returns", "list", "from", "list", "tuple", "or", "ndarray", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/features/sequence_feature.py#L257-L268
partition: train

repo: tensorflow/datasets
path: tensorflow_datasets/core/features/sequence_feature.py
func_name: _transpose_dict_list
original_string: def _transpose_dict_list(dict_list): """Transpose a nested dict[list] into a list[nested dict].""" # 1. Unstack numpy arrays into list dict_list = utils.map_nested(np_to_list, dict_list, dict_only=True) # 2. Extract the sequence length (and ensure the length is constant for all # elements) length = {'value...
language: python
code: (identical to original_string)
code_tokens: [ "def", "_transpose_dict_list", "(", "dict_list", ")", ":", "# 1. Unstack numpy arrays into list", "dict_list", "=", "utils", ".", "map_nested", "(", "np_to_list", ",", "dict_list", ",", "dict_only", "=", "True", ")", "# 2. Extract the sequence length (and ensure the length ...
docstring: Transpose a nested dict[list] into a list[nested dict].
docstring_tokens: [ "Transpose", "a", "nested", "dict", "[", "list", "]", "into", "a", "list", "[", "nested", "dict", "]", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/features/sequence_feature.py#L271-L293
partition: train
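_transpose_dict_list turns a dict of equal-length columns into a list of per-example dicts. A flat, non-nested sketch of the same transpose (the real helper also recurses into nested feature dicts via utils.map_nested):

```python
def transpose_dict_list(dict_of_lists):
    """Transpose {'a': [1, 2], 'b': [3, 4]} into [{'a': 1, 'b': 3}, {'a': 2, 'b': 4}]."""
    # list(v) also unstacks tuples and ndarrays, as np_to_list does above.
    columns = {k: list(v) for k, v in dict_of_lists.items()}
    lengths = {len(v) for v in columns.values()}
    assert len(lengths) == 1, "All sequence columns must share one length."
    return [{k: v[i] for k, v in columns.items()} for i in range(lengths.pop())]

print(transpose_dict_list({"a": [1, 2], "b": [3, 4]}))
# [{'a': 1, 'b': 3}, {'a': 2, 'b': 4}]
```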
repo: tensorflow/datasets
path: tensorflow_datasets/core/features/sequence_feature.py
func_name: SequenceDict.get_tensor_info
original_string: def get_tensor_info(self): """See base class for details.""" # Add the additional length dimension to every shape def add_length_dim(tensor_info): return feature_lib.TensorInfo( shape=(self._length,) + tensor_info.shape, dtype=tensor_info.dtype, ) tensor_info = super(Se...
language: python
code: (identical to original_string)
code_tokens: [ "def", "get_tensor_info", "(", "self", ")", ":", "# Add the additional length dimension to every shape", "def", "add_length_dim", "(", "tensor_info", ")", ":", "return", "feature_lib", ".", "TensorInfo", "(", "shape", "=", "(", "self", ".", "_length", ",", ")", "+...
docstring: See base class for details.
docstring_tokens: [ "See", "base", "class", "for", "details", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/features/sequence_feature.py#L90-L101
partition: train

repo: tensorflow/datasets
path: tensorflow_datasets/core/features/sequence_feature.py
func_name: SequenceDict.get_serialized_info
original_string: def get_serialized_info(self): """See base class for details.""" # Add the additional length dimension to every serialized features def add_length_dim(serialized_info): """Add the length dimension to the serialized_info. Args: serialized_info: One of tf.io.FixedLenFeature, tf.io.VarLen...
language: python
code: (identical to original_string)
code_tokens: [ "def", "get_serialized_info", "(", "self", ")", ":", "# Add the additional length dimension to every serialized features", "def", "add_length_dim", "(", "serialized_info", ")", ":", "\"\"\"Add the length dimension to the serialized_info.\n\n  Args:\n   serialized_info: One of tf.i...
docstring: See base class for details.
docstring_tokens: [ "See", "base", "class", "for", "details", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/features/sequence_feature.py#L103-L137
partition: train
repo: tensorflow/datasets
path: tensorflow_datasets/image/mnist.py
func_name: MNIST._split_generators
original_string: def _split_generators(self, dl_manager): """Returns SplitGenerators.""" # Download the full MNIST Database filenames = { "train_data": _MNIST_TRAIN_DATA_FILENAME, "train_labels": _MNIST_TRAIN_LABELS_FILENAME, "test_data": _MNIST_TEST_DATA_FILENAME, "test_labels": _MNIST_TEST_...
language: python
code: (identical to original_string)
code_tokens: [ "def", "_split_generators", "(", "self", ",", "dl_manager", ")", ":", "# Download the full MNIST Database", "filenames", "=", "{", "\"train_data\"", ":", "_MNIST_TRAIN_DATA_FILENAME", ",", "\"train_labels\"", ":", "_MNIST_TRAIN_LABELS_FILENAME", ",", "\"test_data\"", ":", ...
docstring: Returns SplitGenerators.
docstring_tokens: [ "Returns", "SplitGenerators", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/mnist.py#L113-L144
partition: train

repo: tensorflow/datasets
path: tensorflow_datasets/image/mnist.py
func_name: MNIST._generate_examples
original_string: def _generate_examples(self, num_examples, data_path, label_path): """Generate MNIST examples as dicts. Args: num_examples (int): The number of example. data_path (str): Path to the data files label_path (str): Path to the labels Yields: Generator yielding the next examples """...
language: python
code: (identical to original_string)
code_tokens: [ "def", "_generate_examples", "(", "self", ",", "num_examples", ",", "data_path", ",", "label_path", ")", ":", "images", "=", "_extract_mnist_images", "(", "data_path", ",", "num_examples", ")", "labels", "=", "_extract_mnist_labels", "(", "label_path", ",", "num_e...
docstring: Generate MNIST examples as dicts. Args: num_examples (int): The number of example. data_path (str): Path to the data files label_path (str): Path to the labels Yields: Generator yielding the next examples
docstring_tokens: [ "Generate", "MNIST", "examples", "as", "dicts", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/mnist.py#L146-L166
partition: train

repo: tensorflow/datasets
path: tensorflow_datasets/image/oxford_flowers102.py
func_name: OxfordFlowers102._split_generators
original_string: def _split_generators(self, dl_manager): """Returns SplitGenerators.""" # Download images and annotations that come in separate archives. # Note, that the extension of archives is .tar.gz even though the actual # archives format is uncompressed tar. dl_paths = dl_manager.download_and_extract({ ...
language: python
code: (identical to original_string)
code_tokens: [ "def", "_split_generators", "(", "self", ",", "dl_manager", ")", ":", "# Download images and annotations that come in separate archives.", "# Note, that the extension of archives is .tar.gz even though the actual", "# archives format is uncompressed tar.", "dl_paths", "=", "dl_manager", ...
docstring: Returns SplitGenerators.
docstring_tokens: [ "Returns", "SplitGenerators", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/oxford_flowers102.py#L70-L102
partition: train

repo: tensorflow/datasets
path: tensorflow_datasets/image/oxford_flowers102.py
func_name: OxfordFlowers102._generate_examples
original_string: def _generate_examples(self, images_dir_path, labels_path, setid_path, split_name): """Yields examples.""" with tf.io.gfile.GFile(labels_path, "rb") as f: labels = tfds.core.lazy_imports.scipy.io.loadmat(f)["labels"][0] with tf.io.gfile.GFile(setid_path, "rb") as f: exam...
language: python
code: (identical to original_string)
code_tokens: [ "def", "_generate_examples", "(", "self", ",", "images_dir_path", ",", "labels_path", ",", "setid_path", ",", "split_name", ")", ":", "with", "tf", ".", "io", ".", "gfile", ".", "GFile", "(", "labels_path", ",", "\"rb\"", ")", "as", "f", ":", "labels", "...
docstring: Yields examples.
docstring_tokens: [ "Yields", "examples", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/oxford_flowers102.py#L104-L118
partition: train
repo: tensorflow/datasets
path: tensorflow_datasets/core/dataset_info.py
func_name: get_dataset_feature_statistics
original_string: def get_dataset_feature_statistics(builder, split): """Calculate statistics for the specified split.""" statistics = statistics_pb2.DatasetFeatureStatistics() # Make this to the best of our abilities. schema = schema_pb2.Schema() dataset = builder.as_dataset(split=split) # Just computing the number of ex...
language: python
code: (identical to original_string)
code_tokens: [ "def", "get_dataset_feature_statistics", "(", "builder", ",", "split", ")", ":", "statistics", "=", "statistics_pb2", ".", "DatasetFeatureStatistics", "(", ")", "# Make this to the best of our abilities.", "schema", "=", "schema_pb2", ".", "Schema", "(", ")", "dataset",...
docstring: Calculate statistics for the specified split.
docstring_tokens: [ "Calculate", "statistics", "for", "the", "specified", "split", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/dataset_info.py#L443-L556
partition: train

repo: tensorflow/datasets
path: tensorflow_datasets/core/dataset_info.py
func_name: read_from_json
original_string: def read_from_json(json_filename): """Read JSON-formatted proto into DatasetInfo proto.""" with tf.io.gfile.GFile(json_filename) as f: dataset_info_json_str = f.read() # Parse it back into a proto. parsed_proto = json_format.Parse(dataset_info_json_str, dataset_info_pb2.Da...
language: python
code: (identical to original_string)
code_tokens: [ "def", "read_from_json", "(", "json_filename", ")", ":", "with", "tf", ".", "io", ".", "gfile", ".", "GFile", "(", "json_filename", ")", "as", "f", ":", "dataset_info_json_str", "=", "f", ".", "read", "(", ")", "# Parse it back into a proto.", "parsed_proto", ...
docstring: Read JSON-formatted proto into DatasetInfo proto.
docstring_tokens: [ "Read", "JSON", "-", "formatted", "proto", "into", "DatasetInfo", "proto", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/dataset_info.py#L559-L566
partition: train

repo: tensorflow/datasets
path: tensorflow_datasets/core/dataset_info.py
func_name: DatasetInfo.full_name
original_string: def full_name(self): """Full canonical name: (<dataset_name>/<config_name>/<version>).""" names = [self._builder.name] if self._builder.builder_config: names.append(self._builder.builder_config.name) names.append(str(self.version)) return posixpath.join(*names)
language: python
code: (identical to original_string)
code_tokens: [ "def", "full_name", "(", "self", ")", ":", "names", "=", "[", "self", ".", "_builder", ".", "name", "]", "if", "self", ".", "_builder", ".", "builder_config", ":", "names", ".", "append", "(", "self", ".", "_builder", ".", "builder_config", ".", "name"...
docstring: Full canonical name: (<dataset_name>/<config_name>/<version>).
docstring_tokens: [ "Full", "canonical", "name", ":", "(", "<dataset_name", ">", "/", "<config_name", ">", "/", "<version", ">", ")", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/dataset_info.py#L150-L156
partition: train
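DatasetInfo.full_name above is shown in full: the canonical name is a posix join of dataset name, optional config name, and version. A standalone sketch with illustrative values:

```python
import posixpath

def full_name(name, config_name, version):
    """Build <dataset_name>/<config_name>/<version>, skipping a None config."""
    names = [name]
    if config_name:
        names.append(config_name)
    names.append(str(version))
    return posixpath.join(*names)

print(full_name("wmt19_translate", "de-en", "1.0.0"))  # wmt19_translate/de-en/1.0.0
print(full_name("mnist", None, "3.0.1"))               # mnist/3.0.1
```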
repo: tensorflow/datasets
path: tensorflow_datasets/core/dataset_info.py
func_name: DatasetInfo.update_splits_if_different
original_string: def update_splits_if_different(self, split_dict): """Overwrite the splits if they are different from the current ones. * If splits aren't already defined or different (ex: different number of shards), then the new split dict is used. This will trigger stats computation during download_and_prepare. ...
language: python
code: (identical to original_string)
code_tokens: [ "def", "update_splits_if_different", "(", "self", ",", "split_dict", ")", ":", "assert", "isinstance", "(", "split_dict", ",", "splits_lib", ".", "SplitDict", ")", "# If splits are already defined and identical, then we do not update", "if", "self", ".", "_splits", "and",...
docstring: Overwrite the splits if they are different from the current ones. * If splits aren't already defined or different (ex: different number of shards), then the new split dict is used. This will trigger stats computation during download_and_prepare. * If splits are already defined in DatasetInfo and si...
docstring_tokens: [ "Overwrite", "the", "splits", "if", "they", "are", "different", "from", "the", "current", "ones", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/dataset_info.py#L197-L217
partition: train

repo: tensorflow/datasets
path: tensorflow_datasets/core/dataset_info.py
func_name: DatasetInfo._set_splits
original_string: def _set_splits(self, split_dict): """Split setter (private method).""" # Update the dictionary representation. # Use from/to proto for a clean copy self._splits = split_dict.copy() # Update the proto del self.as_proto.splits[:] # Clear previous for split_info in split_dict.to_proto(): ...
language: python
code: (identical to original_string)
code_tokens: [ "def", "_set_splits", "(", "self", ",", "split_dict", ")", ":", "# Update the dictionary representation.", "# Use from/to proto for a clean copy", "self", ".", "_splits", "=", "split_dict", ".", "copy", "(", ")", "# Update the proto", "del", "self", ".", "as_proto", "...
docstring: Split setter (private method).
docstring_tokens: [ "Split", "setter", "(", "private", "method", ")", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/dataset_info.py#L219-L228
partition: train

repo: tensorflow/datasets
path: tensorflow_datasets/core/dataset_info.py
func_name: DatasetInfo._compute_dynamic_properties
original_string: def _compute_dynamic_properties(self, builder): """Update from the DatasetBuilder.""" # Fill other things by going over the dataset. splits = self.splits for split_info in utils.tqdm( splits.values(), desc="Computing statistics...", unit=" split"): try: split_name = split_info.name...
language: python
code: (identical to original_string)
code_tokens: [ "def", "_compute_dynamic_properties", "(", "self", ",", "builder", ")", ":", "# Fill other things by going over the dataset.", "splits", "=", "self", ".", "splits", "for", "split_info", "in", "utils", ".", "tqdm", "(", "splits", ".", "values", "(", ")", ",", "de...
docstring: Update from the DatasetBuilder.
docstring_tokens: [ "Update", "from", "the", "DatasetBuilder", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/dataset_info.py#L249-L278
partition: train

repo: tensorflow/datasets
path: tensorflow_datasets/core/dataset_info.py
func_name: DatasetInfo.write_to_directory
original_string: def write_to_directory(self, dataset_info_dir): """Write `DatasetInfo` as JSON to `dataset_info_dir`.""" # Save the metadata from the features (vocabulary, labels,...) if self.features: self.features.save_metadata(dataset_info_dir) if self.redistribution_info.license: with tf.io.gfile.GFile...
language: python
code: (identical to original_string)
code_tokens: [ "def", "write_to_directory", "(", "self", ",", "dataset_info_dir", ")", ":", "# Save the metadata from the features (vocabulary, labels,...)", "if", "self", ".", "features", ":", "self", ".", "features", ".", "save_metadata", "(", "dataset_info_dir", ")", "if", "self", ...
docstring: Write `DatasetInfo` as JSON to `dataset_info_dir`.
docstring_tokens: [ "Write", "DatasetInfo", "as", "JSON", "to", "dataset_info_dir", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/dataset_info.py#L284-L297
partition: train

repo: tensorflow/datasets
path: tensorflow_datasets/core/dataset_info.py
func_name: DatasetInfo.read_from_directory
original_string: def read_from_directory(self, dataset_info_dir): """Update DatasetInfo from the JSON file in `dataset_info_dir`. This function updates all the dynamically generated fields (num_examples, hash, time of creation,...) of the DatasetInfo. This will overwrite all previous metadata. Args: dataset...
language: python
code: (identical to original_string)
code_tokens: [ "def", "read_from_directory", "(", "self", ",", "dataset_info_dir", ")", ":", "if", "not", "dataset_info_dir", ":", "raise", "ValueError", "(", "\"Calling read_from_directory with undefined dataset_info_dir.\"", ")", "json_filename", "=", "self", ".", "_dataset_info_filenam...
docstring: Update DatasetInfo from the JSON file in `dataset_info_dir`. This function updates all the dynamically generated fields (num_examples, hash, time of creation,...) of the DatasetInfo. This will overwrite all previous metadata. Args: dataset_info_dir: `str` The directory containing the metadata f...
docstring_tokens: [ "Update", "DatasetInfo", "from", "the", "JSON", "file", "in", "dataset_info_dir", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/dataset_info.py#L299-L367
partition: train

repo: tensorflow/datasets
path: tensorflow_datasets/core/dataset_info.py
func_name: DatasetInfo.initialize_from_bucket
original_string: def initialize_from_bucket(self): """Initialize DatasetInfo from GCS bucket info files.""" # In order to support Colab, we use the HTTP GCS API to access the metadata # files. They are copied locally and then loaded. tmp_dir = tempfile.mkdtemp("tfds") data_files = gcs_utils.gcs_dataset_info_files(se...
language: python
code: (identical to original_string)
code_tokens: [ "def", "initialize_from_bucket", "(", "self", ")", ":", "# In order to support Colab, we use the HTTP GCS API to access the metadata", "# files. They are copied locally and then loaded.", "tmp_dir", "=", "tempfile", ".", "mkdtemp", "(", "\"tfds\"", ")", "data_files", "=", "gcs_ut...
docstring: Initialize DatasetInfo from GCS bucket info files.
docstring_tokens: [ "Initialize", "DatasetInfo", "from", "GCS", "bucket", "info", "files", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/dataset_info.py#L369-L381
partition: train
repo: tensorflow/datasets
path: tensorflow_datasets/image/cycle_gan.py
func_name: CycleGAN._split_generators
original_string: def _split_generators(self, dl_manager): """Returns SplitGenerators.""" url = _DL_URLS[self.builder_config.name] data_dirs = dl_manager.download_and_extract(url) path_to_dataset = os.path.join(data_dirs, tf.io.gfile.listdir(data_dirs)[0]) train_a_path = os.path.join(path_to_dataset, "trainA") ...
language: python
code: (identical to original_string)
code_tokens: [ "def", "_split_generators", "(", "self", ",", "dl_manager", ")", ":", "url", "=", "_DL_URLS", "[", "self", ".", "builder_config", ".", "name", "]", "data_dirs", "=", "dl_manager", ".", "download_and_extract", "(", "url", ")", "path_to_dataset", "=", "os", "....
docstring: Returns SplitGenerators.
docstring_tokens: [ "Returns", "SplitGenerators", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/cycle_gan.py#L108-L149
partition: train
repo: tensorflow/datasets
path: tensorflow_datasets/core/download/download_manager.py
func_name: _map_promise
original_string: def _map_promise(map_fn, all_inputs): """Map the function into each element and resolve the promise.""" all_promises = utils.map_nested(map_fn, all_inputs) # Apply the function res = utils.map_nested(_wait_on_promise, all_promises) return res
language: python
code: (identical to original_string)
code_tokens: [ "def", "_map_promise", "(", "map_fn", ",", "all_inputs", ")", ":", "all_promises", "=", "utils", ".", "map_nested", "(", "map_fn", ",", "all_inputs", ")", "# Apply the function", "res", "=", "utils", ".", "map_nested", "(", "_wait_on_promise", ",", "all_promises...
docstring: Map the function into each element and resolve the promise.
docstring_tokens: [ "Map", "the", "function", "into", "each", "element", "and", "resolve", "the", "promise", "." ]
sha: 46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
url: https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/download/download_manager.py#L392-L396
partition: train
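_map_promise above maps a download or extract function over a (possibly nested) structure of inputs and then resolves every promise. A flat-list sketch of the same fan-out/fan-in using stdlib futures instead of the promise library:

```python
from concurrent.futures import ThreadPoolExecutor

def map_promise(fn, inputs):
    """Kick off fn on every input, then wait on all results (flat lists only)."""
    with ThreadPoolExecutor() as pool:
        futures = [pool.submit(fn, x) for x in inputs]  # map the function
        return [f.result() for f in futures]            # resolve each "promise"

print(map_promise(lambda x: x * 2, [1, 2, 3]))  # [2, 4, 6]
```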
tensorflow/datasets
tensorflow_datasets/core/download/download_manager.py
DownloadManager._handle_download_result
def _handle_download_result(self, resource, tmp_dir_path, sha256, dl_size): """Store dled file to definitive place, write INFO file, return path.""" fnames = tf.io.gfile.listdir(tmp_dir_path) if len(fnames) > 1: raise AssertionError('More than one file in %s.' % tmp_dir_path) original_fname = fnam...
python
def _handle_download_result(self, resource, tmp_dir_path, sha256, dl_size): """Store dled file to definitive place, write INFO file, return path.""" fnames = tf.io.gfile.listdir(tmp_dir_path) if len(fnames) > 1: raise AssertionError('More than one file in %s.' % tmp_dir_path) original_fname = fnam...
[ "def", "_handle_download_result", "(", "self", ",", "resource", ",", "tmp_dir_path", ",", "sha256", ",", "dl_size", ")", ":", "fnames", "=", "tf", ".", "io", ".", "gfile", ".", "listdir", "(", "tmp_dir_path", ")", "if", "len", "(", "fnames", ")", ">", ...
Store dled file to definitive place, write INFO file, return path.
[ "Store", "dled", "file", "to", "definitive", "place", "write", "INFO", "file", "return", "path", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/download/download_manager.py#L196-L215
train
tensorflow/datasets
tensorflow_datasets/core/download/download_manager.py
DownloadManager._download
def _download(self, resource): """Download resource, returns Promise->path to downloaded file.""" if isinstance(resource, six.string_types): resource = resource_lib.Resource(url=resource) url = resource.url if url in self._sizes_checksums: expected_sha256 = self._sizes_checksums[url][1] ...
python
def _download(self, resource): """Download resource, returns Promise->path to downloaded file.""" if isinstance(resource, six.string_types): resource = resource_lib.Resource(url=resource) url = resource.url if url in self._sizes_checksums: expected_sha256 = self._sizes_checksums[url][1] ...
[ "def", "_download", "(", "self", ",", "resource", ")", ":", "if", "isinstance", "(", "resource", ",", "six", ".", "string_types", ")", ":", "resource", "=", "resource_lib", ".", "Resource", "(", "url", "=", "resource", ")", "url", "=", "resource", ".", ...
Download resource, returns Promise->path to downloaded file.
[ "Download", "resource", "returns", "Promise", "-", ">", "path", "to", "downloaded", "file", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/download/download_manager.py#L221-L247
train
tensorflow/datasets
tensorflow_datasets/core/download/download_manager.py
DownloadManager._extract
def _extract(self, resource): """Extract a single archive, returns Promise->path to extraction result.""" if isinstance(resource, six.string_types): resource = resource_lib.Resource(path=resource) path = resource.path extract_method = resource.extract_method if extract_method == resource_lib.E...
python
def _extract(self, resource): """Extract a single archive, returns Promise->path to extraction result.""" if isinstance(resource, six.string_types): resource = resource_lib.Resource(path=resource) path = resource.path extract_method = resource.extract_method if extract_method == resource_lib.E...
[ "def", "_extract", "(", "self", ",", "resource", ")", ":", "if", "isinstance", "(", "resource", ",", "six", ".", "string_types", ")", ":", "resource", "=", "resource_lib", ".", "Resource", "(", "path", "=", "resource", ")", "path", "=", "resource", ".", ...
Extract a single archive, returns Promise->path to extraction result.
[ "Extract", "a", "single", "archive", "returns", "Promise", "-", ">", "path", "to", "extraction", "result", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/download/download_manager.py#L251-L266
train
tensorflow/datasets
tensorflow_datasets/core/download/download_manager.py
DownloadManager._download_extract
def _download_extract(self, resource): """Download-extract `Resource` or url, returns Promise->path.""" if isinstance(resource, six.string_types): resource = resource_lib.Resource(url=resource) def callback(path): resource.path = path return self._extract(resource) return self._downloa...
python
def _download_extract(self, resource): """Download-extract `Resource` or url, returns Promise->path.""" if isinstance(resource, six.string_types): resource = resource_lib.Resource(url=resource) def callback(path): resource.path = path return self._extract(resource) return self._downloa...
[ "def", "_download_extract", "(", "self", ",", "resource", ")", ":", "if", "isinstance", "(", "resource", ",", "six", ".", "string_types", ")", ":", "resource", "=", "resource_lib", ".", "Resource", "(", "url", "=", "resource", ")", "def", "callback", "(", ...
Download-extract `Resource` or url, returns Promise->path.
[ "Download", "-", "extract", "Resource", "or", "url", "returns", "Promise", "-", ">", "path", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/download/download_manager.py#L270-L277
train
tensorflow/datasets
tensorflow_datasets/core/download/download_manager.py
DownloadManager.download_kaggle_data
def download_kaggle_data(self, competition_name): """Download data for a given Kaggle competition.""" with self._downloader.tqdm(): kaggle_downloader = self._downloader.kaggle_downloader(competition_name) urls = kaggle_downloader.competition_urls files = kaggle_downloader.competition_files ...
python
def download_kaggle_data(self, competition_name): """Download data for a given Kaggle competition.""" with self._downloader.tqdm(): kaggle_downloader = self._downloader.kaggle_downloader(competition_name) urls = kaggle_downloader.competition_urls files = kaggle_downloader.competition_files ...
[ "def", "download_kaggle_data", "(", "self", ",", "competition_name", ")", ":", "with", "self", ".", "_downloader", ".", "tqdm", "(", ")", ":", "kaggle_downloader", "=", "self", ".", "_downloader", ".", "kaggle_downloader", "(", "competition_name", ")", "urls", ...
Download data for a given Kaggle competition.
[ "Download", "data", "for", "a", "given", "Kaggle", "competition", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/download/download_manager.py#L279-L286
train
tensorflow/datasets
tensorflow_datasets/core/download/download_manager.py
DownloadManager.download
def download(self, url_or_urls): """Download given url(s). Args: url_or_urls: url or `list`/`dict` of urls to download and extract. Each url can be a `str` or `tfds.download.Resource`. Returns: downloaded_path(s): `str`, The downloaded paths matching the given input url_or_urls...
python
def download(self, url_or_urls): """Download given url(s). Args: url_or_urls: url or `list`/`dict` of urls to download and extract. Each url can be a `str` or `tfds.download.Resource`. Returns: downloaded_path(s): `str`, The downloaded paths matching the given input url_or_urls...
[ "def", "download", "(", "self", ",", "url_or_urls", ")", ":", "# Add progress bar to follow the download state", "with", "self", ".", "_downloader", ".", "tqdm", "(", ")", ":", "return", "_map_promise", "(", "self", ".", "_download", ",", "url_or_urls", ")" ]
Download given url(s). Args: url_or_urls: url or `list`/`dict` of urls to download and extract. Each url can be a `str` or `tfds.download.Resource`. Returns: downloaded_path(s): `str`, The downloaded paths matching the given input url_or_urls.
[ "Download", "given", "url", "(", "s", ")", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/download/download_manager.py#L288-L301
train
tensorflow/datasets
tensorflow_datasets/core/download/download_manager.py
DownloadManager.iter_archive
def iter_archive(self, resource): """Returns iterator over files within archive. **Important Note**: caller should read files as they are yielded. Reading out of order is slow. Args: resource: path to archive or `tfds.download.Resource`. Returns: Generator yielding tuple (path_within_...
python
def iter_archive(self, resource): """Returns iterator over files within archive. **Important Note**: caller should read files as they are yielded. Reading out of order is slow. Args: resource: path to archive or `tfds.download.Resource`. Returns: Generator yielding tuple (path_within_...
[ "def", "iter_archive", "(", "self", ",", "resource", ")", ":", "if", "isinstance", "(", "resource", ",", "six", ".", "string_types", ")", ":", "resource", "=", "resource_lib", ".", "Resource", "(", "path", "=", "resource", ")", "return", "extractor", ".", ...
Returns iterator over files within archive. **Important Note**: caller should read files as they are yielded. Reading out of order is slow. Args: resource: path to archive or `tfds.download.Resource`. Returns: Generator yielding tuple (path_within_archive, file_obj).
[ "Returns", "iterator", "over", "files", "within", "archive", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/download/download_manager.py#L303-L317
train
tensorflow/datasets
tensorflow_datasets/core/download/download_manager.py
DownloadManager.extract
def extract(self, path_or_paths): """Extract given path(s). Args: path_or_paths: path or `list`/`dict` of path of file to extract. Each path can be a `str` or `tfds.download.Resource`. If not explicitly specified in `Resource`, the extraction method is deduced from downloaded file name. ...
python
def extract(self, path_or_paths): """Extract given path(s). Args: path_or_paths: path or `list`/`dict` of path of file to extract. Each path can be a `str` or `tfds.download.Resource`. If not explicitly specified in `Resource`, the extraction method is deduced from downloaded file name. ...
[ "def", "extract", "(", "self", ",", "path_or_paths", ")", ":", "# Add progress bar to follow the download state", "with", "self", ".", "_extractor", ".", "tqdm", "(", ")", ":", "return", "_map_promise", "(", "self", ".", "_extract", ",", "path_or_paths", ")" ]
Extract given path(s). Args: path_or_paths: path or `list`/`dict` of path of file to extract. Each path can be a `str` or `tfds.download.Resource`. If not explicitly specified in `Resource`, the extraction method is deduced from downloaded file name. Returns: extracted_path(s): `s...
[ "Extract", "given", "path", "(", "s", ")", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/download/download_manager.py#L319-L335
train
tensorflow/datasets
tensorflow_datasets/core/download/download_manager.py
DownloadManager.download_and_extract
def download_and_extract(self, url_or_urls): """Download and extract given url_or_urls. Is roughly equivalent to: ``` extracted_paths = dl_manager.extract(dl_manager.download(url_or_urls)) ``` Args: url_or_urls: url or `list`/`dict` of urls to download and extract. Each url can ...
python
def download_and_extract(self, url_or_urls): """Download and extract given url_or_urls. Is roughly equivalent to: ``` extracted_paths = dl_manager.extract(dl_manager.download(url_or_urls)) ``` Args: url_or_urls: url or `list`/`dict` of urls to download and extract. Each url can ...
[ "def", "download_and_extract", "(", "self", ",", "url_or_urls", ")", ":", "# Add progress bar to follow the download state", "with", "self", ".", "_downloader", ".", "tqdm", "(", ")", ":", "with", "self", ".", "_extractor", ".", "tqdm", "(", ")", ":", "return", ...
Download and extract given url_or_urls. Is roughly equivalent to: ``` extracted_paths = dl_manager.extract(dl_manager.download(url_or_urls)) ``` Args: url_or_urls: url or `list`/`dict` of urls to download and extract. Each url can be a `str` or `tfds.download.Resource`. If not ...
[ "Download", "and", "extract", "given", "url_or_urls", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/download/download_manager.py#L337-L359
train
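The three DownloadManager records above (`iter_archive`, `extract`, `download_and_extract`) describe one API surface. A minimal usage sketch, assuming `dl_manager` is the manager tfds passes to a builder's `_split_generators`; the URLs and split names are placeholders:

```python
# Hypothetical usage of the DownloadManager API documented above.
_URLS = {
    "train": "https://example.com/train.tar.gz",  # placeholder URL
    "test": "https://example.com/test.tar.gz",    # placeholder URL
}

def get_split_paths(dl_manager):
  # One-step form: download then extract, preserving the dict structure.
  extracted = dl_manager.download_and_extract(_URLS)
  # Roughly equivalent two-step form, per the docstring above:
  #   extracted = dl_manager.extract(dl_manager.download(_URLS))
  return extracted["train"], extracted["test"]
```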
tensorflow/datasets
tensorflow_datasets/core/download/download_manager.py
DownloadManager.manual_dir
def manual_dir(self): """Returns the directory containing the manually extracted data.""" if not tf.io.gfile.exists(self._manual_dir): raise AssertionError( 'Manual directory {} does not exist. Create it and download/extract ' 'dataset artifacts in there.'.format(self._manual_dir)) ...
python
def manual_dir(self): """Returns the directory containing the manually extracted data.""" if not tf.io.gfile.exists(self._manual_dir): raise AssertionError( 'Manual directory {} does not exist. Create it and download/extract ' 'dataset artifacts in there.'.format(self._manual_dir)) ...
[ "def", "manual_dir", "(", "self", ")", ":", "if", "not", "tf", ".", "io", ".", "gfile", ".", "exists", "(", "self", ".", "_manual_dir", ")", ":", "raise", "AssertionError", "(", "'Manual directory {} does not exist. Create it and download/extract '", "'dataset artif...
Returns the directory containing the manually extracted data.
[ "Returns", "the", "directory", "containing", "the", "manually", "extracted", "data", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/download/download_manager.py#L362-L368
train
tensorflow/datasets
tensorflow_datasets/image/cifar10_corrupted.py
_make_builder_configs
def _make_builder_configs(): """Construct a list of BuilderConfigs. Construct a list of 75 Cifar10CorruptedConfig objects, corresponding to the 15 corruption types and 5 severities. Returns: A list of 75 Cifar10CorruptedConfig objects. """ config_list = [] for corruption in _CORRUPTIONS: for sev...
python
def _make_builder_configs(): """Construct a list of BuilderConfigs. Construct a list of 75 Cifar10CorruptedConfig objects, corresponding to the 15 corruption types and 5 severities. Returns: A list of 75 Cifar10CorruptedConfig objects. """ config_list = [] for corruption in _CORRUPTIONS: for sev...
[ "def", "_make_builder_configs", "(", ")", ":", "config_list", "=", "[", "]", "for", "corruption", "in", "_CORRUPTIONS", ":", "for", "severity", "in", "range", "(", "1", ",", "6", ")", ":", "config_list", ".", "append", "(", "Cifar10CorruptedConfig", "(", "...
Construct a list of BuilderConfigs. Construct a list of 75 Cifar10CorruptedConfig objects, corresponding to the 15 corruption types and 5 severities. Returns: A list of 75 Cifar10CorruptedConfig objects.
[ "Construct", "a", "list", "of", "BuilderConfigs", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/cifar10_corrupted.py#L93-L114
train
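The config-enumeration pattern in `_make_builder_configs` (corruption types crossed with severities 1-5) is easy to sketch standalone. The `Config` namedtuple and the abbreviated corruption list below are stand-ins, not the real `Cifar10CorruptedConfig` or the full 15-entry `_CORRUPTIONS`:

```python
import collections

# Stand-in for Cifar10CorruptedConfig; the real class carries more fields.
Config = collections.namedtuple("Config", ["name", "corruption", "severity"])

_CORRUPTIONS = ["gaussian_noise", "shot_noise", "impulse_noise"]  # abbreviated

def make_configs():
  configs = []
  for corruption in _CORRUPTIONS:
    for severity in range(1, 6):  # severities 1..5, as in the docstring
      configs.append(Config("%s_%d" % (corruption, severity),
                            corruption, severity))
  return configs

assert len(make_configs()) == len(_CORRUPTIONS) * 5
```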
tensorflow/datasets
tensorflow_datasets/image/cifar10_corrupted.py
Cifar10Corrupted._split_generators
def _split_generators(self, dl_manager): """Return the test split of Cifar10. Args: dl_manager: download manager object. Returns: test split. """ path = dl_manager.download_and_extract(_DOWNLOAD_URL) return [ tfds.core.SplitGenerator( name=tfds.Split.TEST, ...
python
def _split_generators(self, dl_manager): """Return the test split of Cifar10. Args: dl_manager: download manager object. Returns: test split. """ path = dl_manager.download_and_extract(_DOWNLOAD_URL) return [ tfds.core.SplitGenerator( name=tfds.Split.TEST, ...
[ "def", "_split_generators", "(", "self", ",", "dl_manager", ")", ":", "path", "=", "dl_manager", ".", "download_and_extract", "(", "_DOWNLOAD_URL", ")", "return", "[", "tfds", ".", "core", ".", "SplitGenerator", "(", "name", "=", "tfds", ".", "Split", ".", ...
Return the test split of Cifar10. Args: dl_manager: download manager object. Returns: test split.
[ "Return", "the", "test", "split", "of", "Cifar10", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/cifar10_corrupted.py#L138-L153
train
tensorflow/datasets
tensorflow_datasets/image/cifar10_corrupted.py
Cifar10Corrupted._generate_examples
def _generate_examples(self, data_dir): """Generate corrupted Cifar10 test data. Apply corruptions to the raw images according to self.corruption_type. Args: data_dir: root directory of downloaded dataset Yields: dictionary with image file and label. """ corruption = self.builder_...
python
def _generate_examples(self, data_dir): """Generate corrupted Cifar10 test data. Apply corruptions to the raw images according to self.corruption_type. Args: data_dir: root directory of downloaded dataset Yields: dictionary with image file and label. """ corruption = self.builder_...
[ "def", "_generate_examples", "(", "self", ",", "data_dir", ")", ":", "corruption", "=", "self", ".", "builder_config", ".", "corruption", "severity", "=", "self", ".", "builder_config", ".", "severity", "images_file", "=", "os", ".", "path", ".", "join", "("...
Generate corrupted Cifar10 test data. Apply corruptions to the raw images according to self.corruption_type. Args: data_dir: root directory of downloaded dataset Yields: dictionary with image file and label.
[ "Generate", "corrupted", "Cifar10", "test", "data", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/cifar10_corrupted.py#L155-L189
train
tensorflow/datasets
tensorflow_datasets/scripts/document_datasets.py
document_single_builder
def document_single_builder(builder): """Doc string for a single builder, with or without configs.""" mod_name = builder.__class__.__module__ cls_name = builder.__class__.__name__ mod_file = sys.modules[mod_name].__file__ if mod_file.endswith("pyc"): mod_file = mod_file[:-1] description_prefix = "" ...
python
def document_single_builder(builder): """Doc string for a single builder, with or without configs.""" mod_name = builder.__class__.__module__ cls_name = builder.__class__.__name__ mod_file = sys.modules[mod_name].__file__ if mod_file.endswith("pyc"): mod_file = mod_file[:-1] description_prefix = "" ...
[ "def", "document_single_builder", "(", "builder", ")", ":", "mod_name", "=", "builder", ".", "__class__", ".", "__module__", "cls_name", "=", "builder", ".", "__class__", ".", "__name__", "mod_file", "=", "sys", ".", "modules", "[", "mod_name", "]", ".", "__...
Doc string for a single builder, with or without configs.
[ "Doc", "string", "for", "a", "single", "builder", "with", "or", "without", "configs", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/scripts/document_datasets.py#L196-L265
train
tensorflow/datasets
tensorflow_datasets/scripts/document_datasets.py
make_module_to_builder_dict
def make_module_to_builder_dict(datasets=None): """Get all builders organized by module in nested dicts.""" # pylint: disable=g-long-lambda # dict to hold tfds->image->mnist->[builders] module_to_builder = collections.defaultdict( lambda: collections.defaultdict( lambda: collections.defaultdict(...
python
def make_module_to_builder_dict(datasets=None): """Get all builders organized by module in nested dicts.""" # pylint: disable=g-long-lambda # dict to hold tfds->image->mnist->[builders] module_to_builder = collections.defaultdict( lambda: collections.defaultdict( lambda: collections.defaultdict(...
[ "def", "make_module_to_builder_dict", "(", "datasets", "=", "None", ")", ":", "# pylint: disable=g-long-lambda", "# dict to hold tfds->image->mnist->[builders]", "module_to_builder", "=", "collections", ".", "defaultdict", "(", "lambda", ":", "collections", ".", "defaultdict"...
Get all builders organized by module in nested dicts.
[ "Get", "all", "builders", "organized", "by", "module", "in", "nested", "dicts", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/scripts/document_datasets.py#L275-L305
train
tensorflow/datasets
tensorflow_datasets/scripts/document_datasets.py
_pprint_features_dict
def _pprint_features_dict(features_dict, indent=0, add_prefix=True): """Pretty-print tfds.features.FeaturesDict.""" first_last_indent_str = " " * indent indent_str = " " * (indent + 4) first_line = "%s%s({" % ( first_last_indent_str if add_prefix else "", type(features_dict).__name__, ) lines = ...
python
def _pprint_features_dict(features_dict, indent=0, add_prefix=True): """Pretty-print tfds.features.FeaturesDict.""" first_last_indent_str = " " * indent indent_str = " " * (indent + 4) first_line = "%s%s({" % ( first_last_indent_str if add_prefix else "", type(features_dict).__name__, ) lines = ...
[ "def", "_pprint_features_dict", "(", "features_dict", ",", "indent", "=", "0", ",", "add_prefix", "=", "True", ")", ":", "first_last_indent_str", "=", "\" \"", "*", "indent", "indent_str", "=", "\" \"", "*", "(", "indent", "+", "4", ")", "first_line", "=", ...
Pretty-print tfds.features.FeaturesDict.
[ "Pretty", "-", "print", "tfds", ".", "features", ".", "FeaturesDict", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/scripts/document_datasets.py#L308-L325
train
tensorflow/datasets
tensorflow_datasets/scripts/document_datasets.py
make_statistics_information
def make_statistics_information(info): """Make statistics information table.""" if not info.splits.total_num_examples: # That means that we have yet to calculate the statistics for this. return "None computed" stats = [(info.splits.total_num_examples, "ALL")] for split_name, split_info in info.splits.i...
python
def make_statistics_information(info): """Make statistics information table.""" if not info.splits.total_num_examples: # That means that we have yet to calculate the statistics for this. return "None computed" stats = [(info.splits.total_num_examples, "ALL")] for split_name, split_info in info.splits.i...
[ "def", "make_statistics_information", "(", "info", ")", ":", "if", "not", "info", ".", "splits", ".", "total_num_examples", ":", "# That means that we have yet to calculate the statistics for this.", "return", "\"None computed\"", "stats", "=", "[", "(", "info", ".", "s...
Make statistics information table.
[ "Make", "statistics", "information", "table", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/scripts/document_datasets.py#L337-L351
train
tensorflow/datasets
tensorflow_datasets/scripts/document_datasets.py
dataset_docs_str
def dataset_docs_str(datasets=None): """Create dataset documentation string for given datasets. Args: datasets: list of datasets for which to create documentation. If None, then all available datasets will be used. Returns: string describing the datasets (in the MarkDown format). """ m...
python
def dataset_docs_str(datasets=None): """Create dataset documentation string for given datasets. Args: datasets: list of datasets for which to create documentation. If None, then all available datasets will be used. Returns: string describing the datasets (in the MarkDown format). """ m...
[ "def", "dataset_docs_str", "(", "datasets", "=", "None", ")", ":", "module_to_builder", "=", "make_module_to_builder_dict", "(", "datasets", ")", "sections", "=", "sorted", "(", "list", "(", "module_to_builder", ".", "keys", "(", ")", ")", ")", "section_tocs", ...
Create dataset documentation string for given datasets. Args: datasets: list of datasets for which to create documentation. If None, then all available datasets will be used. Returns: string describing the datasets (in the MarkDown format).
[ "Create", "dataset", "documentation", "string", "for", "given", "datasets", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/scripts/document_datasets.py#L354-L383
train
tensorflow/datasets
tensorflow_datasets/scripts/document_datasets.py
schema_org
def schema_org(builder): # pylint: disable=line-too-long """Builds schema.org microdata for DatasetSearch from DatasetBuilder. Markup spec: https://developers.google.com/search/docs/data-types/dataset#dataset Testing tool: https://search.google.com/structured-data/testing-tool For Google Dataset Search: http...
python
def schema_org(builder): # pylint: disable=line-too-long """Builds schema.org microdata for DatasetSearch from DatasetBuilder. Markup spec: https://developers.google.com/search/docs/data-types/dataset#dataset Testing tool: https://search.google.com/structured-data/testing-tool For Google Dataset Search: http...
[ "def", "schema_org", "(", "builder", ")", ":", "# pylint: disable=line-too-long", "# pylint: enable=line-too-long", "properties", "=", "[", "(", "lambda", "x", ":", "x", ".", "name", ",", "SCHEMA_ORG_NAME", ")", ",", "(", "lambda", "x", ":", "x", ".", "descrip...
Builds schema.org microdata for DatasetSearch from DatasetBuilder. Markup spec: https://developers.google.com/search/docs/data-types/dataset#dataset Testing tool: https://search.google.com/structured-data/testing-tool For Google Dataset Search: https://toolbox.google.com/datasetsearch Microdata format was cho...
[ "Builds", "schema", ".", "org", "microdata", "for", "DatasetSearch", "from", "DatasetBuilder", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/scripts/document_datasets.py#L414-L449
train
tensorflow/datasets
tensorflow_datasets/image/corruptions.py
disk
def disk(radius, alias_blur=0.1, dtype=np.float32): """Generating a Gaussian blurring kernel with disk shape. Generating a Gaussian blurring kernel with disk shape using cv2 API. Args: radius: integer, radius of blurring kernel. alias_blur: float, standard deviation of Gaussian blurring. dtype: data...
python
def disk(radius, alias_blur=0.1, dtype=np.float32): """Generating a Gaussian blurring kernel with disk shape. Generating a Gaussian blurring kernel with disk shape using cv2 API. Args: radius: integer, radius of blurring kernel. alias_blur: float, standard deviation of Gaussian blurring. dtype: data...
[ "def", "disk", "(", "radius", ",", "alias_blur", "=", "0.1", ",", "dtype", "=", "np", ".", "float32", ")", ":", "if", "radius", "<=", "8", ":", "length", "=", "np", ".", "arange", "(", "-", "8", ",", "8", "+", "1", ")", "ksize", "=", "(", "3"...
Generate a Gaussian blurring kernel with disk shape using the cv2 API. Args: radius: integer, radius of blurring kernel. alias_blur: float, standard deviation of Gaussian blurring. dtype: data type of kernel. Returns: cv2 object of the Gaussia...
[ "Generating", "a", "Gaussian", "blurring", "kernel", "with", "disk", "shape", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/corruptions.py#L46-L70
train
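A sketch of the disk kernel, built from the constants visible in the snippet (`radius <= 8` selects a fixed coordinate grid and a 3x3 blur). The meshgrid-and-threshold construction follows the ImageNet-C reference code and is an assumption here, not a copy of the truncated source:

```python
import cv2
import numpy as np

def disk_sketch(radius, alias_blur=0.1, dtype=np.float32):
  if radius <= 8:
    coords = np.arange(-8, 8 + 1)
    ksize = (3, 3)
  else:
    coords = np.arange(-radius, radius + 1)
    ksize = (5, 5)
  xs, ys = np.meshgrid(coords, coords)
  # Filled circle, normalized to sum to 1 so filtering preserves brightness.
  aliased_disk = np.array((xs ** 2 + ys ** 2) <= radius ** 2, dtype=dtype)
  aliased_disk /= aliased_disk.sum()
  # A small Gaussian blur softens the hard edge (anti-aliasing).
  return cv2.GaussianBlur(aliased_disk, ksize=ksize, sigmaX=alias_blur)
```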
tensorflow/datasets
tensorflow_datasets/image/corruptions.py
clipped_zoom
def clipped_zoom(img, zoom_factor): """Zoom image with clipping. Zoom the central part of the image and clip extra pixels. Args: img: numpy array, uncorrupted image. zoom_factor: numpy array, a sequence of float numbers for zoom factor. Returns: numpy array, zoomed image after clipping. """ h...
python
def clipped_zoom(img, zoom_factor): """Zoom image with clipping. Zoom the central part of the image and clip extra pixels. Args: img: numpy array, uncorrupted image. zoom_factor: numpy array, a sequence of float numbers for zoom factor. Returns: numpy array, zoomed image after clipping. """ h...
[ "def", "clipped_zoom", "(", "img", ",", "zoom_factor", ")", ":", "h", "=", "img", ".", "shape", "[", "0", "]", "ch", "=", "int", "(", "np", ".", "ceil", "(", "h", "/", "float", "(", "zoom_factor", ")", ")", ")", "top_h", "=", "(", "h", "-", "...
Zoom image with clipping. Zoom the central part of the image and clip extra pixels. Args: img: numpy array, uncorrupted image. zoom_factor: numpy array, a sequence of float numbers for zoom factor. Returns: numpy array, zoomed image after clipping.
[ "Zoom", "image", "with", "clipping", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/corruptions.py#L73-L101
train
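A hedged sketch of clipped zoom for a square HxWxC image, using the crop arithmetic visible in the snippet (`ch = ceil(h / zoom_factor)` with a centered offset). `scipy.ndimage.zoom` stands in for whatever interpolation the original uses:

```python
import numpy as np
import scipy.ndimage

def clipped_zoom_sketch(img, zoom_factor):
  # Assumes a square image, as in the CIFAR use case.
  h = img.shape[0]
  ch = int(np.ceil(h / float(zoom_factor)))  # side of the central crop
  top = (h - ch) // 2
  crop = img[top:top + ch, top:top + ch]
  zoomed = scipy.ndimage.zoom(crop, (zoom_factor, zoom_factor, 1), order=1)
  # Clip extra pixels so the output matches the input size.
  trim = (zoomed.shape[0] - h) // 2
  return zoomed[trim:trim + h, trim:trim + h]
```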
tensorflow/datasets
tensorflow_datasets/image/corruptions.py
plasma_fractal
def plasma_fractal(mapsize=512, wibbledecay=3): """Generate a heightmap using diamond-square algorithm. Modification of the algorithm in https://github.com/FLHerne/mapgen/blob/master/diamondsquare.py Args: mapsize: side length of the heightmap, must be a power of two. wibbledecay: integer, decay facto...
python
def plasma_fractal(mapsize=512, wibbledecay=3): """Generate a heightmap using diamond-square algorithm. Modification of the algorithm in https://github.com/FLHerne/mapgen/blob/master/diamondsquare.py Args: mapsize: side length of the heightmap, must be a power of two. wibbledecay: integer, decay facto...
[ "def", "plasma_fractal", "(", "mapsize", "=", "512", ",", "wibbledecay", "=", "3", ")", ":", "if", "mapsize", "&", "(", "mapsize", "-", "1", ")", "!=", "0", ":", "raise", "ValueError", "(", "'mapsize must be a power of two.'", ")", "maparray", "=", "np", ...
Generate a heightmap using diamond-square algorithm. Modification of the algorithm in https://github.com/FLHerne/mapgen/blob/master/diamondsquare.py Args: mapsize: side length of the heightmap, must be a power of two. wibbledecay: integer, decay factor. Returns: numpy 2d array, side length 'mapsi...
[ "Generate", "a", "heightmap", "using", "diamond", "-", "square", "algorithm", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/corruptions.py#L104-L159
train
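The diamond-square generator is short enough to sketch in full. This version follows the ImageNet-C reference implementation that the tfds code derives from, so the `wibble` magnitude and the final normalization are assumptions rather than a copy of the truncated snippet:

```python
import numpy as np

def plasma_fractal_sketch(mapsize=256, wibbledecay=3):
  """Diamond-square heightmap, normalized to [0, 1]."""
  if mapsize & (mapsize - 1) != 0:
    raise ValueError("mapsize must be a power of two.")
  maparray = np.empty((mapsize, mapsize), dtype=np.float64)
  maparray[0, 0] = 0
  stepsize = mapsize
  wibble = 100.0

  def wibbledmean(array):
    return array / 4 + wibble * np.random.uniform(-wibble, wibble, array.shape)

  def fillsquares():
    # Diamond step: square centers from their four corners (with wraparound).
    cornerref = maparray[0:mapsize:stepsize, 0:mapsize:stepsize]
    squareaccum = cornerref + np.roll(cornerref, 1, axis=0)
    squareaccum += np.roll(squareaccum, 1, axis=1)
    maparray[stepsize // 2:mapsize:stepsize,
             stepsize // 2:mapsize:stepsize] = wibbledmean(squareaccum)

  def filldiamonds():
    # Square step: diamond centers from their four neighbors.
    drgrid = maparray[stepsize // 2:mapsize:stepsize,
                      stepsize // 2:mapsize:stepsize]
    ulgrid = maparray[0:mapsize:stepsize, 0:mapsize:stepsize]
    ldrsum = drgrid + np.roll(drgrid, 1, axis=0)
    lulsum = ulgrid + np.roll(ulgrid, -1, axis=1)
    maparray[0:mapsize:stepsize,
             stepsize // 2:mapsize:stepsize] = wibbledmean(ldrsum + lulsum)
    tdrsum = drgrid + np.roll(drgrid, 1, axis=1)
    tulsum = ulgrid + np.roll(ulgrid, -1, axis=0)
    maparray[stepsize // 2:mapsize:stepsize,
             0:mapsize:stepsize] = wibbledmean(tdrsum + tulsum)

  while stepsize >= 2:
    fillsquares()
    filldiamonds()
    stepsize //= 2
    wibble /= wibbledecay

  maparray -= maparray.min()
  return maparray / maparray.max()
```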
tensorflow/datasets
tensorflow_datasets/image/corruptions.py
gaussian_noise
def gaussian_noise(x, severity=1): """Gaussian noise corruption to images. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,255]. Added Gaussian noise. """ c = [.08, .12...
python
def gaussian_noise(x, severity=1): """Gaussian noise corruption to images. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,255]. Added Gaussian noise. """ c = [.08, .12...
[ "def", "gaussian_noise", "(", "x", ",", "severity", "=", "1", ")", ":", "c", "=", "[", ".08", ",", ".12", ",", "0.18", ",", "0.26", ",", "0.38", "]", "[", "severity", "-", "1", "]", "x", "=", "np", ".", "array", "(", "x", ")", "/", "255.", ...
Gaussian noise corruption to images. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixels in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,255]. Added Gaussian noise.
[ "Gaussian", "noise", "corruption", "to", "images", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/corruptions.py#L167-L180
train
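A minimal numpy sketch of the Gaussian-noise corruption, using the severity constants visible in the snippet:

```python
import numpy as np

def gaussian_noise_sketch(x, severity=1):
  c = [.08, .12, 0.18, 0.26, 0.38][severity - 1]  # noise std per severity
  x = np.array(x) / 255.
  noisy = x + np.random.normal(size=x.shape, scale=c)
  return (np.clip(noisy, 0, 1) * 255).astype(np.uint8)
```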
tensorflow/datasets
tensorflow_datasets/image/corruptions.py
shot_noise
def shot_noise(x, severity=1): """Shot noise corruption to images. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,255]. Added shot noise. """ c = [60, 25, 12, 5, 3][se...
python
def shot_noise(x, severity=1): """Shot noise corruption to images. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,255]. Added shot noise. """ c = [60, 25, 12, 5, 3][se...
[ "def", "shot_noise", "(", "x", ",", "severity", "=", "1", ")", ":", "c", "=", "[", "60", ",", "25", ",", "12", ",", "5", ",", "3", "]", "[", "severity", "-", "1", "]", "x", "=", "np", ".", "array", "(", "x", ")", "/", "255.", "x_clip", "=...
Shot noise corruption to images. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixels in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,255]. Added shot noise.
[ "Shot", "noise", "corruption", "to", "images", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/corruptions.py#L183-L196
train
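A minimal numpy sketch of shot noise under the standard Poisson formulation, with the severity constants visible in the snippet:

```python
import numpy as np

def shot_noise_sketch(x, severity=1):
  c = [60, 25, 12, 5, 3][severity - 1]  # photon-count scale per severity
  x = np.array(x) / 255.
  noisy = np.random.poisson(x * c) / float(c)  # Poisson (shot) noise
  return (np.clip(noisy, 0, 1) * 255).astype(np.uint8)
```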
tensorflow/datasets
tensorflow_datasets/image/corruptions.py
impulse_noise
def impulse_noise(x, severity=1): """Impulse noise corruption to images. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,255]. Added impulse noise. """ c = [.03, .06, ....
python
def impulse_noise(x, severity=1): """Impulse noise corruption to images. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,255]. Added impulse noise. """ c = [.03, .06, ....
[ "def", "impulse_noise", "(", "x", ",", "severity", "=", "1", ")", ":", "c", "=", "[", ".03", ",", ".06", ",", ".09", ",", "0.17", ",", "0.27", "]", "[", "severity", "-", "1", "]", "x", "=", "tfds", ".", "core", ".", "lazy_imports", ".", "skimag...
Impulse noise corruption to images. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixels in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,255]. Added impulse noise.
[ "Impulse", "noise", "corruption", "to", "images", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/corruptions.py#L199-L213
train
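A sketch of impulse (salt-and-pepper) noise via `skimage.util.random_noise`, which the snippet's lazy `skimage` import suggests; the `mode="s&p"` call is an assumption based on the ImageNet-C reference:

```python
import numpy as np
import skimage.util

def impulse_noise_sketch(x, severity=1):
  c = [.03, .06, .09, 0.17, 0.27][severity - 1]  # fraction of corrupted pixels
  noisy = skimage.util.random_noise(np.array(x) / 255., mode="s&p", amount=c)
  return (np.clip(noisy, 0, 1) * 255).astype(np.uint8)
```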
tensorflow/datasets
tensorflow_datasets/image/corruptions.py
defocus_blur
def defocus_blur(x, severity=1): """Defocus blurring to images. Apply defocus blurring to images using Gaussian kernel. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,2...
python
def defocus_blur(x, severity=1): """Defocus blurring to images. Apply defocus blurring to images using Gaussian kernel. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,2...
[ "def", "defocus_blur", "(", "x", ",", "severity", "=", "1", ")", ":", "c", "=", "[", "(", "3", ",", "0.1", ")", ",", "(", "4", ",", "0.5", ")", ",", "(", "6", ",", "0.5", ")", ",", "(", "8", ",", "0.5", ")", ",", "(", "10", ",", "0.5", ...
Defocus blurring to images. Apply defocus blurring to images using a Gaussian kernel. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixels in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,255]. Applied defocus blur.
[ "Defocus", "blurring", "to", "images", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/corruptions.py#L216-L236
train
tensorflow/datasets
tensorflow_datasets/image/corruptions.py
frosted_glass_blur
def frosted_glass_blur(x, severity=1): """Frosted glass blurring to images. Apply frosted glass blurring to images by shuffling pixels locally. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image w...
python
def frosted_glass_blur(x, severity=1): """Frosted glass blurring to images. Apply frosted glass blurring to images by shuffling pixels locally. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image w...
[ "def", "frosted_glass_blur", "(", "x", ",", "severity", "=", "1", ")", ":", "# sigma, max_delta, iterations", "c", "=", "[", "(", "0.7", ",", "1", ",", "2", ")", ",", "(", "0.9", ",", "2", ",", "1", ")", ",", "(", "1", ",", "2", ",", "3", ")", ...
Frosted glass blurring to images. Apply frosted glass blurring to images by shuffling pixels locally. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixels in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,255]. Applied frosted...
[ "Frosted", "glass", "blurring", "to", "images", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/corruptions.py#L239-L270
train
tensorflow/datasets
tensorflow_datasets/image/corruptions.py
zoom_blur
def zoom_blur(x, severity=1): """Zoom blurring to images. Applying zoom blurring to images by zooming the central part of the images. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 ...
python
def zoom_blur(x, severity=1): """Zoom blurring to images. Applying zoom blurring to images by zooming the central part of the images. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 ...
[ "def", "zoom_blur", "(", "x", ",", "severity", "=", "1", ")", ":", "c", "=", "[", "np", ".", "arange", "(", "1", ",", "1.11", ",", "0.01", ")", ",", "np", ".", "arange", "(", "1", ",", "1.16", ",", "0.01", ")", ",", "np", ".", "arange", "("...
Zoom blurring to images. Apply zoom blurring to images by zooming the central part of the images. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixels in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,255]. Applied zoom blu...
[ "Zoom", "blurring", "to", "images", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/corruptions.py#L273-L298
train
tensorflow/datasets
tensorflow_datasets/image/corruptions.py
fog
def fog(x, severity=1): """Fog corruption to images. Adding fog to images. Fog is generated by diamond-square algorithm. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,...
python
def fog(x, severity=1): """Fog corruption to images. Adding fog to images. Fog is generated by diamond-square algorithm. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,...
[ "def", "fog", "(", "x", ",", "severity", "=", "1", ")", ":", "c", "=", "[", "(", "1.5", ",", "2", ")", ",", "(", "2.", ",", "2", ")", ",", "(", "2.5", ",", "1.7", ")", ",", "(", "2.5", ",", "1.5", ")", ",", "(", "3.", ",", "1.4", ")",...
Fog corruption to images. Add fog to images. Fog is generated by the diamond-square algorithm. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixels in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,255]. Added fog.
[ "Fog", "corruption", "to", "images", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/corruptions.py#L301-L326
train
tensorflow/datasets
tensorflow_datasets/image/corruptions.py
brightness
def brightness(x, severity=1): """Change brightness of images. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,255]. Changed brightness. """ c = [.1, .2, .3, .4, .5][se...
python
def brightness(x, severity=1): """Change brightness of images. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,255]. Changed brightness. """ c = [.1, .2, .3, .4, .5][se...
[ "def", "brightness", "(", "x", ",", "severity", "=", "1", ")", ":", "c", "=", "[", ".1", ",", ".2", ",", ".3", ",", ".4", ",", ".5", "]", "[", "severity", "-", "1", "]", "x", "=", "np", ".", "array", "(", "x", ")", "/", "255.", "x", "=", ...
Change brightness of images. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixels in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,255]. Changed brightness.
[ "Change", "brightness", "of", "images", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/corruptions.py#L329-L346
train
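The snippet truncates right after the [0,1] rescale; the ImageNet-C reference implements brightness as a shift of the HSV value channel, which is assumed in this sketch:

```python
import numpy as np
import skimage.color

def brightness_sketch(x, severity=1):
  c = [.1, .2, .3, .4, .5][severity - 1]
  x = np.array(x) / 255.
  hsv = skimage.color.rgb2hsv(x)
  hsv[:, :, 2] = np.clip(hsv[:, :, 2] + c, 0, 1)  # lift the value channel
  out = skimage.color.hsv2rgb(hsv)
  return (np.clip(out, 0, 1) * 255).astype(np.uint8)
```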
tensorflow/datasets
tensorflow_datasets/image/corruptions.py
contrast
def contrast(x, severity=1): """Change contrast of images. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,255]. Changed contrast. """ c = [0.4, .3, .2, .1, .05][severi...
python
def contrast(x, severity=1): """Change contrast of images. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,255]. Changed contrast. """ c = [0.4, .3, .2, .1, .05][severi...
[ "def", "contrast", "(", "x", ",", "severity", "=", "1", ")", ":", "c", "=", "[", "0.4", ",", ".3", ",", ".2", ",", ".1", ",", ".05", "]", "[", "severity", "-", "1", "]", "x", "=", "np", ".", "array", "(", "x", ")", "/", "255.", "means", "...
Change contrast of images. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixels in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,255]. Changed contrast.
[ "Change", "contrast", "of", "images", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/corruptions.py#L349-L364
train
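A minimal numpy sketch of the contrast corruption: squeeze each pixel toward the per-channel mean by the severity factor, matching the `means` computation visible in the tokens:

```python
import numpy as np

def contrast_sketch(x, severity=1):
  c = [0.4, .3, .2, .1, .05][severity - 1]  # smaller c = lower contrast
  x = np.array(x) / 255.
  means = np.mean(x, axis=(0, 1), keepdims=True)  # per-channel mean
  out = (x - means) * c + means                   # squeeze toward the mean
  return (np.clip(out, 0, 1) * 255).astype(np.uint8)
```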
tensorflow/datasets
tensorflow_datasets/image/corruptions.py
elastic
def elastic(x, severity=1): """Conduct elastic transform to images. Elastic transform is performed on small patches of the images. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pix...
python
def elastic(x, severity=1): """Conduct elastic transform to images. Elastic transform is performed on small patches of the images. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pix...
[ "def", "elastic", "(", "x", ",", "severity", "=", "1", ")", ":", "c", "=", "[", "(", "244", "*", "2", ",", "244", "*", "0.7", ",", "244", "*", "0.1", ")", ",", "(", "244", "*", "2", ",", "244", "*", "0.08", ",", "244", "*", "0.2", ")", ...
Conduct elastic transform to images. Elastic transform is performed on small patches of the images. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixels in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,255]. Applied elastic t...
[ "Conduct", "elastic", "transform", "to", "images", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/corruptions.py#L367-L425
train
tensorflow/datasets
tensorflow_datasets/image/corruptions.py
pixelate
def pixelate(x, severity=1): """Pixelate images. Conduct pixelating corruptions to images by first shrinking the images and then resizing to original size. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255]. severity: integer, severity of corruption. Returns: numpy ...
python
def pixelate(x, severity=1): """Pixelate images. Conduct pixelating corruptions to images by first shrinking the images and then resizing to original size. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255]. severity: integer, severity of corruption. Returns: numpy ...
[ "def", "pixelate", "(", "x", ",", "severity", "=", "1", ")", ":", "c", "=", "[", "0.6", ",", "0.5", ",", "0.4", ",", "0.3", ",", "0.25", "]", "[", "severity", "-", "1", "]", "shape", "=", "x", ".", "shape", "x", "=", "tfds", ".", "core", "....
Pixelate images. Conduct pixelating corruptions to images by first shrinking the images and then resizing to the original size. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixels in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in ...
[ "Pixelate", "images", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/corruptions.py#L428-L447
train
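A PIL sketch of pixelation: shrink by the severity factor, then resize back. The `Image.BOX` resampling filter is an assumption; the original may use a different filter:

```python
import numpy as np
from PIL import Image

def pixelate_sketch(x, severity=1):
  c = [0.6, 0.5, 0.4, 0.3, 0.25][severity - 1]  # shrink factor per severity
  h, w = x.shape[0], x.shape[1]
  img = Image.fromarray(x)
  img = img.resize((int(w * c), int(h * c)), Image.BOX)  # shrink
  img = img.resize((w, h), Image.BOX)                    # blow back up
  return np.asarray(img)
```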
tensorflow/datasets
tensorflow_datasets/image/corruptions.py
jpeg_compression
def jpeg_compression(x, severity=1): """Conduct jpeg compression to images. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,255]. Applied jpeg compression. """ c = [25,...
python
def jpeg_compression(x, severity=1): """Conduct jpeg compression to images. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,255]. Applied jpeg compression. """ c = [25,...
[ "def", "jpeg_compression", "(", "x", ",", "severity", "=", "1", ")", ":", "c", "=", "[", "25", ",", "18", ",", "15", ",", "10", ",", "7", "]", "[", "severity", "-", "1", "]", "x", "=", "tfds", ".", "core", ".", "lazy_imports", ".", "PIL_Image",...
Conduct jpeg compression to images. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixels in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,255]. Applied jpeg compression.
[ "Conduct", "jpeg", "compression", "to", "images", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/corruptions.py#L450-L466
train
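A PIL sketch of the JPEG-compression corruption: encode to an in-memory buffer at the severity-dependent quality, then decode:

```python
import io
import numpy as np
from PIL import Image

def jpeg_compression_sketch(x, severity=1):
  c = [25, 18, 15, 10, 7][severity - 1]  # JPEG quality per severity
  buf = io.BytesIO()
  Image.fromarray(x).save(buf, format="JPEG", quality=c)
  buf.seek(0)
  return np.asarray(Image.open(buf))
```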
tensorflow/datasets
tensorflow_datasets/core/utils/py_utils.py
temporary_assignment
def temporary_assignment(obj, attr, value): """Temporarily assign obj.attr to value.""" original = getattr(obj, attr, None) setattr(obj, attr, value) yield setattr(obj, attr, original)
python
def temporary_assignment(obj, attr, value): """Temporarily assign obj.attr to value.""" original = getattr(obj, attr, None) setattr(obj, attr, value) yield setattr(obj, attr, original)
[ "def", "temporary_assignment", "(", "obj", ",", "attr", ",", "value", ")", ":", "original", "=", "getattr", "(", "obj", ",", "attr", ",", "None", ")", "setattr", "(", "obj", ",", "attr", ",", "value", ")", "yield", "setattr", "(", "obj", ",", "attr",...
Temporarily assign obj.attr to value.
[ "Temporarily", "assign", "obj", ".", "attr", "to", "value", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/utils/py_utils.py#L55-L60
train
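The snippet above is a generator, so in the original module it is presumably wrapped with `@contextlib.contextmanager` (not captured by the extractor). A standalone copy with the decorator made explicit, plus a usage check:

```python
import contextlib

@contextlib.contextmanager
def temporary_assignment(obj, attr, value):
  original = getattr(obj, attr, None)
  setattr(obj, attr, value)
  yield
  setattr(obj, attr, original)

class Cfg(object):
  verbose = False

with temporary_assignment(Cfg, "verbose", True):
  assert Cfg.verbose          # patched inside the block
assert Cfg.verbose is False   # restored afterwards
```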
tensorflow/datasets
tensorflow_datasets/core/utils/py_utils.py
zip_dict
def zip_dict(*dicts): """Iterate over items of dictionaries grouped by their keys.""" for key in set(itertools.chain(*dicts)): # set merge all keys # Will raise KeyError if the dict don't have the same keys yield key, tuple(d[key] for d in dicts)
python
def zip_dict(*dicts): """Iterate over items of dictionaries grouped by their keys.""" for key in set(itertools.chain(*dicts)): # set merge all keys # Will raise KeyError if the dict don't have the same keys yield key, tuple(d[key] for d in dicts)
[ "def", "zip_dict", "(", "*", "dicts", ")", ":", "for", "key", "in", "set", "(", "itertools", ".", "chain", "(", "*", "dicts", ")", ")", ":", "# set merge all keys", "# Will raise KeyError if the dict don't have the same keys", "yield", "key", ",", "tuple", "(", ...
Iterate over items of dictionaries grouped by their keys.
[ "Iterate", "over", "items", "of", "dictionaries", "grouped", "by", "their", "keys", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/utils/py_utils.py#L63-L67
train
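A self-contained copy of `zip_dict` with a usage example; the split/path dicts are illustrative:

```python
import itertools

def zip_dict(*dicts):
  # Iterate over the union of keys; raises KeyError if a key is missing
  # from any dict, as noted in the snippet's comment.
  for key in set(itertools.chain(*dicts)):
    yield key, tuple(d[key] for d in dicts)

sizes = {"train": 100, "test": 20}
paths = {"train": "/tmp/train", "test": "/tmp/test"}
for split, (size, path) in zip_dict(sizes, paths):
  print(split, size, path)
```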
tensorflow/datasets
tensorflow_datasets/core/utils/py_utils.py
map_nested
def map_nested(function, data_struct, dict_only=False, map_tuple=False): """Apply a function recursively to each element of a nested data struct.""" # Could add support for more exotic data_struct, like OrderedDict if isinstance(data_struct, dict): return { k: map_nested(function, v, dict_only, map_t...
python
def map_nested(function, data_struct, dict_only=False, map_tuple=False): """Apply a function recursively to each element of a nested data struct.""" # Could add support for more exotic data_struct, like OrderedDict if isinstance(data_struct, dict): return { k: map_nested(function, v, dict_only, map_t...
[ "def", "map_nested", "(", "function", ",", "data_struct", ",", "dict_only", "=", "False", ",", "map_tuple", "=", "False", ")", ":", "# Could add support for more exotic data_struct, like OrderedDict", "if", "isinstance", "(", "data_struct", ",", "dict", ")", ":", "r...
Apply a function recursively to each element of a nested data struct.
[ "Apply", "a", "function", "recursively", "to", "each", "element", "of", "a", "nested", "data", "struct", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/utils/py_utils.py#L122-L143
train
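A simplified standalone sketch of the recursive mapping pattern (dicts and lists only; the real helper also takes `dict_only` and `map_tuple` switches):

```python
def map_nested_sketch(fn, data):
  if isinstance(data, dict):
    return {k: map_nested_sketch(fn, v) for k, v in data.items()}
  if isinstance(data, list):
    return [map_nested_sketch(fn, v) for v in data]
  return fn(data)  # leaf value

print(map_nested_sketch(lambda x: x * 2, {"a": [1, 2], "b": {"c": 3}}))
# {'a': [2, 4], 'b': {'c': 6}}
```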
tensorflow/datasets
tensorflow_datasets/core/utils/py_utils.py
zip_nested
def zip_nested(arg0, *args, **kwargs): """Zip data struct together and return a data struct with the same shape.""" # Python 2 do not support kwargs only arguments dict_only = kwargs.pop("dict_only", False) assert not kwargs # Could add support for more exotic data_struct, like OrderedDict if isinstance(ar...
python
def zip_nested(arg0, *args, **kwargs): """Zip data struct together and return a data struct with the same shape.""" # Python 2 do not support kwargs only arguments dict_only = kwargs.pop("dict_only", False) assert not kwargs # Could add support for more exotic data_struct, like OrderedDict if isinstance(ar...
[ "def", "zip_nested", "(", "arg0", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# Python 2 do not support kwargs only arguments", "dict_only", "=", "kwargs", ".", "pop", "(", "\"dict_only\"", ",", "False", ")", "assert", "not", "kwargs", "# Could add sup...
Zip data structs together and return a data struct with the same shape.
[ "Zip", "data", "struct", "together", "and", "return", "a", "data", "struct", "with", "the", "same", "shape", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/utils/py_utils.py#L146-L161
train
tensorflow/datasets
tensorflow_datasets/core/utils/py_utils.py
as_proto_cls
def as_proto_cls(proto_cls): """Simulate proto inheritance. By default, protobuf do not support direct inheritance, so this decorator simulates inheritance to the class to which it is applied. Example: ``` @as_proto_class(proto.MyProto) class A(object): def custom_method(self): return self.pr...
python
def as_proto_cls(proto_cls): """Simulate proto inheritance. By default, protobuf do not support direct inheritance, so this decorator simulates inheritance to the class to which it is applied. Example: ``` @as_proto_class(proto.MyProto) class A(object): def custom_method(self): return self.pr...
[ "def", "as_proto_cls", "(", "proto_cls", ")", ":", "def", "decorator", "(", "cls", ")", ":", "\"\"\"Decorator applied to the class.\"\"\"", "class", "ProtoCls", "(", "object", ")", ":", "\"\"\"Base class simulating the protobuf.\"\"\"", "def", "__init__", "(", "self", ...
Simulate proto inheritance. By default, protobuf does not support direct inheritance, so this decorator simulates inheritance for the class to which it is applied. Example: ``` @as_proto_cls(proto.MyProto) class A(object): def custom_method(self): return self.proto_field * 10 p = proto.MyProt...
[ "Simulate", "proto", "inheritance", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/utils/py_utils.py#L164-L229
train
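A minimal sketch of the delegation idea behind `as_proto_cls`: forward unknown attribute access to a wrapped inner object. The real decorator targets protobuf classes and mirrors more of their surface; `Point` is a stand-in:

```python
class Point(object):
  def __init__(self):
    self.x = 0
    self.y = 0

def as_wrapped_cls(inner_cls):
  def decorator(cls):
    class Wrapped(cls):
      def __init__(self, *args, **kwargs):
        # Build the wrapped instance, then run the decorated class's init.
        self.__proto = inner_cls(*args, **kwargs)
        super(Wrapped, self).__init__()
      def __getattr__(self, name):
        # Only called for attributes not found normally: delegate them.
        return getattr(self.__proto, name)
    Wrapped.__name__ = cls.__name__
    return Wrapped
  return decorator

@as_wrapped_cls(Point)
class A(object):
  def custom_method(self):
    return abs(self.x) + abs(self.y)

a = A()
print(a.x, a.custom_method())  # attribute access is forwarded to the Point
```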
tensorflow/datasets
tensorflow_datasets/core/utils/py_utils.py
tfds_dir
def tfds_dir(): """Path to tensorflow_datasets directory.""" return os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
python
def tfds_dir(): """Path to tensorflow_datasets directory.""" return os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
[ "def", "tfds_dir", "(", ")", ":", "return", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ")", ")" ]
Path to tensorflow_datasets directory.
[ "Path", "to", "tensorflow_datasets", "directory", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/utils/py_utils.py#L232-L234
train
tensorflow/datasets
tensorflow_datasets/core/utils/py_utils.py
atomic_write
def atomic_write(path, mode): """Writes to path atomically, by writing to temp file and renaming it.""" tmp_path = "%s%s_%s" % (path, constants.INCOMPLETE_SUFFIX, uuid.uuid4().hex) with tf.io.gfile.GFile(tmp_path, mode) as file_: yield file_ tf.io.gfile.rename(tmp_path, path, overwrite=True)
python
def atomic_write(path, mode): """Writes to path atomically, by writing to temp file and renaming it.""" tmp_path = "%s%s_%s" % (path, constants.INCOMPLETE_SUFFIX, uuid.uuid4().hex) with tf.io.gfile.GFile(tmp_path, mode) as file_: yield file_ tf.io.gfile.rename(tmp_path, path, overwrite=True)
[ "def", "atomic_write", "(", "path", ",", "mode", ")", ":", "tmp_path", "=", "\"%s%s_%s\"", "%", "(", "path", ",", "constants", ".", "INCOMPLETE_SUFFIX", ",", "uuid", ".", "uuid4", "(", ")", ".", "hex", ")", "with", "tf", ".", "io", ".", "gfile", ".",...
Writes to path atomically, by writing to temp file and renaming it.
[ "Writes", "to", "path", "atomically", "by", "writing", "to", "temp", "file", "and", "renaming", "it", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/utils/py_utils.py#L238-L243
train
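A standard-library analogue of the atomic-write pattern above: write to a uniquely named temp file, then rename over the target (`os.replace` playing the role of `tf.io.gfile.rename`). The `.incomplete_` suffix is a stand-in for `constants.INCOMPLETE_SUFFIX`:

```python
import contextlib
import os
import uuid

@contextlib.contextmanager
def atomic_write_sketch(path, mode="w"):
  tmp_path = "%s.incomplete_%s" % (path, uuid.uuid4().hex)
  with open(tmp_path, mode) as f:
    yield f
  os.replace(tmp_path, path)  # atomic on POSIX filesystems
```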
tensorflow/datasets
tensorflow_datasets/core/utils/py_utils.py
read_checksum_digest
def read_checksum_digest(path, checksum_cls=hashlib.sha256): """Given a hash constructor, returns checksum digest and size of file.""" checksum = checksum_cls() size = 0 with tf.io.gfile.GFile(path, "rb") as f: while True: block = f.read(io.DEFAULT_BUFFER_SIZE) size += len(block) if not bl...
python
def read_checksum_digest(path, checksum_cls=hashlib.sha256): """Given a hash constructor, returns checksum digest and size of file.""" checksum = checksum_cls() size = 0 with tf.io.gfile.GFile(path, "rb") as f: while True: block = f.read(io.DEFAULT_BUFFER_SIZE) size += len(block) if not bl...
[ "def", "read_checksum_digest", "(", "path", ",", "checksum_cls", "=", "hashlib", ".", "sha256", ")", ":", "checksum", "=", "checksum_cls", "(", ")", "size", "=", "0", "with", "tf", ".", "io", ".", "gfile", ".", "GFile", "(", "path", ",", "\"rb\"", ")",...
Given a hash constructor, returns checksum digest and size of file.
[ "Given", "a", "hash", "constructor", "returns", "checksum", "digest", "and", "size", "of", "file", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/utils/py_utils.py#L262-L273
train
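A plain-Python version of the checksum helper, with `open()` standing in for `tf.io.gfile.GFile` and `hexdigest()` assumed as the returned digest form:

```python
import hashlib
import io

def read_checksum_digest_sketch(path, checksum_cls=hashlib.sha256):
  checksum = checksum_cls()
  size = 0
  with open(path, "rb") as f:
    while True:
      block = f.read(io.DEFAULT_BUFFER_SIZE)
      size += len(block)
      if not block:
        break
      checksum.update(block)
  return checksum.hexdigest(), size
```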
tensorflow/datasets
tensorflow_datasets/core/utils/py_utils.py
reraise
def reraise(additional_msg): """Reraise an exception with an additional message.""" exc_type, exc_value, exc_traceback = sys.exc_info() msg = str(exc_value) + "\n" + additional_msg six.reraise(exc_type, exc_type(msg), exc_traceback)
python
def reraise(additional_msg): """Reraise an exception with an additional message.""" exc_type, exc_value, exc_traceback = sys.exc_info() msg = str(exc_value) + "\n" + additional_msg six.reraise(exc_type, exc_type(msg), exc_traceback)
[ "def", "reraise", "(", "additional_msg", ")", ":", "exc_type", ",", "exc_value", ",", "exc_traceback", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "str", "(", "exc_value", ")", "+", "\"\\n\"", "+", "additional_msg", "six", ".", "reraise", "(", "ex...
Reraise an exception with an additional message.
[ "Reraise", "an", "exception", "with", "an", "additional", "message", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/utils/py_utils.py#L276-L280
train
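A usage sketch for `reraise`: catch, annotate, and re-throw with the original traceback preserved via `six.reraise`. The `parse_record` wrapper is hypothetical:

```python
import sys
import six

def reraise(additional_msg):
  exc_type, exc_value, exc_traceback = sys.exc_info()
  msg = str(exc_value) + "\n" + additional_msg
  six.reraise(exc_type, exc_type(msg), exc_traceback)

def parse_record(line):
  try:
    return int(line)
  except ValueError:
    reraise("while parsing line: %r" % line)

# parse_record("abc") raises ValueError carrying the original message plus
# "while parsing line: 'abc'", with the original traceback intact.
```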
tensorflow/datasets
tensorflow_datasets/core/utils/py_utils.py
rgetattr
def rgetattr(obj, attr, *args): """Get attr that handles dots in attr name.""" def _getattr(obj, attr): return getattr(obj, attr, *args) return functools.reduce(_getattr, [obj] + attr.split("."))
python
def rgetattr(obj, attr, *args): """Get attr that handles dots in attr name.""" def _getattr(obj, attr): return getattr(obj, attr, *args) return functools.reduce(_getattr, [obj] + attr.split("."))
[ "def", "rgetattr", "(", "obj", ",", "attr", ",", "*", "args", ")", ":", "def", "_getattr", "(", "obj", ",", "attr", ")", ":", "return", "getattr", "(", "obj", ",", "attr", ",", "*", "args", ")", "return", "functools", ".", "reduce", "(", "_getattr"...
Get attr that handles dots in attr name.
[ "Get", "attr", "that", "handles", "dots", "in", "attr", "name", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/utils/py_utils.py#L283-L287
train
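A self-contained copy of `rgetattr` with a usage example showing dotted lookup and the optional default:

```python
import functools

def rgetattr(obj, attr, *args):
  def _getattr(obj, attr):
    return getattr(obj, attr, *args)
  return functools.reduce(_getattr, [obj] + attr.split("."))

class Inner(object):
  value = 42

class Outer(object):
  inner = Inner()

print(rgetattr(Outer, "inner.value"))       # 42
print(rgetattr(Outer, "inner.missing", 0))  # 0 (default, like getattr)
```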
tensorflow/datasets
tensorflow_datasets/image/celebahq.py
CelebAHq._split_generators
def _split_generators(self, dl_manager): """Returns SplitGenerators.""" image_tar_file = os.path.join(dl_manager.manual_dir, self.builder_config.file_name) if not tf.io.gfile.exists(image_tar_file): # The current celebahq generation code depends on a concrete version ...
python
def _split_generators(self, dl_manager): """Returns SplitGenerators.""" image_tar_file = os.path.join(dl_manager.manual_dir, self.builder_config.file_name) if not tf.io.gfile.exists(image_tar_file): # The current celebahq generation code depends on a concrete version ...
[ "def", "_split_generators", "(", "self", ",", "dl_manager", ")", ":", "image_tar_file", "=", "os", ".", "path", ".", "join", "(", "dl_manager", ".", "manual_dir", ",", "self", ".", "builder_config", ".", "file_name", ")", "if", "not", "tf", ".", "io", "....
Returns SplitGenerators.
[ "Returns", "SplitGenerators", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/celebahq.py#L107-L124
train
tensorflow/datasets
tensorflow_datasets/translate/ted_hrlr.py
TedHrlrTranslate._generate_examples
def _generate_examples(self, source_file, target_file): """This function returns the examples in the raw (text) form.""" with tf.io.gfile.GFile(source_file) as f: source_sentences = f.read().split("\n") with tf.io.gfile.GFile(target_file) as f: target_sentences = f.read().split("\n") assert...
python
def _generate_examples(self, source_file, target_file): """This function returns the examples in the raw (text) form.""" with tf.io.gfile.GFile(source_file) as f: source_sentences = f.read().split("\n") with tf.io.gfile.GFile(target_file) as f: target_sentences = f.read().split("\n") assert...
[ "def", "_generate_examples", "(", "self", ",", "source_file", ",", "target_file", ")", ":", "with", "tf", ".", "io", ".", "gfile", ".", "GFile", "(", "source_file", ")", "as", "f", ":", "source_sentences", "=", "f", ".", "read", "(", ")", ".", "split",...
This function returns the examples in the raw (text) form.
[ "This", "function", "returns", "the", "examples", "in", "the", "raw", "(", "text", ")", "form", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/translate/ted_hrlr.py#L160-L176
train
tensorflow/datasets
tensorflow_datasets/text/xnli.py
Xnli._generate_examples
def _generate_examples(self, filepath): """This function returns the examples in the raw (text) form.""" rows_per_pair_id = collections.defaultdict(list) with tf.io.gfile.GFile(filepath) as f: reader = csv.DictReader(f, delimiter='\t', quoting=csv.QUOTE_NONE) for row in reader: rows_per...
python
def _generate_examples(self, filepath): """This function returns the examples in the raw (text) form.""" rows_per_pair_id = collections.defaultdict(list) with tf.io.gfile.GFile(filepath) as f: reader = csv.DictReader(f, delimiter='\t', quoting=csv.QUOTE_NONE) for row in reader: rows_per...
[ "def", "_generate_examples", "(", "self", ",", "filepath", ")", ":", "rows_per_pair_id", "=", "collections", ".", "defaultdict", "(", "list", ")", "with", "tf", ".", "io", ".", "gfile", ".", "GFile", "(", "filepath", ")", "as", "f", ":", "reader", "=", ...
This function returns the examples in the raw (text) form.
[ "This", "function", "returns", "the", "examples", "in", "the", "raw", "(", "text", ")", "form", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/text/xnli.py#L107-L123
train
tensorflow/datasets
tensorflow_datasets/image/voc.py
Voc2007._generate_example
def _generate_example(self, data_path, image_id): """Yields examples.""" image_filepath = os.path.join( data_path, "VOCdevkit/VOC2007/JPEGImages", "{}.jpg".format(image_id)) annon_filepath = os.path.join( data_path, "VOCdevkit/VOC2007/Annotations", "{}.xml".format(image_id)) def _get_ex...
python
def _generate_example(self, data_path, image_id): """Yields examples.""" image_filepath = os.path.join( data_path, "VOCdevkit/VOC2007/JPEGImages", "{}.jpg".format(image_id)) annon_filepath = os.path.join( data_path, "VOCdevkit/VOC2007/Annotations", "{}.xml".format(image_id)) def _get_ex...
[ "def", "_generate_example", "(", "self", ",", "data_path", ",", "image_id", ")", ":", "image_filepath", "=", "os", ".", "path", ".", "join", "(", "data_path", ",", "\"VOCdevkit/VOC2007/JPEGImages\"", ",", "\"{}.jpg\"", ".", "format", "(", "image_id", ")", ")",...
Yields examples.
[ "Yields", "examples", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/voc.py#L137-L186
train
tensorflow/datasets
tensorflow_datasets/core/features/image_feature.py
Image.set_encoding_format
def set_encoding_format(self, encoding_format): """Update the encoding format.""" supported = ENCODE_FN.keys() if encoding_format not in supported: raise ValueError('`encoding_format` must be one of %s.' % supported) self._encoding_format = encoding_format
python
def set_encoding_format(self, encoding_format): """Update the encoding format.""" supported = ENCODE_FN.keys() if encoding_format not in supported: raise ValueError('`encoding_format` must be one of %s.' % supported) self._encoding_format = encoding_format
[ "def", "set_encoding_format", "(", "self", ",", "encoding_format", ")", ":", "supported", "=", "ENCODE_FN", ".", "keys", "(", ")", "if", "encoding_format", "not", "in", "supported", ":", "raise", "ValueError", "(", "'`encoding_format` must be one of %s.'", "%", "s...
Update the encoding format.
[ "Update", "the", "encoding", "format", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/features/image_feature.py#L97-L102
train
tensorflow/datasets
tensorflow_datasets/core/features/image_feature.py
Image.set_shape
def set_shape(self, shape): """Update the shape.""" channels = shape[-1] acceptable_channels = ACCEPTABLE_CHANNELS[self._encoding_format] if channels not in acceptable_channels: raise ValueError('Acceptable `channels` for %s: %s (was %s)' % ( self._encoding_format, acceptable_channels, c...
python
def set_shape(self, shape): """Update the shape.""" channels = shape[-1] acceptable_channels = ACCEPTABLE_CHANNELS[self._encoding_format] if channels not in acceptable_channels: raise ValueError('Acceptable `channels` for %s: %s (was %s)' % ( self._encoding_format, acceptable_channels, c...
[ "def", "set_shape", "(", "self", ",", "shape", ")", ":", "channels", "=", "shape", "[", "-", "1", "]", "acceptable_channels", "=", "ACCEPTABLE_CHANNELS", "[", "self", ".", "_encoding_format", "]", "if", "channels", "not", "in", "acceptable_channels", ":", "r...
Update the shape.
[ "Update", "the", "shape", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/features/image_feature.py#L104-L111
train
tensorflow/datasets
tensorflow_datasets/core/features/image_feature.py
Image._encode_image
def _encode_image(self, np_image): """Returns np_image encoded as jpeg or png.""" if np_image.dtype != np.uint8: raise ValueError('Image should be uint8. Detected: %s.' % np_image.dtype) utils.assert_shape_match(np_image.shape, self._shape) return self._runner.run(ENCODE_FN[self._encoding_format],...
python
def _encode_image(self, np_image): """Returns np_image encoded as jpeg or png.""" if np_image.dtype != np.uint8: raise ValueError('Image should be uint8. Detected: %s.' % np_image.dtype) utils.assert_shape_match(np_image.shape, self._shape) return self._runner.run(ENCODE_FN[self._encoding_format],...
[ "def", "_encode_image", "(", "self", ",", "np_image", ")", ":", "if", "np_image", ".", "dtype", "!=", "np", ".", "uint8", ":", "raise", "ValueError", "(", "'Image should be uint8. Detected: %s.'", "%", "np_image", ".", "dtype", ")", "utils", ".", "assert_shape...
Returns np_image encoded as jpeg or png.
[ "Returns", "np_image", "encoded", "as", "jpeg", "or", "png", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/features/image_feature.py#L128-L133
train
tensorflow/datasets
tensorflow_datasets/core/features/image_feature.py
Image.encode_example
def encode_example(self, image_or_path_or_fobj): """Convert the given image into a dict convertible to tf example.""" if isinstance(image_or_path_or_fobj, np.ndarray): encoded_image = self._encode_image(image_or_path_or_fobj) elif isinstance(image_or_path_or_fobj, six.string_types): with tf.io.g...
python
def encode_example(self, image_or_path_or_fobj): """Convert the given image into a dict convertible to tf example.""" if isinstance(image_or_path_or_fobj, np.ndarray): encoded_image = self._encode_image(image_or_path_or_fobj) elif isinstance(image_or_path_or_fobj, six.string_types): with tf.io.g...
[ "def", "encode_example", "(", "self", ",", "image_or_path_or_fobj", ")", ":", "if", "isinstance", "(", "image_or_path_or_fobj", ",", "np", ".", "ndarray", ")", ":", "encoded_image", "=", "self", ".", "_encode_image", "(", "image_or_path_or_fobj", ")", "elif", "i...
Convert the given image into a dict convertible to tf example.
[ "Convert", "the", "given", "image", "into", "a", "dict", "convertible", "to", "tf", "example", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/features/image_feature.py#L135-L144
train
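Usage sketch for the three input types the dispatch accepts: a numpy array, a path string, or a file object (that last branch is truncated in the record, so the direct read is an assumption; the paths below are hypothetical):

```python
import numpy as np
import tensorflow_datasets as tfds

feature = tfds.features.Image(shape=(28, 28, 3), encoding_format='png')

# 1) numpy array -> re-encoded via _encode_image.
feature.encode_example(np.zeros((28, 28, 3), dtype=np.uint8))

# 2) path string -> read through tf.io.gfile (hypothetical file).
# feature.encode_example('/tmp/cat.png')

# 3) file object -> presumably read directly.
# with tf.io.gfile.GFile('/tmp/cat.png', 'rb') as fobj:
#   feature.encode_example(fobj)
```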
tensorflow/datasets
tensorflow_datasets/core/features/image_feature.py
Image.decode_example
def decode_example(self, example):
  """Reconstruct the image from the tf example."""
  img = tf.image.decode_image(
      example, channels=self._shape[-1], dtype=tf.uint8)
  img.set_shape(self._shape)
  return img
python
def decode_example(self, example):
  """Reconstruct the image from the tf example."""
  img = tf.image.decode_image(
      example, channels=self._shape[-1], dtype=tf.uint8)
  img.set_shape(self._shape)
  return img
[ "def", "decode_example", "(", "self", ",", "example", ")", ":", "img", "=", "tf", ".", "image", ".", "decode_image", "(", "example", ",", "channels", "=", "self", ".", "_shape", "[", "-", "1", "]", ",", "dtype", "=", "tf", ".", "uint8", ")", "img",...
Reconstruct the image from the tf example.
[ "Reconstruct", "the", "image", "from", "the", "tf", "example", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/features/image_feature.py#L146-L151
train
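The decode side round-trips with the encoder. A sketch in plain TF ops, mirroring the body above (eager mode; `set_shape` is what restores the static shape for graph consumers):

```python
import tensorflow as tf

png_bytes = tf.image.encode_png(tf.zeros((28, 28, 3), dtype=tf.uint8))
img = tf.image.decode_image(png_bytes, channels=3, dtype=tf.uint8)
img.set_shape((28, 28, 3))  # as decode_example does with self._shape
```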
tensorflow/datasets
tensorflow_datasets/core/features/image_feature.py
Image.save_metadata
def save_metadata(self, data_dir, feature_name=None): """See base class for details.""" filepath = _get_metadata_filepath(data_dir, feature_name) with tf.io.gfile.GFile(filepath, 'w') as f: json.dump({ 'shape': [-1 if d is None else d for d in self._shape], 'encoding_format': self....
python
def save_metadata(self, data_dir, feature_name=None): """See base class for details.""" filepath = _get_metadata_filepath(data_dir, feature_name) with tf.io.gfile.GFile(filepath, 'w') as f: json.dump({ 'shape': [-1 if d is None else d for d in self._shape], 'encoding_format': self....
[ "def", "save_metadata", "(", "self", ",", "data_dir", ",", "feature_name", "=", "None", ")", ":", "filepath", "=", "_get_metadata_filepath", "(", "data_dir", ",", "feature_name", ")", "with", "tf", ".", "io", ".", "gfile", ".", "GFile", "(", "filepath", ",...
See base class for details.
[ "See", "base", "class", "for", "details", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/features/image_feature.py#L153-L160
train
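The JSON payload presumably looks like this: `None` dimensions are written as `-1` so the shape survives serialization:

```python
import json

shape = (None, None, 3)
payload = {
    'shape': [-1 if d is None else d for d in shape],
    'encoding_format': 'png',
}
print(json.dumps(payload))  # {"shape": [-1, -1, 3], "encoding_format": "png"}
```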
tensorflow/datasets
tensorflow_datasets/core/features/image_feature.py
Image.load_metadata
def load_metadata(self, data_dir, feature_name=None): """See base class for details.""" # Restore names if defined filepath = _get_metadata_filepath(data_dir, feature_name) if tf.io.gfile.exists(filepath): with tf.io.gfile.GFile(filepath, 'r') as f: info_data = json.load(f) self.set_...
python
def load_metadata(self, data_dir, feature_name=None): """See base class for details.""" # Restore names if defined filepath = _get_metadata_filepath(data_dir, feature_name) if tf.io.gfile.exists(filepath): with tf.io.gfile.GFile(filepath, 'r') as f: info_data = json.load(f) self.set_...
[ "def", "load_metadata", "(", "self", ",", "data_dir", ",", "feature_name", "=", "None", ")", ":", "# Restore names if defined", "filepath", "=", "_get_metadata_filepath", "(", "data_dir", ",", "feature_name", ")", "if", "tf", ".", "io", ".", "gfile", ".", "exi...
See base class for details.
[ "See", "base", "class", "for", "details", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/features/image_feature.py#L162-L170
train
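The body of `load_metadata` is truncated above; a minimal sketch of the presumed inverse (read the JSON back and map `-1` to `None`), using the local filesystem instead of `tf.io.gfile`:

```python
import json
import os

def load_metadata_sketch(filepath):
  """Assumed inverse of save_metadata; returns (shape, encoding_format)."""
  if not os.path.exists(filepath):
    return None  # nothing to restore
  with open(filepath) as f:
    info = json.load(f)
  shape = tuple(None if d == -1 else d for d in info['shape'])
  return shape, info['encoding_format']
```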
tensorflow/datasets
tensorflow_datasets/video/moving_sequence.py
_create_moving_sequence
def _create_moving_sequence(image, pad_lefts, total_padding): """Create a moving image sequence from the given image a left padding values. Args: image: [in_h, in_w, n_channels] uint8 array pad_lefts: [sequence_length, 2] int32 array of left padding values total_padding: tensor of padding values, (pad_...
python
def _create_moving_sequence(image, pad_lefts, total_padding): """Create a moving image sequence from the given image a left padding values. Args: image: [in_h, in_w, n_channels] uint8 array pad_lefts: [sequence_length, 2] int32 array of left padding values total_padding: tensor of padding values, (pad_...
[ "def", "_create_moving_sequence", "(", "image", ",", "pad_lefts", ",", "total_padding", ")", ":", "with", "tf", ".", "name_scope", "(", "\"moving_sequence\"", ")", ":", "def", "get_padded_image", "(", "args", ")", ":", "pad_left", ",", "=", "args", "pad_right"...
Create a moving image sequence from the given image and left padding values.

Args:
  image: [in_h, in_w, n_channels] uint8 array
  pad_lefts: [sequence_length, 2] int32 array of left padding values
  total_padding: tensor of padding values, (pad_h, pad_w)

Returns:
  [sequence_length, out_h, out_w, n_channel...
[ "Create", "a", "moving", "image", "sequence", "from", "the", "given", "image", "a", "left", "padding", "values", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/video/moving_sequence.py#L27-L53
train
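A single-frame sketch of the padding trick the docstring describes: each frame pads the source image so it sits at a given top-left offset inside a canvas of size `image_size + total_padding`. Sizes below assume the classic 28x28 digit in a 64x64 frame; the stacking over `sequence_length` is truncated in the record above:

```python
import tensorflow as tf

image = tf.zeros((28, 28, 1), dtype=tf.uint8)  # [in_h, in_w, n_channels]
total_padding = tf.constant([36, 36])          # out = 28 + 36 = 64 per axis
pad_left = tf.constant([10, 5])                # this frame's top/left offset
pad_right = total_padding - pad_left

paddings = tf.stack([pad_left, pad_right], axis=-1)   # [[10, 26], [5, 31]]
frame = tf.pad(image, tf.concat([paddings, [[0, 0]]], axis=0))
print(frame.shape)  # (64, 64, 1)
```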
tensorflow/datasets
tensorflow_datasets/video/moving_sequence.py
_get_linear_trajectory
def _get_linear_trajectory(x0, velocity, t): """Construct a linear trajectory from x0. Args: x0: N-D float tensor. velocity: N-D float tensor t: [sequence_length]-length float tensor Returns: x: [sequence_length, ndims] float tensor. """ x0 = tf.convert_to_tensor(x0) velocity = tf.convert_...
python
def _get_linear_trajectory(x0, velocity, t): """Construct a linear trajectory from x0. Args: x0: N-D float tensor. velocity: N-D float tensor t: [sequence_length]-length float tensor Returns: x: [sequence_length, ndims] float tensor. """ x0 = tf.convert_to_tensor(x0) velocity = tf.convert_...
[ "def", "_get_linear_trajectory", "(", "x0", ",", "velocity", ",", "t", ")", ":", "x0", "=", "tf", ".", "convert_to_tensor", "(", "x0", ")", "velocity", "=", "tf", ".", "convert_to_tensor", "(", "velocity", ")", "t", "=", "tf", ".", "convert_to_tensor", "...
Construct a linear trajectory from x0.

Args:
  x0: N-D float tensor.
  velocity: N-D float tensor
  t: [sequence_length]-length float tensor

Returns:
  x: [sequence_length, ndims] float tensor.
[ "Construct", "a", "linear", "trajectory", "from", "x0", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/video/moving_sequence.py#L56-L82
train
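The trajectory is simply x(t) = x0 + velocity * t broadcast over the time axis; a direct sketch of the shape contract from the docstring:

```python
import tensorflow as tf

x0 = tf.constant([0.2, 0.8])         # start position, ndims = 2
velocity = tf.constant([0.5, -0.5])
t = tf.linspace(0.0, 1.0, 5)         # [sequence_length]

x = x0[tf.newaxis, :] + velocity[tf.newaxis, :] * t[:, tf.newaxis]
print(x.shape)  # (5, 2) -> [sequence_length, ndims]
```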
tensorflow/datasets
tensorflow_datasets/video/moving_sequence.py
image_as_moving_sequence
def image_as_moving_sequence( image, sequence_length=20, output_size=(64, 64), velocity=0.1, start_position=None): """Turn simple static images into sequences of the originals bouncing around. Adapted from Srivastava et al. http://www.cs.toronto.edu/~nitish/unsupervised_video/ Example usage: ```pyth...
python
def image_as_moving_sequence( image, sequence_length=20, output_size=(64, 64), velocity=0.1, start_position=None): """Turn simple static images into sequences of the originals bouncing around. Adapted from Srivastava et al. http://www.cs.toronto.edu/~nitish/unsupervised_video/ Example usage: ```pyth...
[ "def", "image_as_moving_sequence", "(", "image", ",", "sequence_length", "=", "20", ",", "output_size", "=", "(", "64", ",", "64", ")", ",", "velocity", "=", "0.1", ",", "start_position", "=", "None", ")", ":", "ndims", "=", "2", "image", "=", "tf", "....
Turn simple static images into sequences of the originals bouncing around. Adapted from Srivastava et al. http://www.cs.toronto.edu/~nitish/unsupervised_video/ Example usage: ```python import tensorflow as tf import tensorflow_datasets as tfds from tensorflow_datasets.video import moving_sequence tf.c...
[ "Turn", "simple", "static", "images", "into", "sequences", "of", "the", "originals", "bouncing", "around", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/video/moving_sequence.py#L115-L234
train
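The docstring's example usage is cut off above; a reconstruction of the typical mapping pattern, assuming the function returns a namedtuple with an `image_sequence` field (the field name is not confirmed by the record):

```python
import tensorflow_datasets as tfds
from tensorflow_datasets.video import moving_sequence

def map_fn(example):
  sequence = moving_sequence.image_as_moving_sequence(
      example['image'], sequence_length=20, output_size=(64, 64))
  return sequence.image_sequence  # assumed field name

ds = tfds.load('mnist', split='train').map(map_fn)
```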
tensorflow/datasets
tensorflow_datasets/audio/nsynth.py
Nsynth._split_generators
def _split_generators(self, dl_manager): """Returns splits.""" dl_urls = { split: _BASE_DOWNLOAD_PATH + "%s.tfrecord" % split for split in _SPLITS } dl_urls["instrument_labels"] = (_BASE_DOWNLOAD_PATH + "instrument_labels.txt") dl_paths = dl_manager.downlo...
python
def _split_generators(self, dl_manager): """Returns splits.""" dl_urls = { split: _BASE_DOWNLOAD_PATH + "%s.tfrecord" % split for split in _SPLITS } dl_urls["instrument_labels"] = (_BASE_DOWNLOAD_PATH + "instrument_labels.txt") dl_paths = dl_manager.downlo...
[ "def", "_split_generators", "(", "self", ",", "dl_manager", ")", ":", "dl_urls", "=", "{", "split", ":", "_BASE_DOWNLOAD_PATH", "+", "\"%s.tfrecord\"", "%", "split", "for", "split", "in", "_SPLITS", "}", "dl_urls", "[", "\"instrument_labels\"", "]", "=", "(", ...
Returns splits.
[ "Returns", "splits", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/audio/nsynth.py#L117-L135
train
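A sketch of the URL table the snippet builds before handing it to the download manager; the base path and split names are placeholders, since both constants are defined outside this record:

```python
_BASE_DOWNLOAD_PATH = 'https://example.com/nsynth/'  # placeholder URL
_SPLITS = ['train', 'valid', 'test']                 # assumed split names

dl_urls = {split: _BASE_DOWNLOAD_PATH + '%s.tfrecord' % split
           for split in _SPLITS}
dl_urls['instrument_labels'] = _BASE_DOWNLOAD_PATH + 'instrument_labels.txt'
# dl_paths = dl_manager.download_and_extract(dl_urls)  # one generator per split
```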
tensorflow/datasets
tensorflow_datasets/core/utils/version.py
_str_to_version
def _str_to_version(version_str, allow_wildcard=False): """Return the tuple (major, minor, patch) version extracted from the str.""" reg = _VERSION_WILDCARD_REG if allow_wildcard else _VERSION_RESOLVED_REG res = reg.match(version_str) if not res: msg = "Invalid version '{}'. Format should be x.y.z".format(v...
python
def _str_to_version(version_str, allow_wildcard=False): """Return the tuple (major, minor, patch) version extracted from the str.""" reg = _VERSION_WILDCARD_REG if allow_wildcard else _VERSION_RESOLVED_REG res = reg.match(version_str) if not res: msg = "Invalid version '{}'. Format should be x.y.z".format(v...
[ "def", "_str_to_version", "(", "version_str", ",", "allow_wildcard", "=", "False", ")", ":", "reg", "=", "_VERSION_WILDCARD_REG", "if", "allow_wildcard", "else", "_VERSION_RESOLVED_REG", "res", "=", "reg", ".", "match", "(", "version_str", ")", "if", "not", "res...
Return the tuple (major, minor, patch) version extracted from the str.
[ "Return", "the", "tuple", "(", "major", "minor", "patch", ")", "version", "extracted", "from", "the", "str", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/utils/version.py#L70-L83
train
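A self-contained re-implementation under assumed regex patterns (the real `_VERSION_RESOLVED_REG` / `_VERSION_WILDCARD_REG` constants are defined outside this record):

```python
import re

_VERSION_RESOLVED_REG = re.compile(r'^(\d+)\.(\d+)\.(\d+)$')           # assumed
_VERSION_WILDCARD_REG = re.compile(r'^(\d+|\*)\.(\d+|\*)\.(\d+|\*)$')  # assumed

def str_to_version(version_str, allow_wildcard=False):
  reg = _VERSION_WILDCARD_REG if allow_wildcard else _VERSION_RESOLVED_REG
  res = reg.match(version_str)
  if not res:
    raise ValueError(
        "Invalid version '{}'. Format should be x.y.z".format(version_str))
  return tuple(v if v == '*' else int(v) for v in res.groups())

print(str_to_version('1.0.2'))                       # (1, 0, 2)
print(str_to_version('1.*.*', allow_wildcard=True))  # (1, '*', '*')
```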
tensorflow/datasets
tensorflow_datasets/core/utils/version.py
Version.match
def match(self, other_version): """Returns True if other_version matches. Args: other_version: string, of the form "x[.y[.x]]" where {x,y,z} can be a number or a wildcard. """ major, minor, patch = _str_to_version(other_version, allow_wildcard=True) return (major in [self.major, "*"] ...
python
def match(self, other_version): """Returns True if other_version matches. Args: other_version: string, of the form "x[.y[.x]]" where {x,y,z} can be a number or a wildcard. """ major, minor, patch = _str_to_version(other_version, allow_wildcard=True) return (major in [self.major, "*"] ...
[ "def", "match", "(", "self", ",", "other_version", ")", ":", "major", ",", "minor", ",", "patch", "=", "_str_to_version", "(", "other_version", ",", "allow_wildcard", "=", "True", ")", "return", "(", "major", "in", "[", "self", ".", "major", ",", "\"*\""...
Returns True if other_version matches.

Args:
  other_version: string, of the form "x[.y[.z]]" where {x,y,z} can be a
    number or a wildcard.
[ "Returns", "True", "if", "other_version", "matches", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/utils/version.py#L58-L67
train
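Wildcard matching in use; this assumes `Version` can be constructed from a resolved "x.y.z" string, as the module's `_str_to_version` helper suggests:

```python
from tensorflow_datasets.core.utils.version import Version

v = Version('1.2.3')
print(v.match('1.2.3'))  # True  (exact match)
print(v.match('1.*.*'))  # True  (major matches, rest wildcarded)
print(v.match('2.*.*'))  # False (major differs)
```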
tensorflow/datasets
tensorflow_datasets/image/imagenet.py
Imagenet2012._get_validation_labels
def _get_validation_labels(val_path): """Returns labels for validation. Args: val_path: path to TAR file containing validation images. It is used to retrieve the name of pictures and associate them to labels. Returns: dict, mapping from image name (str) to label (str). """ labels...
python
def _get_validation_labels(val_path): """Returns labels for validation. Args: val_path: path to TAR file containing validation images. It is used to retrieve the name of pictures and associate them to labels. Returns: dict, mapping from image name (str) to label (str). """ labels...
[ "def", "_get_validation_labels", "(", "val_path", ")", ":", "labels_path", "=", "tfds", ".", "core", ".", "get_tfds_path", "(", "_VALIDATION_LABELS_FNAME", ")", "with", "tf", ".", "io", ".", "gfile", ".", "GFile", "(", "labels_path", ")", "as", "labels_f", "...
Returns labels for validation.

Args:
  val_path: path to TAR file containing validation images. It is used to
    retrieve the name of pictures and associate them to labels.

Returns:
  dict, mapping from image name (str) to label (str).
[ "Returns", "labels", "for", "validation", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/imagenet.py#L86-L102
train
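The pairing step is truncated above; a sketch of the presumed approach: read one label per line from the packaged labels file and zip the lines with the sorted image names from the validation tarball:

```python
import tarfile
import tensorflow as tf

def get_validation_labels_sketch(val_path, labels_path):
  """Assumed pairing: i-th label line <-> i-th sorted image name."""
  with tf.io.gfile.GFile(labels_path) as labels_f:
    labels = labels_f.read().strip().splitlines()
  with tarfile.open(val_path) as tar:
    images = sorted(tar.getnames())
  return dict(zip(images, labels))
```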
tensorflow/datasets
tensorflow_datasets/image/imagenet.py
Imagenet2012._generate_examples
def _generate_examples(self, archive, validation_labels=None): """Yields examples.""" if validation_labels: # Validation split for example in self._generate_examples_validation(archive, validation_labels): yield example # Training split....
python
def _generate_examples(self, archive, validation_labels=None): """Yields examples.""" if validation_labels: # Validation split for example in self._generate_examples_validation(archive, validation_labels): yield example # Training split....
[ "def", "_generate_examples", "(", "self", ",", "archive", ",", "validation_labels", "=", "None", ")", ":", "if", "validation_labels", ":", "# Validation split", "for", "example", "in", "self", ".", "_generate_examples_validation", "(", "archive", ",", "validation_la...
Yields examples.
[ "Yields", "examples", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/image/imagenet.py#L131-L151
train
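The validation branch visible above yields one labeled example per image; the training branch is truncated. A minimal sketch of the dispatch, with assumed feature keys:

```python
def generate_examples_sketch(archive, validation_labels=None):
  """archive yields (filename, file_object) pairs, as in TFDS archives."""
  if validation_labels:  # validation split
    for fname, fobj in archive:
      yield {
          'file_name': fname,  # assumed keys, not confirmed by the record
          'image': fobj,
          'label': validation_labels[fname],
      }
  # Training split: truncated above (presumably one nested tar per class).
```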
tensorflow/datasets
tensorflow_datasets/core/file_format_adapter.py
do_files_exist
def do_files_exist(filenames):
  """Whether any of the filenames exist."""
  preexisting = [tf.io.gfile.exists(f) for f in filenames]
  return any(preexisting)
python
def do_files_exist(filenames):
  """Whether any of the filenames exist."""
  preexisting = [tf.io.gfile.exists(f) for f in filenames]
  return any(preexisting)
[ "def", "do_files_exist", "(", "filenames", ")", ":", "preexisting", "=", "[", "tf", ".", "io", ".", "gfile", ".", "exists", "(", "f", ")", "for", "f", "in", "filenames", "]", "return", "any", "(", "preexisting", ")" ]
Whether any of the filenames exist.
[ "Whether", "any", "of", "the", "filenames", "exist", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/file_format_adapter.py#L194-L197
train
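Usage is direct, using the function from the record above; `tf.io.gfile.exists` makes it work for remote filesystems (e.g. GCS paths) as well as local ones:

```python
# '/tmp' exists on most systems; the second path presumably does not.
print(do_files_exist(['/tmp', '/no/such/file']))  # True  -> any() semantics
print(do_files_exist(['/no/such/file']))          # False -> none exist
```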
tensorflow/datasets
tensorflow_datasets/core/file_format_adapter.py
get_incomplete_path
def get_incomplete_path(filename):
  """Returns a temporary filename based on filename."""
  random_suffix = "".join(
      random.choice(string.ascii_uppercase + string.digits)
      for _ in range(6))
  return filename + ".incomplete" + random_suffix
python
def get_incomplete_path(filename):
  """Returns a temporary filename based on filename."""
  random_suffix = "".join(
      random.choice(string.ascii_uppercase + string.digits)
      for _ in range(6))
  return filename + ".incomplete" + random_suffix
[ "def", "get_incomplete_path", "(", "filename", ")", ":", "random_suffix", "=", "\"\"", ".", "join", "(", "random", ".", "choice", "(", "string", ".", "ascii_uppercase", "+", "string", ".", "digits", ")", "for", "_", "in", "range", "(", "6", ")", ")", "...
Returns a temporary filename based on filename.
[ "Returns", "a", "temporary", "filename", "based", "on", "filename", "." ]
46ceb0cf7b4690f38ecbbc689e4d659a903d08dc
https://github.com/tensorflow/datasets/blob/46ceb0cf7b4690f38ecbbc689e4d659a903d08dc/tensorflow_datasets/core/file_format_adapter.py#L210-L214
train
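The random 6-character suffix keeps concurrent writers from clobbering each other; the convention pairs with `do_files_exist` above (write to the incomplete path, then rename when done). Using the function from the record above (it needs `random` and `string` imported):

```python
print(get_incomplete_path('/tmp/data.tfrecord-00000-of-00001'))
# e.g. '/tmp/data.tfrecord-00000-of-00001.incompleteK3J9QZ'
```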