| id | repo | path | func_name | original_string | language | code | code_tokens | docstring | docstring_tokens | sha | url |
|---|---|---|---|---|---|---|---|---|---|---|---|
243,200 | intake/intake | intake/catalog/local.py | YAMLFileCatalog._load | def _load(self, reload=False):
"""Load text of fcatalog file and pass to parse
Will do nothing if autoreload is off and reload is not explicitly
requested
"""
if self.autoreload or reload:
# First, we load from YAML, failing if syntax errors are found
options = self.storage_options or {}
if hasattr(self.path, 'path') or hasattr(self.path, 'read'):
file_open = self.path
self.path = make_path_posix(
getattr(self.path, 'path',
getattr(self.path, 'name', 'file')))
else:
file_open = open_files(self.path, mode='rb', **options)
assert len(file_open) == 1
file_open = file_open[0]
self._dir = get_dir(self.path)
with file_open as f:
text = f.read().decode()
if "!template " in text:
logger.warning("Use of '!template' deprecated - fixing")
text = text.replace('!template ', '')
self.parse(text) | python | def _load(self, reload=False):
if self.autoreload or reload:
# First, we load from YAML, failing if syntax errors are found
options = self.storage_options or {}
if hasattr(self.path, 'path') or hasattr(self.path, 'read'):
file_open = self.path
self.path = make_path_posix(
getattr(self.path, 'path',
getattr(self.path, 'name', 'file')))
else:
file_open = open_files(self.path, mode='rb', **options)
assert len(file_open) == 1
file_open = file_open[0]
self._dir = get_dir(self.path)
with file_open as f:
text = f.read().decode()
if "!template " in text:
logger.warning("Use of '!template' deprecated - fixing")
text = text.replace('!template ', '')
self.parse(text) | [
"def",
"_load",
"(",
"self",
",",
"reload",
"=",
"False",
")",
":",
"if",
"self",
".",
"autoreload",
"or",
"reload",
":",
"# First, we load from YAML, failing if syntax errors are found",
"options",
"=",
"self",
".",
"storage_options",
"or",
"{",
"}",
"if",
"has... | Load text of fcatalog file and pass to parse
Will do nothing if autoreload is off and reload is not explicitly
requested | [
"Load",
"text",
"of",
"fcatalog",
"file",
"and",
"pass",
"to",
"parse"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/local.py#L544-L569 |
243,201 | intake/intake | intake/catalog/local.py | YAMLFileCatalog.parse | def parse(self, text):
"""Create entries from catalog text
Normally the text comes from the file at self.path via the ``_load()``
method, but could be explicitly set instead. A copy of the text is
kept in attribute ``.text`` .
Parameters
----------
text : str
YAML formatted catalog spec
"""
self.text = text
data = yaml_load(self.text)
if data is None:
raise exceptions.CatalogException('No YAML data in file')
# Second, we validate the schema and semantics
context = dict(root=self._dir)
result = CatalogParser(data, context=context, getenv=self.getenv,
getshell=self.getshell)
if result.errors:
raise exceptions.ValidationError(
"Catalog '{}' has validation errors:\n\n{}"
"".format(self.path, "\n".join(result.errors)), result.errors)
cfg = result.data
self._entries = {}
for entry in cfg['data_sources']:
entry._catalog = self
self._entries[entry.name] = entry
self.metadata = cfg.get('metadata', {})
self.name = self.name or cfg.get('name') or self.name_from_path
self.description = self.description or cfg.get('description') | python | def parse(self, text):
self.text = text
data = yaml_load(self.text)
if data is None:
raise exceptions.CatalogException('No YAML data in file')
# Second, we validate the schema and semantics
context = dict(root=self._dir)
result = CatalogParser(data, context=context, getenv=self.getenv,
getshell=self.getshell)
if result.errors:
raise exceptions.ValidationError(
"Catalog '{}' has validation errors:\n\n{}"
"".format(self.path, "\n".join(result.errors)), result.errors)
cfg = result.data
self._entries = {}
for entry in cfg['data_sources']:
entry._catalog = self
self._entries[entry.name] = entry
self.metadata = cfg.get('metadata', {})
self.name = self.name or cfg.get('name') or self.name_from_path
self.description = self.description or cfg.get('description') | [
"def",
"parse",
"(",
"self",
",",
"text",
")",
":",
"self",
".",
"text",
"=",
"text",
"data",
"=",
"yaml_load",
"(",
"self",
".",
"text",
")",
"if",
"data",
"is",
"None",
":",
"raise",
"exceptions",
".",
"CatalogException",
"(",
"'No YAML data in file'",... | Create entries from catalog text
Normally the text comes from the file at self.path via the ``_load()``
method, but could be explicitly set instead. A copy of the text is
kept in attribute ``.text`` .
Parameters
----------
text : str
YAML formatted catalog spec | [
"Create",
"entries",
"from",
"catalog",
"text"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/local.py#L571-L607 |
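A hedged usage sketch for the `parse` record above: the entry name, driver, and file path are illustrative, not taken from intake's test data. Writing the spec to disk and opening it with `intake.open_catalog` exercises `_load()` and `parse()` together.

```python
import intake

# Hypothetical catalog spec; 'example_csv' and the urlpath are made up.
spec = """
sources:
  example_csv:
    description: Example CSV source
    driver: csv
    args:
      urlpath: '{{ CATALOG_DIR }}/example.csv'
"""

with open("example_catalog.yaml", "w") as f:
    f.write(spec)

# open_catalog builds a YAMLFileCatalog; its _load() reads the file and hands
# the text to parse(), creating one entry per item under 'sources'.
cat = intake.open_catalog("example_catalog.yaml")
print(list(cat))  # ['example_csv']
```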
243,202 | intake/intake | intake/catalog/local.py | YAMLFileCatalog.name_from_path | def name_from_path(self):
"""If catalog is named 'catalog' take name from parent directory"""
name = os.path.splitext(os.path.basename(self.path))[0]
if name == 'catalog':
name = os.path.basename(os.path.dirname(self.path))
return name.replace('.', '_') | python | def name_from_path(self):
name = os.path.splitext(os.path.basename(self.path))[0]
if name == 'catalog':
name = os.path.basename(os.path.dirname(self.path))
return name.replace('.', '_') | [
"def",
"name_from_path",
"(",
"self",
")",
":",
"name",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"os",
".",
"path",
".",
"basename",
"(",
"self",
".",
"path",
")",
")",
"[",
"0",
"]",
"if",
"name",
"==",
"'catalog'",
":",
"name",
"=",
"os",
... | If catalog is named 'catalog' take name from parent directory | [
"If",
"catalog",
"is",
"named",
"catalog",
"take",
"name",
"from",
"parent",
"directory"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/local.py#L610-L615 |
243,203 | intake/intake | intake/cli/server/server.py | ServerSourceHandler.get | def get(self):
"""
Access one source's info.
This is for direct access to an entry by name for random access, which
is useful to the client when the whole catalog has not first been
listed and pulled locally (e.g., in the case of pagination).
"""
head = self.request.headers
name = self.get_argument('name')
if self.auth.allow_connect(head):
if 'source_id' in head:
cat = self._cache.get(head['source_id'])
else:
cat = self._catalog
try:
source = cat[name]
except KeyError:
msg = 'No such entry'
raise tornado.web.HTTPError(status_code=404, log_message=msg,
reason=msg)
if self.auth.allow_access(head, source, self._catalog):
info = source.describe()
info['name'] = name
source_info = dict(source=info)
self.write(msgpack.packb(source_info, use_bin_type=True))
return
msg = 'Access forbidden'
raise tornado.web.HTTPError(status_code=403, log_message=msg,
reason=msg) | python | def get(self):
head = self.request.headers
name = self.get_argument('name')
if self.auth.allow_connect(head):
if 'source_id' in head:
cat = self._cache.get(head['source_id'])
else:
cat = self._catalog
try:
source = cat[name]
except KeyError:
msg = 'No such entry'
raise tornado.web.HTTPError(status_code=404, log_message=msg,
reason=msg)
if self.auth.allow_access(head, source, self._catalog):
info = source.describe()
info['name'] = name
source_info = dict(source=info)
self.write(msgpack.packb(source_info, use_bin_type=True))
return
msg = 'Access forbidden'
raise tornado.web.HTTPError(status_code=403, log_message=msg,
reason=msg) | [
"def",
"get",
"(",
"self",
")",
":",
"head",
"=",
"self",
".",
"request",
".",
"headers",
"name",
"=",
"self",
".",
"get_argument",
"(",
"'name'",
")",
"if",
"self",
".",
"auth",
".",
"allow_connect",
"(",
"head",
")",
":",
"if",
"'source_id'",
"in",... | Access one source's info.
This is for direct access to an entry by name for random access, which
is useful to the client when the whole catalog has not first been
listed and pulled locally (e.g., in the case of pagination). | [
"Access",
"one",
"source",
"s",
"info",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/cli/server/server.py#L186-L216 |
243,204 | intake/intake | intake/gui/catalog/select.py | CatSelector.preprocess | def preprocess(cls, cat):
"""Function to run on each cat input"""
if isinstance(cat, str):
cat = intake.open_catalog(cat)
return cat | python | def preprocess(cls, cat):
if isinstance(cat, str):
cat = intake.open_catalog(cat)
return cat | [
"def",
"preprocess",
"(",
"cls",
",",
"cat",
")",
":",
"if",
"isinstance",
"(",
"cat",
",",
"str",
")",
":",
"cat",
"=",
"intake",
".",
"open_catalog",
"(",
"cat",
")",
"return",
"cat"
] | Function to run on each cat input | [
"Function",
"to",
"run",
"on",
"each",
"cat",
"input"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/catalog/select.py#L57-L61 |
243,205 | intake/intake | intake/gui/catalog/select.py | CatSelector.expand_nested | def expand_nested(self, cats):
"""Populate widget with nested catalogs"""
down = '│'
right = '└──'
def get_children(parent):
return [e() for e in parent._entries.values() if e._container == 'catalog']
if len(cats) == 0:
return
cat = cats[0]
old = list(self.options.items())
name = next(k for k, v in old if v == cat)
index = next(i for i, (k, v) in enumerate(old) if v == cat)
if right in name:
prefix = f'{name.split(right)[0]}{down} {right}'
else:
prefix = right
children = get_children(cat)
for i, child in enumerate(children):
old.insert(index+i+1, (f'{prefix} {child.name}', child))
self.widget.options = dict(old) | python | def expand_nested(self, cats):
down = '│'
right = '└──'
def get_children(parent):
return [e() for e in parent._entries.values() if e._container == 'catalog']
if len(cats) == 0:
return
cat = cats[0]
old = list(self.options.items())
name = next(k for k, v in old if v == cat)
index = next(i for i, (k, v) in enumerate(old) if v == cat)
if right in name:
prefix = f'{name.split(right)[0]}{down} {right}'
else:
prefix = right
children = get_children(cat)
for i, child in enumerate(children):
old.insert(index+i+1, (f'{prefix} {child.name}', child))
self.widget.options = dict(old) | [
"def",
"expand_nested",
"(",
"self",
",",
"cats",
")",
":",
"down",
"=",
"'│'",
"right",
"=",
"'└──'",
"def",
"get_children",
"(",
"parent",
")",
":",
"return",
"[",
"e",
"(",
")",
"for",
"e",
"in",
"parent",
".",
"_entries",
".",
"values",
"(",
")... | Populate widget with nested catalogs | [
"Populate",
"widget",
"with",
"nested",
"catalogs"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/catalog/select.py#L91-L114 |
243,206 | intake/intake | intake/gui/catalog/select.py | CatSelector.collapse_nested | def collapse_nested(self, cats, max_nestedness=10):
"""
Collapse any items that are nested under cats.
`max_nestedness` acts as a fail-safe to prevent infinite looping.
"""
children = []
removed = set()
nestedness = max_nestedness
old = list(self.widget.options.values())
nested = [cat for cat in old if getattr(cat, 'cat') is not None]
parents = {cat.cat for cat in nested}
parents_to_remove = cats
while len(parents_to_remove) > 0 and nestedness > 0:
for cat in nested:
if cat.cat in parents_to_remove:
children.append(cat)
removed = removed.union(parents_to_remove)
nested = [cat for cat in nested if cat not in children]
parents_to_remove = {c for c in children if c in parents - removed}
nestedness -= 1
self.remove(children) | python | def collapse_nested(self, cats, max_nestedness=10):
children = []
removed = set()
nestedness = max_nestedness
old = list(self.widget.options.values())
nested = [cat for cat in old if getattr(cat, 'cat') is not None]
parents = {cat.cat for cat in nested}
parents_to_remove = cats
while len(parents_to_remove) > 0 and nestedness > 0:
for cat in nested:
if cat.cat in parents_to_remove:
children.append(cat)
removed = removed.union(parents_to_remove)
nested = [cat for cat in nested if cat not in children]
parents_to_remove = {c for c in children if c in parents - removed}
nestedness -= 1
self.remove(children) | [
"def",
"collapse_nested",
"(",
"self",
",",
"cats",
",",
"max_nestedness",
"=",
"10",
")",
":",
"children",
"=",
"[",
"]",
"removed",
"=",
"set",
"(",
")",
"nestedness",
"=",
"max_nestedness",
"old",
"=",
"list",
"(",
"self",
".",
"widget",
".",
"optio... | Collapse any items that are nested under cats.
`max_nestedness` acts as a fail-safe to prevent infinite looping. | [
"Collapse",
"any",
"items",
"that",
"are",
"nested",
"under",
"cats",
".",
"max_nestedness",
"acts",
"as",
"a",
"fail",
"-",
"safe",
"to",
"prevent",
"infinite",
"looping",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/catalog/select.py#L116-L137 |
243,207 | intake/intake | intake/gui/catalog/select.py | CatSelector.remove_selected | def remove_selected(self, *args):
"""Remove the selected catalog - allow the passing of arbitrary
args so that buttons work. Also remove any nested catalogs."""
self.collapse_nested(self.selected)
self.remove(self.selected) | python | def remove_selected(self, *args):
self.collapse_nested(self.selected)
self.remove(self.selected) | [
"def",
"remove_selected",
"(",
"self",
",",
"*",
"args",
")",
":",
"self",
".",
"collapse_nested",
"(",
"self",
".",
"selected",
")",
"self",
".",
"remove",
"(",
"self",
".",
"selected",
")"
] | Remove the selected catalog - allow the passing of arbitrary
args so that buttons work. Also remove any nested catalogs. | [
"Remove",
"the",
"selected",
"catalog",
"-",
"allow",
"the",
"passing",
"of",
"arbitrary",
"args",
"so",
"that",
"buttons",
"work",
".",
"Also",
"remove",
"any",
"nested",
"catalogs",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/catalog/select.py#L139-L143 |
243,208 | intake/intake | intake/container/persist.py | PersistStore.add | def add(self, key, source):
"""Add the persisted source to the store under the given key
key : str
The unique token of the un-persisted, original source
source : DataSource instance
The thing to add to the persisted catalogue, referring to persisted
data
"""
from intake.catalog.local import LocalCatalogEntry
try:
with self.fs.open(self.path, 'rb') as f:
data = yaml.safe_load(f)
except IOError:
data = {'sources': {}}
ds = source._yaml()['sources'][source.name]
data['sources'][key] = ds
with self.fs.open(self.path, 'wb') as fo:
fo.write(yaml.dump(data, default_flow_style=False).encode())
self._entries[key] = LocalCatalogEntry(
name=ds['metadata']['original_name'],
direct_access=True,
cache=[],
parameters=[],
catalog_dir=None,
**data['sources'][key]) | python | def add(self, key, source):
from intake.catalog.local import LocalCatalogEntry
try:
with self.fs.open(self.path, 'rb') as f:
data = yaml.safe_load(f)
except IOError:
data = {'sources': {}}
ds = source._yaml()['sources'][source.name]
data['sources'][key] = ds
with self.fs.open(self.path, 'wb') as fo:
fo.write(yaml.dump(data, default_flow_style=False).encode())
self._entries[key] = LocalCatalogEntry(
name=ds['metadata']['original_name'],
direct_access=True,
cache=[],
parameters=[],
catalog_dir=None,
**data['sources'][key]) | [
"def",
"add",
"(",
"self",
",",
"key",
",",
"source",
")",
":",
"from",
"intake",
".",
"catalog",
".",
"local",
"import",
"LocalCatalogEntry",
"try",
":",
"with",
"self",
".",
"fs",
".",
"open",
"(",
"self",
".",
"path",
",",
"'rb'",
")",
"as",
"f"... | Add the persisted source to the store under the given key
key : str
The unique token of the un-persisted, original source
source : DataSource instance
The thing to add to the persisted catalogue, referring to persisted
data | [
"Add",
"the",
"persisted",
"source",
"to",
"the",
"store",
"under",
"the",
"given",
"key"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/container/persist.py#L84-L109 |
243,209 | intake/intake | intake/container/persist.py | PersistStore.get_tok | def get_tok(self, source):
"""Get string token from object
Strings are assumed to already be a token; if source or entry, see
if it is a persisted thing ("original_tok" is in its metadata), else
generate its own token.
"""
if isinstance(source, str):
return source
if isinstance(source, CatalogEntry):
return source._metadata.get('original_tok', source._tok)
if isinstance(source, DataSource):
return source.metadata.get('original_tok', source._tok)
raise IndexError | python | def get_tok(self, source):
if isinstance(source, str):
return source
if isinstance(source, CatalogEntry):
return source._metadata.get('original_tok', source._tok)
if isinstance(source, DataSource):
return source.metadata.get('original_tok', source._tok)
raise IndexError | [
"def",
"get_tok",
"(",
"self",
",",
"source",
")",
":",
"if",
"isinstance",
"(",
"source",
",",
"str",
")",
":",
"return",
"source",
"if",
"isinstance",
"(",
"source",
",",
"CatalogEntry",
")",
":",
"return",
"source",
".",
"_metadata",
".",
"get",
"("... | Get string token from object
Strings are assumed to already be a token; if source or entry, see
if it is a persisted thing ("original_tok" is in its metadata), else
generate its own token. | [
"Get",
"string",
"token",
"from",
"object"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/container/persist.py#L111-L126 |
243,210 | intake/intake | intake/container/persist.py | PersistStore.remove | def remove(self, source, delfiles=True):
"""Remove a dataset from the persist store
source : str or DataSource or Lo
If a str, this is the unique ID of the original source, which is
the key of the persisted dataset within the store. If a source,
can be either the original or the persisted source.
delfiles : bool
Whether to remove the on-disc artifact
"""
source = self.get_tok(source)
with self.fs.open(self.path, 'rb') as f:
data = yaml.safe_load(f.read().decode())
data['sources'].pop(source, None)
with self.fs.open(self.path, 'wb') as fo:
fo.write(yaml.dump(data, default_flow_style=False).encode())
if delfiles:
path = posixpath.join(self.pdir, source)
try:
self.fs.rm(path, True)
except Exception as e:
logger.debug("Failed to delete persisted data dir %s" % path)
self._entries.pop(source, None) | python | def remove(self, source, delfiles=True):
source = self.get_tok(source)
with self.fs.open(self.path, 'rb') as f:
data = yaml.safe_load(f.read().decode())
data['sources'].pop(source, None)
with self.fs.open(self.path, 'wb') as fo:
fo.write(yaml.dump(data, default_flow_style=False).encode())
if delfiles:
path = posixpath.join(self.pdir, source)
try:
self.fs.rm(path, True)
except Exception as e:
logger.debug("Failed to delete persisted data dir %s" % path)
self._entries.pop(source, None) | [
"def",
"remove",
"(",
"self",
",",
"source",
",",
"delfiles",
"=",
"True",
")",
":",
"source",
"=",
"self",
".",
"get_tok",
"(",
"source",
")",
"with",
"self",
".",
"fs",
".",
"open",
"(",
"self",
".",
"path",
",",
"'rb'",
")",
"as",
"f",
":",
... | Remove a dataset from the persist store
source : str or DataSource or Lo
If a str, this is the unique ID of the original source, which is
the key of the persisted dataset within the store. If a source,
can be either the original or the persisted source.
delfiles : bool
Whether to remove the on-disc artifact | [
"Remove",
"a",
"dataset",
"from",
"the",
"persist",
"store"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/container/persist.py#L128-L150 |
243,211 | intake/intake | intake/container/persist.py | PersistStore.backtrack | def backtrack(self, source):
"""Given a unique key in the store, recreate original source"""
key = self.get_tok(source)
s = self[key]()
meta = s.metadata['original_source']
cls = meta['cls']
args = meta['args']
kwargs = meta['kwargs']
cls = import_name(cls)
sout = cls(*args, **kwargs)
sout.metadata = s.metadata['original_metadata']
sout.name = s.metadata['original_name']
return sout | python | def backtrack(self, source):
key = self.get_tok(source)
s = self[key]()
meta = s.metadata['original_source']
cls = meta['cls']
args = meta['args']
kwargs = meta['kwargs']
cls = import_name(cls)
sout = cls(*args, **kwargs)
sout.metadata = s.metadata['original_metadata']
sout.name = s.metadata['original_name']
return sout | [
"def",
"backtrack",
"(",
"self",
",",
"source",
")",
":",
"key",
"=",
"self",
".",
"get_tok",
"(",
"source",
")",
"s",
"=",
"self",
"[",
"key",
"]",
"(",
")",
"meta",
"=",
"s",
".",
"metadata",
"[",
"'original_source'",
"]",
"cls",
"=",
"meta",
"... | Given a unique key in the store, recreate original source | [
"Given",
"a",
"unique",
"key",
"in",
"the",
"store",
"recreate",
"original",
"source"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/container/persist.py#L156-L168 |
243,212 | intake/intake | intake/container/persist.py | PersistStore.refresh | def refresh(self, key):
"""Recreate and re-persist the source for the given unique ID"""
s0 = self[key]
s = self.backtrack(key)
s.persist(**s0.metadata['persist_kwargs']) | python | def refresh(self, key):
s0 = self[key]
s = self.backtrack(key)
s.persist(**s0.metadata['persist_kwargs']) | [
"def",
"refresh",
"(",
"self",
",",
"key",
")",
":",
"s0",
"=",
"self",
"[",
"key",
"]",
"s",
"=",
"self",
".",
"backtrack",
"(",
"key",
")",
"s",
".",
"persist",
"(",
"*",
"*",
"s0",
".",
"metadata",
"[",
"'persist_kwargs'",
"]",
")"
] | Recreate and re-persist the source for the given unique ID | [
"Recreate",
"and",
"re",
"-",
"persist",
"the",
"source",
"for",
"the",
"given",
"unique",
"ID"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/container/persist.py#L170-L174 |
243,213 | intake/intake | intake/gui/source/select.py | SourceSelector.cats | def cats(self, cats):
"""Set sources from a list of cats"""
sources = []
for cat in coerce_to_list(cats):
sources.extend([entry for entry in cat._entries.values() if entry._container != 'catalog'])
self.items = sources | python | def cats(self, cats):
sources = []
for cat in coerce_to_list(cats):
sources.extend([entry for entry in cat._entries.values() if entry._container != 'catalog'])
self.items = sources | [
"def",
"cats",
"(",
"self",
",",
"cats",
")",
":",
"sources",
"=",
"[",
"]",
"for",
"cat",
"in",
"coerce_to_list",
"(",
"cats",
")",
":",
"sources",
".",
"extend",
"(",
"[",
"entry",
"for",
"entry",
"in",
"cat",
".",
"_entries",
".",
"values",
"(",... | Set sources from a list of cats | [
"Set",
"sources",
"from",
"a",
"list",
"of",
"cats"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/source/select.py#L88-L93 |
243,214 | intake/intake | intake/cli/client/__main__.py | main | def main(argv=None):
''' Execute the "intake" command line program.
'''
from intake.cli.bootstrap import main as _main
return _main('Intake Catalog CLI', subcommands.all, argv or sys.argv) | python | def main(argv=None):
''' Execute the "intake" command line program.
'''
from intake.cli.bootstrap import main as _main
return _main('Intake Catalog CLI', subcommands.all, argv or sys.argv) | [
"def",
"main",
"(",
"argv",
"=",
"None",
")",
":",
"from",
"intake",
".",
"cli",
".",
"bootstrap",
"import",
"main",
"as",
"_main",
"return",
"_main",
"(",
"'Intake Catalog CLI'",
",",
"subcommands",
".",
"all",
",",
"argv",
"or",
"sys",
".",
"argv",
"... | Execute the "intake" command line program. | [
"Execute",
"the",
"intake",
"command",
"line",
"program",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/cli/client/__main__.py#L27-L33 |
243,215 | intake/intake | intake/source/base.py | DataSource._load_metadata | def _load_metadata(self):
"""load metadata only if needed"""
if self._schema is None:
self._schema = self._get_schema()
self.datashape = self._schema.datashape
self.dtype = self._schema.dtype
self.shape = self._schema.shape
self.npartitions = self._schema.npartitions
self.metadata.update(self._schema.extra_metadata) | python | def _load_metadata(self):
if self._schema is None:
self._schema = self._get_schema()
self.datashape = self._schema.datashape
self.dtype = self._schema.dtype
self.shape = self._schema.shape
self.npartitions = self._schema.npartitions
self.metadata.update(self._schema.extra_metadata) | [
"def",
"_load_metadata",
"(",
"self",
")",
":",
"if",
"self",
".",
"_schema",
"is",
"None",
":",
"self",
".",
"_schema",
"=",
"self",
".",
"_get_schema",
"(",
")",
"self",
".",
"datashape",
"=",
"self",
".",
"_schema",
".",
"datashape",
"self",
".",
... | load metadata only if needed | [
"load",
"metadata",
"only",
"if",
"needed"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/base.py#L114-L122 |
243,216 | intake/intake | intake/source/base.py | DataSource.yaml | def yaml(self, with_plugin=False):
"""Return YAML representation of this data-source
The output may be roughly appropriate for inclusion in a YAML
catalog. This is a best-effort implementation
Parameters
----------
with_plugin: bool
If True, create a "plugins" section, for cases where this source
is created with a plugin not expected to be in the global Intake
registry.
"""
from yaml import dump
data = self._yaml(with_plugin=with_plugin)
return dump(data, default_flow_style=False) | python | def yaml(self, with_plugin=False):
from yaml import dump
data = self._yaml(with_plugin=with_plugin)
return dump(data, default_flow_style=False) | [
"def",
"yaml",
"(",
"self",
",",
"with_plugin",
"=",
"False",
")",
":",
"from",
"yaml",
"import",
"dump",
"data",
"=",
"self",
".",
"_yaml",
"(",
"with_plugin",
"=",
"with_plugin",
")",
"return",
"dump",
"(",
"data",
",",
"default_flow_style",
"=",
"Fals... | Return YAML representation of this data-source
The output may be roughly appropriate for inclusion in a YAML
catalog. This is a best-effort implementation
Parameters
----------
with_plugin: bool
If True, create a "plugins" section, for cases where this source
is created with a plugin not expected to be in the global Intake
registry. | [
"Return",
"YAML",
"representation",
"of",
"this",
"data",
"-",
"source"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/base.py#L145-L160 |
243,217 | intake/intake | intake/source/base.py | DataSource.discover | def discover(self):
"""Open resource and populate the source attributes."""
self._load_metadata()
return dict(datashape=self.datashape,
dtype=self.dtype,
shape=self.shape,
npartitions=self.npartitions,
metadata=self.metadata) | python | def discover(self):
self._load_metadata()
return dict(datashape=self.datashape,
dtype=self.dtype,
shape=self.shape,
npartitions=self.npartitions,
metadata=self.metadata) | [
"def",
"discover",
"(",
"self",
")",
":",
"self",
".",
"_load_metadata",
"(",
")",
"return",
"dict",
"(",
"datashape",
"=",
"self",
".",
"datashape",
",",
"dtype",
"=",
"self",
".",
"dtype",
",",
"shape",
"=",
"self",
".",
"shape",
",",
"npartitions",
... | Open resource and populate the source attributes. | [
"Open",
"resource",
"and",
"populate",
"the",
"source",
"attributes",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/base.py#L167-L175 |
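A brief sketch of the attributes populated by `discover()` above; the CSV path is hypothetical and any DataSource subclass should behave the same way.

```python
import intake

source = intake.open_csv("example.csv")   # hypothetical file; any driver works
info = source.discover()                  # triggers _get_schema() via _load_metadata()
print(info["dtype"], info["shape"], info["npartitions"])
```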
243,218 | intake/intake | intake/source/base.py | DataSource.read_chunked | def read_chunked(self):
"""Return iterator over container fragments of data source"""
self._load_metadata()
for i in range(self.npartitions):
yield self._get_partition(i) | python | def read_chunked(self):
self._load_metadata()
for i in range(self.npartitions):
yield self._get_partition(i) | [
"def",
"read_chunked",
"(",
"self",
")",
":",
"self",
".",
"_load_metadata",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"npartitions",
")",
":",
"yield",
"self",
".",
"_get_partition",
"(",
"i",
")"
] | Return iterator over container fragments of data source | [
"Return",
"iterator",
"over",
"container",
"fragments",
"of",
"data",
"source"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/base.py#L184-L188 |
243,219 | intake/intake | intake/source/base.py | DataSource.read_partition | def read_partition(self, i):
"""Return a part of the data corresponding to i-th partition.
By default, assumes i should be an integer between zero and npartitions;
override for more complex indexing schemes.
"""
self._load_metadata()
if i < 0 or i >= self.npartitions:
raise IndexError('%d is out of range' % i)
return self._get_partition(i) | python | def read_partition(self, i):
self._load_metadata()
if i < 0 or i >= self.npartitions:
raise IndexError('%d is out of range' % i)
return self._get_partition(i) | [
"def",
"read_partition",
"(",
"self",
",",
"i",
")",
":",
"self",
".",
"_load_metadata",
"(",
")",
"if",
"i",
"<",
"0",
"or",
"i",
">=",
"self",
".",
"npartitions",
":",
"raise",
"IndexError",
"(",
"'%d is out of range'",
"%",
"i",
")",
"return",
"self... | Return a part of the data corresponding to i-th partition.
By default, assumes i should be an integer between zero and npartitions;
override for more complex indexing schemes. | [
"Return",
"a",
"part",
"of",
"the",
"data",
"corresponding",
"to",
"i",
"-",
"th",
"partition",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/base.py#L190-L200 |
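A minimal sketch of the two partition-reading paths shown in the `read_chunked` and `read_partition` records above; the glob pattern and the `handle` function are placeholders, not part of intake.

```python
import intake

source = intake.open_csv("data-*.csv")    # hypothetical file pattern

def handle(chunk):
    print(len(chunk))                     # placeholder per-partition work

for chunk in source.read_chunked():      # yields one container fragment per partition
    handle(chunk)

first = source.read_partition(0)          # same data as the first chunk above
```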
243,220 | intake/intake | intake/source/base.py | DataSource.plot | def plot(self):
"""
Returns a hvPlot object to provide a high-level plotting API.
To display in a notebook, be sure to run ``intake.output_notebook()``
first.
"""
try:
from hvplot import hvPlot
except ImportError:
raise ImportError("The intake plotting API requires hvplot."
"hvplot may be installed with:\n\n"
"`conda install -c pyviz hvplot` or "
"`pip install hvplot`.")
metadata = self.metadata.get('plot', {})
fields = self.metadata.get('fields', {})
for attrs in fields.values():
if 'range' in attrs:
attrs['range'] = tuple(attrs['range'])
metadata['fields'] = fields
plots = self.metadata.get('plots', {})
return hvPlot(self, custom_plots=plots, **metadata) | python | def plot(self):
try:
from hvplot import hvPlot
except ImportError:
raise ImportError("The intake plotting API requires hvplot."
"hvplot may be installed with:\n\n"
"`conda install -c pyviz hvplot` or "
"`pip install hvplot`.")
metadata = self.metadata.get('plot', {})
fields = self.metadata.get('fields', {})
for attrs in fields.values():
if 'range' in attrs:
attrs['range'] = tuple(attrs['range'])
metadata['fields'] = fields
plots = self.metadata.get('plots', {})
return hvPlot(self, custom_plots=plots, **metadata) | [
"def",
"plot",
"(",
"self",
")",
":",
"try",
":",
"from",
"hvplot",
"import",
"hvPlot",
"except",
"ImportError",
":",
"raise",
"ImportError",
"(",
"\"The intake plotting API requires hvplot.\"",
"\"hvplot may be installed with:\\n\\n\"",
"\"`conda install -c pyviz hvplot` or ... | Returns a hvPlot object to provide a high-level plotting API.
To display in a notebook, be sure to run ``intake.output_notebook()``
first. | [
"Returns",
"a",
"hvPlot",
"object",
"to",
"provide",
"a",
"high",
"-",
"level",
"plotting",
"API",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/base.py#L231-L252 |
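A hedged sketch of the plotting API above, assuming hvplot is installed and that the data has `time` and `value` columns (both hypothetical).

```python
import intake

intake.output_notebook()                       # needed for display in a notebook
source = intake.open_csv("example.csv")        # hypothetical file
curve = source.plot.line(x="time", y="value")  # hvPlot wrapper returned by the property above
curve
```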
243,221 | intake/intake | intake/source/base.py | DataSource.persist | def persist(self, ttl=None, **kwargs):
"""Save data from this source to local persistent storage"""
from ..container import container_map
from ..container.persist import PersistStore
import time
if 'original_tok' in self.metadata:
raise ValueError('Cannot persist a source taken from the persist '
'store')
method = container_map[self.container]._persist
store = PersistStore()
out = method(self, path=store.getdir(self), **kwargs)
out.description = self.description
metadata = {'timestamp': time.time(),
'original_metadata': self.metadata,
'original_source': self.__getstate__(),
'original_name': self.name,
'original_tok': self._tok,
'persist_kwargs': kwargs,
'ttl': ttl,
'cat': {} if self.cat is None else self.cat.__getstate__()}
out.metadata = metadata
out.name = self.name
store.add(self._tok, out)
return out | python | def persist(self, ttl=None, **kwargs):
from ..container import container_map
from ..container.persist import PersistStore
import time
if 'original_tok' in self.metadata:
raise ValueError('Cannot persist a source taken from the persist '
'store')
method = container_map[self.container]._persist
store = PersistStore()
out = method(self, path=store.getdir(self), **kwargs)
out.description = self.description
metadata = {'timestamp': time.time(),
'original_metadata': self.metadata,
'original_source': self.__getstate__(),
'original_name': self.name,
'original_tok': self._tok,
'persist_kwargs': kwargs,
'ttl': ttl,
'cat': {} if self.cat is None else self.cat.__getstate__()}
out.metadata = metadata
out.name = self.name
store.add(self._tok, out)
return out | [
"def",
"persist",
"(",
"self",
",",
"ttl",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
".",
".",
"container",
"import",
"container_map",
"from",
".",
".",
"container",
".",
"persist",
"import",
"PersistStore",
"import",
"time",
"if",
"'origin... | Save data from this source to local persistent storage | [
"Save",
"data",
"from",
"this",
"source",
"to",
"local",
"persistent",
"storage"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/base.py#L261-L284 |
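A short usage sketch for `persist` above; the source path is hypothetical, and the result is registered in the PersistStore under the source's token.

```python
import intake

source = intake.open_csv("slow-remote-data.csv")   # hypothetical source
local = source.persist(ttl=3600)                   # ttl is recorded in the persisted metadata
df = local.read()                                  # subsequent reads come from the local copy
```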
243,222 | intake/intake | intake/source/base.py | DataSource.export | def export(self, path, **kwargs):
"""Save this data for sharing with other people
Creates a copy of the data in a format appropriate for its container,
in the location specified (which can be remote, e.g., s3). Returns
a YAML representation of this saved dataset, so that it can be put
into a catalog file.
"""
from ..container import container_map
import time
method = container_map[self.container]._persist
# may need to create path - access file-system method
out = method(self, path=path, **kwargs)
out.description = self.description
metadata = {'timestamp': time.time(),
'original_metadata': self.metadata,
'original_source': self.__getstate__(),
'original_name': self.name,
'original_tok': self._tok,
'persist_kwargs': kwargs}
out.metadata = metadata
out.name = self.name
return out.yaml() | python | def export(self, path, **kwargs):
from ..container import container_map
import time
method = container_map[self.container]._persist
# may need to create path - access file-system method
out = method(self, path=path, **kwargs)
out.description = self.description
metadata = {'timestamp': time.time(),
'original_metadata': self.metadata,
'original_source': self.__getstate__(),
'original_name': self.name,
'original_tok': self._tok,
'persist_kwargs': kwargs}
out.metadata = metadata
out.name = self.name
return out.yaml() | [
"def",
"export",
"(",
"self",
",",
"path",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
".",
".",
"container",
"import",
"container_map",
"import",
"time",
"method",
"=",
"container_map",
"[",
"self",
".",
"container",
"]",
".",
"_persist",
"# may need to cr... | Save this data for sharing with other people
Creates a copy of the data in a format appropriate for its container,
in the location specified (which can be remote, e.g., s3). Returns
a YAML representation of this saved dataset, so that it can be put
into a catalog file. | [
"Save",
"this",
"data",
"for",
"sharing",
"with",
"other",
"people"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/base.py#L286-L308 |
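A hedged sketch for `export` above; the target path is hypothetical and may be remote if the corresponding filesystem driver is installed.

```python
import intake

source = intake.open_csv("example.csv")                       # hypothetical file
yaml_block = source.export("s3://my-bucket/shared/example")   # hypothetical remote path
print(yaml_block)   # YAML snippet ready to paste into a catalog file
```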
243,223 | intake/intake | intake/catalog/default.py | load_user_catalog | def load_user_catalog():
"""Return a catalog for the platform-specific user Intake directory"""
cat_dir = user_data_dir()
if not os.path.isdir(cat_dir):
return Catalog()
else:
return YAMLFilesCatalog(cat_dir) | python | def load_user_catalog():
cat_dir = user_data_dir()
if not os.path.isdir(cat_dir):
return Catalog()
else:
return YAMLFilesCatalog(cat_dir) | [
"def",
"load_user_catalog",
"(",
")",
":",
"cat_dir",
"=",
"user_data_dir",
"(",
")",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"cat_dir",
")",
":",
"return",
"Catalog",
"(",
")",
"else",
":",
"return",
"YAMLFilesCatalog",
"(",
"cat_dir",
")"
] | Return a catalog for the platform-specific user Intake directory | [
"Return",
"a",
"catalog",
"for",
"the",
"platform",
"-",
"specific",
"user",
"Intake",
"directory"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/default.py#L19-L25 |
243,224 | intake/intake | intake/catalog/default.py | load_global_catalog | def load_global_catalog():
"""Return a catalog for the environment-specific Intake directory"""
cat_dir = global_data_dir()
if not os.path.isdir(cat_dir):
return Catalog()
else:
return YAMLFilesCatalog(cat_dir) | python | def load_global_catalog():
cat_dir = global_data_dir()
if not os.path.isdir(cat_dir):
return Catalog()
else:
return YAMLFilesCatalog(cat_dir) | [
"def",
"load_global_catalog",
"(",
")",
":",
"cat_dir",
"=",
"global_data_dir",
"(",
")",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"cat_dir",
")",
":",
"return",
"Catalog",
"(",
")",
"else",
":",
"return",
"YAMLFilesCatalog",
"(",
"cat_dir",
")... | Return a catalog for the environment-specific Intake directory | [
"Return",
"a",
"catalog",
"for",
"the",
"environment",
"-",
"specific",
"Intake",
"directory"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/default.py#L33-L39 |
243,225 | intake/intake | intake/catalog/default.py | global_data_dir | def global_data_dir():
"""Return the global Intake catalog dir for the current environment"""
prefix = False
if VIRTUALENV_VAR in os.environ:
prefix = os.environ[VIRTUALENV_VAR]
elif CONDA_VAR in os.environ:
prefix = sys.prefix
elif which('conda'):
# conda exists but is not activated
prefix = conda_prefix()
if prefix:
# conda and virtualenv use Linux-style directory pattern
return make_path_posix(os.path.join(prefix, 'share', 'intake'))
else:
return appdirs.site_data_dir(appname='intake', appauthor='intake') | python | def global_data_dir():
prefix = False
if VIRTUALENV_VAR in os.environ:
prefix = os.environ[VIRTUALENV_VAR]
elif CONDA_VAR in os.environ:
prefix = sys.prefix
elif which('conda'):
# conda exists but is not activated
prefix = conda_prefix()
if prefix:
# conda and virtualenv use Linux-style directory pattern
return make_path_posix(os.path.join(prefix, 'share', 'intake'))
else:
return appdirs.site_data_dir(appname='intake', appauthor='intake') | [
"def",
"global_data_dir",
"(",
")",
":",
"prefix",
"=",
"False",
"if",
"VIRTUALENV_VAR",
"in",
"os",
".",
"environ",
":",
"prefix",
"=",
"os",
".",
"environ",
"[",
"VIRTUALENV_VAR",
"]",
"elif",
"CONDA_VAR",
"in",
"os",
".",
"environ",
":",
"prefix",
"="... | Return the global Intake catalog dir for the current environment | [
"Return",
"the",
"global",
"Intake",
"catalog",
"dir",
"for",
"the",
"current",
"environment"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/default.py#L61-L76 |
243,226 | intake/intake | intake/catalog/default.py | load_combo_catalog | def load_combo_catalog():
"""Load a union of the user and global catalogs for convenience"""
user_dir = user_data_dir()
global_dir = global_data_dir()
desc = 'Generated from data packages found on your intake search path'
cat_dirs = []
if os.path.isdir(user_dir):
cat_dirs.append(user_dir + '/*.yaml')
cat_dirs.append(user_dir + '/*.yml')
if os.path.isdir(global_dir):
cat_dirs.append(global_dir + '/*.yaml')
cat_dirs.append(global_dir + '/*.yml')
for path_dir in conf.get('catalog_path', []):
if path_dir != '':
if not path_dir.endswith(('yaml', 'yml')):
cat_dirs.append(path_dir + '/*.yaml')
cat_dirs.append(path_dir + '/*.yml')
else:
cat_dirs.append(path_dir)
return YAMLFilesCatalog(cat_dirs, name='builtin', description=desc) | python | def load_combo_catalog():
user_dir = user_data_dir()
global_dir = global_data_dir()
desc = 'Generated from data packages found on your intake search path'
cat_dirs = []
if os.path.isdir(user_dir):
cat_dirs.append(user_dir + '/*.yaml')
cat_dirs.append(user_dir + '/*.yml')
if os.path.isdir(global_dir):
cat_dirs.append(global_dir + '/*.yaml')
cat_dirs.append(global_dir + '/*.yml')
for path_dir in conf.get('catalog_path', []):
if path_dir != '':
if not path_dir.endswith(('yaml', 'yml')):
cat_dirs.append(path_dir + '/*.yaml')
cat_dirs.append(path_dir + '/*.yml')
else:
cat_dirs.append(path_dir)
return YAMLFilesCatalog(cat_dirs, name='builtin', description=desc) | [
"def",
"load_combo_catalog",
"(",
")",
":",
"user_dir",
"=",
"user_data_dir",
"(",
")",
"global_dir",
"=",
"global_data_dir",
"(",
")",
"desc",
"=",
"'Generated from data packages found on your intake search path'",
"cat_dirs",
"=",
"[",
"]",
"if",
"os",
".",
"path"... | Load a union of the user and global catalogs for convenience | [
"Load",
"a",
"union",
"of",
"the",
"user",
"and",
"global",
"catalogs",
"for",
"convenience"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/default.py#L79-L99 |
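A brief sketch, assuming the usual top-level wiring in which intake exposes the result of `load_combo_catalog` as the builtin catalog.

```python
import intake

print(intake.cat.name)    # 'builtin', matching the name= argument above
print(list(intake.cat))   # data packages found on the intake search path
```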
243,227 | intake/intake | intake/catalog/base.py | Catalog.from_dict | def from_dict(cls, entries, **kwargs):
"""
Create Catalog from the given set of entries
Parameters
----------
entries : dict-like
A mapping of name:entry which supports dict-like functionality,
e.g., is derived from ``collections.abc.Mapping``.
kwargs : passed on to the constructor
Things like metadata, name; see ``__init__``.
Returns
-------
Catalog instance
"""
from dask.base import tokenize
cat = cls(**kwargs)
cat._entries = entries
cat._tok = tokenize(kwargs, entries)
return cat | python | def from_dict(cls, entries, **kwargs):
from dask.base import tokenize
cat = cls(**kwargs)
cat._entries = entries
cat._tok = tokenize(kwargs, entries)
return cat | [
"def",
"from_dict",
"(",
"cls",
",",
"entries",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
"dask",
".",
"base",
"import",
"tokenize",
"cat",
"=",
"cls",
"(",
"*",
"*",
"kwargs",
")",
"cat",
".",
"_entries",
"=",
"entries",
"cat",
".",
"_tok",
"=",
... | Create Catalog from the given set of entries
Parameters
----------
entries : dict-like
A mapping of name:entry which supports dict-like functionality,
e.g., is derived from ``collections.abc.Mapping``.
kwargs : passed on the constructor
Things like metadata, name; see ``__init__``.
Returns
-------
Catalog instance | [
"Create",
"Catalog",
"from",
"the",
"given",
"set",
"of",
"entries"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/base.py#L118-L138 |
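A hedged sketch of `from_dict` above, building a small Catalog from the entry mapping of an existing catalog (the catalog file is the hypothetical one from the earlier sketch).

```python
import intake
from intake.catalog.base import Catalog

cat = intake.open_catalog("example_catalog.yaml")      # hypothetical catalog file
entries = {name: cat._entries[name] for name in cat}   # name -> entry mapping
subset = Catalog.from_dict(entries, name="subset",
                           description="entries copied from another catalog")
```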
243,228 | intake/intake | intake/catalog/base.py | Catalog.reload | def reload(self):
"""Reload catalog if sufficient time has passed"""
if time.time() - self.updated > self.ttl:
self.force_reload() | python | def reload(self):
if time.time() - self.updated > self.ttl:
self.force_reload() | [
"def",
"reload",
"(",
"self",
")",
":",
"if",
"time",
".",
"time",
"(",
")",
"-",
"self",
".",
"updated",
">",
"self",
".",
"ttl",
":",
"self",
".",
"force_reload",
"(",
")"
] | Reload catalog if sufficient time has passed | [
"Reload",
"catalog",
"if",
"sufficient",
"time",
"has",
"passed"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/base.py#L177-L180 |
243,229 | intake/intake | intake/catalog/base.py | Catalog.filter | def filter(self, func):
"""Create a Catalog of a subset of entries based on a condition
Note that, whatever specific class this is performed on, the return
instance is a Catalog. The entries are passed unmodified, so they
will still reference the original catalog instance and include its
details such as directory.
Parameters
----------
func : function
This should take a CatalogEntry and return True or False. Those
items returning True will be included in the new Catalog, with the
same entry names
Returns
-------
New Catalog
"""
return Catalog.from_dict({key: entry for key, entry in self.items()
if func(entry)}) | python | def filter(self, func):
return Catalog.from_dict({key: entry for key, entry in self.items()
if func(entry)}) | [
"def",
"filter",
"(",
"self",
",",
"func",
")",
":",
"return",
"Catalog",
".",
"from_dict",
"(",
"{",
"key",
":",
"entry",
"for",
"key",
",",
"entry",
"in",
"self",
".",
"items",
"(",
")",
"if",
"func",
"(",
"entry",
")",
"}",
")"
] | Create a Catalog of a subset of entries based on a condition
Note that, whatever specific class this is performed on, the return
instance is a Catalog. The entries are passed unmodified, so they
will still reference the original catalog instance and include its
details such as directory.
Parameters
----------
func : function
This should take a CatalogEntry and return True or False. Those
items returning True will be included in the new Catalog, with the
same entry names
Returns
-------
New Catalog | [
"Create",
"a",
"Catalog",
"of",
"a",
"subset",
"of",
"entries",
"based",
"on",
"a",
"condition"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/base.py#L208-L228 |
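A short sketch of `filter` above, keeping only entries that are not themselves nested catalogs; the predicate mirrors the `_container` checks used elsewhere in this module, and the catalog file is hypothetical.

```python
import intake

cat = intake.open_catalog("example_catalog.yaml")    # hypothetical catalog file
data_only = cat.filter(lambda entry: entry._container != "catalog")
print(list(data_only))
```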
243,230 | intake/intake | intake/catalog/base.py | Catalog.walk | def walk(self, sofar=None, prefix=None, depth=2):
"""Get all entries in this catalog and sub-catalogs
Parameters
----------
sofar: dict or None
Within recursion, use this dict for output
prefix: list of str or None
Names of levels already visited
depth: int
Number of levels to descend; needed to truncate circular references
and for cleaner output
Returns
-------
Dict where the keys are the entry names in dotted syntax, and the
values are entry instances.
"""
out = sofar if sofar is not None else {}
prefix = [] if prefix is None else prefix
for name, item in self._entries.items():
if item._container == 'catalog' and depth > 1:
# recurse with default open parameters
try:
item().walk(out, prefix + [name], depth-1)
except Exception as e:
print(e)
pass # ignore inability to descend
n = '.'.join(prefix + [name])
out[n] = item
return out | python | def walk(self, sofar=None, prefix=None, depth=2):
out = sofar if sofar is not None else {}
prefix = [] if prefix is None else prefix
for name, item in self._entries.items():
if item._container == 'catalog' and depth > 1:
# recurse with default open parameters
try:
item().walk(out, prefix + [name], depth-1)
except Exception as e:
print(e)
pass # ignore inability to descend
n = '.'.join(prefix + [name])
out[n] = item
return out | [
"def",
"walk",
"(",
"self",
",",
"sofar",
"=",
"None",
",",
"prefix",
"=",
"None",
",",
"depth",
"=",
"2",
")",
":",
"out",
"=",
"sofar",
"if",
"sofar",
"is",
"not",
"None",
"else",
"{",
"}",
"prefix",
"=",
"[",
"]",
"if",
"prefix",
"is",
"None... | Get all entries in this catalog and sub-catalogs
Parameters
----------
sofar: dict or None
Within recursion, use this dict for output
prefix: list of str or None
Names of levels already visited
depth: int
Number of levels to descend; needed to truncate circular references
and for cleaner output
Returns
-------
Dict where the keys are the entry names in dotted syntax, and the
values are entry instances. | [
"Get",
"all",
"entries",
"in",
"this",
"catalog",
"and",
"sub",
"-",
"catalogs"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/base.py#L231-L261 |
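A minimal sketch of `walk` above; nested entries appear under dotted names such as `subcat.entry`, and the catalog file is hypothetical.

```python
import intake

cat = intake.open_catalog("example_catalog.yaml")   # hypothetical catalog file
flat = cat.walk(depth=2)                            # dict of dotted name -> entry
for dotted_name, entry in flat.items():
    print(dotted_name, entry._container)
```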
243,231 | intake/intake | intake/catalog/base.py | Catalog.serialize | def serialize(self):
"""
Produce YAML version of this catalog.
Note that this is not the same as ``.yaml()``, which produces a YAML
block referring to this catalog.
"""
import yaml
output = {"metadata": self.metadata, "sources": {},
"name": self.name}
for key, entry in self.items():
output["sources"][key] = entry._captured_init_kwargs
return yaml.dump(output) | python | def serialize(self):
import yaml
output = {"metadata": self.metadata, "sources": {},
"name": self.name}
for key, entry in self.items():
output["sources"][key] = entry._captured_init_kwargs
return yaml.dump(output) | [
"def",
"serialize",
"(",
"self",
")",
":",
"import",
"yaml",
"output",
"=",
"{",
"\"metadata\"",
":",
"self",
".",
"metadata",
",",
"\"sources\"",
":",
"{",
"}",
",",
"\"name\"",
":",
"self",
".",
"name",
"}",
"for",
"key",
",",
"entry",
"in",
"self"... | Produce YAML version of this catalog.
Note that this is not the same as ``.yaml()``, which produces a YAML
block referring to this catalog. | [
"Produce",
"YAML",
"version",
"of",
"this",
"catalog",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/base.py#L267-L279 |
243,232 | intake/intake | intake/catalog/base.py | Catalog.save | def save(self, url, storage_options=None):
"""
Output this catalog to a file as YAML
Parameters
----------
url : str
Location to save to, perhaps remote
storage_options : dict
Extra arguments for the file-system
"""
from dask.bytes import open_files
with open_files([url], **(storage_options or {}), mode='wt')[0] as f:
f.write(self.serialize()) | python | def save(self, url, storage_options=None):
from dask.bytes import open_files
with open_files([url], **(storage_options or {}), mode='wt')[0] as f:
f.write(self.serialize()) | [
"def",
"save",
"(",
"self",
",",
"url",
",",
"storage_options",
"=",
"None",
")",
":",
"from",
"dask",
".",
"bytes",
"import",
"open_files",
"with",
"open_files",
"(",
"[",
"url",
"]",
",",
"*",
"*",
"(",
"storage_options",
"or",
"{",
"}",
")",
",",
... | Output this catalog to a file as YAML
Parameters
----------
url : str
Location to save to, perhaps remote
storage_options : dict
Extra arguments for the file-system | [
"Output",
"this",
"catalog",
"to",
"a",
"file",
"as",
"YAML"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/base.py#L281-L294 |
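A hedged sketch covering the `serialize` and `save` records above; the catalog file and the remote target are hypothetical, and remote writes assume the matching filesystem driver is installed.

```python
import intake

cat = intake.open_catalog("example_catalog.yaml")    # hypothetical catalog file
text = cat.serialize()                               # YAML text of this catalog
cat.save("catalog_copy.yaml")                        # write locally ...
cat.save("s3://my-bucket/catalog.yaml",              # ... or to a hypothetical remote path
         storage_options={"anon": False})
```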
243,233 | intake/intake | intake/catalog/base.py | Entries.reset | def reset(self):
"Clear caches to force a reload."
self._page_cache.clear()
self._direct_lookup_cache.clear()
self._page_offset = 0
self.complete = self._catalog.page_size is None | python | def reset(self):
"Clear caches to force a reload."
self._page_cache.clear()
self._direct_lookup_cache.clear()
self._page_offset = 0
self.complete = self._catalog.page_size is None | [
"def",
"reset",
"(",
"self",
")",
":",
"self",
".",
"_page_cache",
".",
"clear",
"(",
")",
"self",
".",
"_direct_lookup_cache",
".",
"clear",
"(",
")",
"self",
".",
"_page_offset",
"=",
"0",
"self",
".",
"complete",
"=",
"self",
".",
"_catalog",
".",
... | Clear caches to force a reload. | [
"Clear",
"caches",
"to",
"force",
"a",
"reload",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/base.py#L444-L449 |
243,234 | intake/intake | intake/catalog/base.py | Entries.cached_items | def cached_items(self):
"""
Iterate over items that are already cached. Perform no requests.
"""
for item in six.iteritems(self._page_cache):
yield item
for item in six.iteritems(self._direct_lookup_cache):
yield item | python | def cached_items(self):
for item in six.iteritems(self._page_cache):
yield item
for item in six.iteritems(self._direct_lookup_cache):
yield item | [
"def",
"cached_items",
"(",
"self",
")",
":",
"for",
"item",
"in",
"six",
".",
"iteritems",
"(",
"self",
".",
"_page_cache",
")",
":",
"yield",
"item",
"for",
"item",
"in",
"six",
".",
"iteritems",
"(",
"self",
".",
"_direct_lookup_cache",
")",
":",
"y... | Iterate over items that are already cached. Perform no requests. | [
"Iterate",
"over",
"items",
"that",
"are",
"already",
"cached",
".",
"Perform",
"no",
"requests",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/base.py#L473-L480 |
243,235 | intake/intake | intake/catalog/base.py | RemoteCatalog._get_http_args | def _get_http_args(self, params):
"""
Return a copy of the http_args
Adds auth headers and 'source_id', merges in params.
"""
# Add the auth headers to any other headers
headers = self.http_args.get('headers', {})
if self.auth is not None:
auth_headers = self.auth.get_headers()
headers.update(auth_headers)
# build new http args with these headers
http_args = self.http_args.copy()
if self._source_id is not None:
headers['source_id'] = self._source_id
http_args['headers'] = headers
# Merge in any params specified by the caller.
merged_params = http_args.get('params', {})
merged_params.update(params)
http_args['params'] = merged_params
return http_args | python | def _get_http_args(self, params):
# Add the auth headers to any other headers
headers = self.http_args.get('headers', {})
if self.auth is not None:
auth_headers = self.auth.get_headers()
headers.update(auth_headers)
# build new http args with these headers
http_args = self.http_args.copy()
if self._source_id is not None:
headers['source_id'] = self._source_id
http_args['headers'] = headers
# Merge in any params specified by the caller.
merged_params = http_args.get('params', {})
merged_params.update(params)
http_args['params'] = merged_params
return http_args | [
"def",
"_get_http_args",
"(",
"self",
",",
"params",
")",
":",
"# Add the auth headers to any other headers",
"headers",
"=",
"self",
".",
"http_args",
".",
"get",
"(",
"'headers'",
",",
"{",
"}",
")",
"if",
"self",
".",
"auth",
"is",
"not",
"None",
":",
"... | Return a copy of the http_args
Adds auth headers and 'source_id', merges in params. | [
"Return",
"a",
"copy",
"of",
"the",
"http_args"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/base.py#L650-L672 |
243,236 | intake/intake | intake/catalog/base.py | RemoteCatalog._load | def _load(self):
"""Fetch metadata from remote. Entries are fetched lazily."""
# This will not immediately fetch any sources (entries). It will lazily
# fetch sources from the server in paginated blocks when this Catalog
# is iterated over. It will fetch specific sources when they are
# accessed in this Catalog via __getitem__.
if self.page_size is None:
# Fetch all source info.
params = {}
else:
# Just fetch the metadata now; fetch source info later in pages.
params = {'page_offset': 0, 'page_size': 0}
http_args = self._get_http_args(params)
response = requests.get(self.info_url, **http_args)
try:
response.raise_for_status()
except requests.HTTPError as err:
six.raise_from(RemoteCatalogError(
"Failed to fetch metadata."), err)
info = msgpack.unpackb(response.content, **unpack_kwargs)
self.metadata = info['metadata']
# The intake server now always provides a length, but the server may be
# running an older version of intake.
self._len = info.get('length')
self._entries.reset()
# If we are paginating (page_size is not None) and the server we are
# working with is new enough to support pagination, info['sources']
# should be empty. If either of those things is not true,
# info['sources'] will contain all the entries and we should cache them
# now.
if info['sources']:
# Signal that we are not paginating, even if we were asked to.
self._page_size = None
self._entries._page_cache.update(
{source['name']: RemoteCatalogEntry(
url=self.url,
getenv=self.getenv,
getshell=self.getshell,
auth=self.auth,
http_args=self.http_args, **source)
for source in info['sources']}) | python | def _load(self):
# This will not immediately fetch any sources (entries). It will lazily
# fetch sources from the server in paginated blocks when this Catalog
# is iterated over. It will fetch specific sources when they are
# accessed in this Catalog via __getitem__.
if self.page_size is None:
# Fetch all source info.
params = {}
else:
# Just fetch the metadata now; fetch source info later in pages.
params = {'page_offset': 0, 'page_size': 0}
http_args = self._get_http_args(params)
response = requests.get(self.info_url, **http_args)
try:
response.raise_for_status()
except requests.HTTPError as err:
six.raise_from(RemoteCatalogError(
"Failed to fetch metadata."), err)
info = msgpack.unpackb(response.content, **unpack_kwargs)
self.metadata = info['metadata']
# The intake server now always provides a length, but the server may be
# running an older version of intake.
self._len = info.get('length')
self._entries.reset()
# If we are paginating (page_size is not None) and the server we are
# working with is new enough to support pagination, info['sources']
# should be empty. If either of those things is not true,
# info['sources'] will contain all the entries and we should cache them
# now.
if info['sources']:
# Signal that we are not paginating, even if we were asked to.
self._page_size = None
self._entries._page_cache.update(
{source['name']: RemoteCatalogEntry(
url=self.url,
getenv=self.getenv,
getshell=self.getshell,
auth=self.auth,
http_args=self.http_args, **source)
for source in info['sources']}) | [
"def",
"_load",
"(",
"self",
")",
":",
"# This will not immediately fetch any sources (entries). It will lazily",
"# fetch sources from the server in paginated blocks when this Catalog",
"# is iterated over. It will fetch specific sources when they are",
"# accessed in this Catalog via __getitem__... | Fetch metadata from remote. Entries are fetched lazily. | [
"Fetch",
"metadata",
"from",
"remote",
".",
"Entries",
"are",
"fetched",
"lazily",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/base.py#L674-L715 |
243,237 | intake/intake | intake/cli/util.py | nice_join | def nice_join(seq, sep=", ", conjunction="or"):
''' Join together sequences of strings into English-friendly phrases using
a conjunction when appropriate.
Args:
seq (seq[str]) : a sequence of strings to nicely join
sep (str, optional) : a sequence delimiter to use (default: ", ")
conjunction (str or None, optional) : a conjunction to use for the last
two items, or None to reproduce basic join behavior (default: "or")
Returns:
a joined string
Examples:
>>> nice_join(["a", "b", "c"])
'a, b or c'
'''
seq = [str(x) for x in seq]
if len(seq) <= 1 or conjunction is None:
return sep.join(seq)
else:
return "%s %s %s" % (sep.join(seq[:-1]), conjunction, seq[-1]) | python | def nice_join(seq, sep=", ", conjunction="or"):
''' Join together sequences of strings into English-friendly phrases using
a conjunction when appropriate.
Args:
seq (seq[str]) : a sequence of strings to nicely join
sep (str, optional) : a sequence delimiter to use (default: ", ")
conjunction (str or None, optional) : a conjunction to use for the last
two items, or None to reproduce basic join behavior (default: "or")
Returns:
a joined string
Examples:
>>> nice_join(["a", "b", "c"])
'a, b or c'
'''
seq = [str(x) for x in seq]
if len(seq) <= 1 or conjunction is None:
return sep.join(seq)
else:
return "%s %s %s" % (sep.join(seq[:-1]), conjunction, seq[-1]) | [
"def",
"nice_join",
"(",
"seq",
",",
"sep",
"=",
"\", \"",
",",
"conjunction",
"=",
"\"or\"",
")",
":",
"seq",
"=",
"[",
"str",
"(",
"x",
")",
"for",
"x",
"in",
"seq",
"]",
"if",
"len",
"(",
"seq",
")",
"<=",
"1",
"or",
"conjunction",
"is",
"No... | Join together sequences of strings into English-friendly phrases using
a conjunction when appropriate.
Args:
seq (seq[str]) : a sequence of strings to nicely join
sep (str, optional) : a sequence delimiter to use (default: ", ")
conjunction (str or None, optional) : a conjunction to use for the last
two items, or None to reproduce basic join behavior (default: "or")
Returns:
a joined string
Examples:
>>> nice_join(["a", "b", "c"])
'a, b or c' | [
"Join",
"together",
"sequences",
"of",
"strings",
"into",
"English",
"-",
"friendly",
"phrases",
"using",
"a",
"conjunction",
"when",
"appropriate",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/cli/util.py#L46-L71 |
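The entry above documents `nice_join` from `intake/cli/util.py`. A minimal usage sketch, assuming only the signature and behaviour shown in that entry (the import path is inferred from the row's path column):

```python
# Illustrative sketch; expected outputs follow from the code and docstring above.
from intake.cli.util import nice_join

print(nice_join(["csv", "parquet", "zarr"]))             # 'csv, parquet or zarr'
print(nice_join(["read", "write"], conjunction="and"))   # 'read and write'
print(nice_join(["only"]))                               # 'only' (single item: plain join)
print(nice_join([1, 2, 3], sep="; ", conjunction=None))  # '1; 2; 3' (basic join behaviour)
```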
243,238 | intake/intake | intake/__init__.py | output_notebook | def output_notebook(inline=True, logo=False):
"""
Load the notebook extension
Parameters
----------
inline : boolean (optional)
Whether to inline JS code or load it from a CDN
logo : boolean (optional)
Whether to show the logo(s)
"""
try:
import hvplot
except ImportError:
raise ImportError("The intake plotting API requires hvplot."
"hvplot may be installed with:\n\n"
"`conda install -c pyviz hvplot` or "
"`pip install hvplot`.")
import holoviews as hv
return hv.extension('bokeh', inline=inline, logo=logo) | python | def output_notebook(inline=True, logo=False):
try:
import hvplot
except ImportError:
raise ImportError("The intake plotting API requires hvplot."
"hvplot may be installed with:\n\n"
"`conda install -c pyviz hvplot` or "
"`pip install hvplot`.")
import holoviews as hv
return hv.extension('bokeh', inline=inline, logo=logo) | [
"def",
"output_notebook",
"(",
"inline",
"=",
"True",
",",
"logo",
"=",
"False",
")",
":",
"try",
":",
"import",
"hvplot",
"except",
"ImportError",
":",
"raise",
"ImportError",
"(",
"\"The intake plotting API requires hvplot.\"",
"\"hvplot may be installed with:\\n\\n\"... | Load the notebook extension
Parameters
----------
inline : boolean (optional)
Whether to inline JS code or load it from a CDN
logo : boolean (optional)
Whether to show the logo(s) | [
"Load",
"the",
"notebook",
"extension"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/__init__.py#L57-L76 |
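A short notebook-usage sketch for `output_notebook` above; it assumes the optional `hvplot`/`holoviews` dependencies named in the function's ImportError message are installed:

```python
# Run inside a Jupyter notebook; loads the holoviews bokeh extension for inline plots.
import intake

intake.output_notebook()                           # inline JS by default
# intake.output_notebook(inline=False, logo=True)  # load JS from a CDN and show logos
```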
243,239 | intake/intake | intake/__init__.py | open_catalog | def open_catalog(uri=None, **kwargs):
"""Create a Catalog object
Can load YAML catalog files, connect to an intake server, or create any
arbitrary Catalog subclass instance. In the general case, the user should
supply ``driver=`` with a value from the plugins registry which has a
container type of catalog. File locations can generally be remote, if
specifying a URL protocol.
The default behaviour if not specifying the driver is as follows:
- if ``uri`` is a single string ending in "yml" or "yaml", open it as a
catalog file
- if ``uri`` is a list of strings, a string containing a glob character
("*") or a string not ending in "y(a)ml", open as a set of catalog
files. In the latter case, assume it is a directory.
- if ``uri`` begins with protocol ``"intake:"``, connect to a remote
Intake server
- otherwise, create a base Catalog object without entries.
Parameters
----------
uri: str
Designator for the location of the catalog.
kwargs:
passed to subclass instance, see documentation of the individual
catalog classes. For example, ``yaml_files_cat`` (when specifying
multiple uris or a glob string) takes the additional
parameter ``flatten=True|False``, specifying whether all data sources
are merged in a single namespace, or each file becomes
a sub-catalog.
See also
--------
intake.open_yaml_files_cat, intake.open_yaml_file_cat,
intake.open_intake_remote
"""
driver = kwargs.pop('driver', None)
if driver is None:
if uri:
if ((isinstance(uri, str) and "*" in uri)
or ((isinstance(uri, (list, tuple))) and len(uri) > 1)):
# glob string or list of files/globs
driver = 'yaml_files_cat'
elif isinstance(uri, (list, tuple)) and len(uri) == 1:
uri = uri[0]
if "*" in uri[0]:
# single glob string in a list
driver = 'yaml_files_cat'
else:
# single filename in a list
driver = 'yaml_file_cat'
elif isinstance(uri, str):
# single URL
if uri.startswith('intake:'):
# server
driver = 'intake_remote'
else:
if uri.endswith(('.yml', '.yaml')):
driver = 'yaml_file_cat'
else:
uri = uri.rstrip('/') + '/*.y*ml'
driver = 'yaml_files_cat'
else:
# empty cat
driver = 'catalog'
if driver not in registry:
raise ValueError('Unknown catalog driver (%s), supply one of: %s'
% (driver, list(sorted(registry))))
return registry[driver](uri, **kwargs) | python | def open_catalog(uri=None, **kwargs):
driver = kwargs.pop('driver', None)
if driver is None:
if uri:
if ((isinstance(uri, str) and "*" in uri)
or ((isinstance(uri, (list, tuple))) and len(uri) > 1)):
# glob string or list of files/globs
driver = 'yaml_files_cat'
elif isinstance(uri, (list, tuple)) and len(uri) == 1:
uri = uri[0]
if "*" in uri[0]:
# single glob string in a list
driver = 'yaml_files_cat'
else:
# single filename in a list
driver = 'yaml_file_cat'
elif isinstance(uri, str):
# single URL
if uri.startswith('intake:'):
# server
driver = 'intake_remote'
else:
if uri.endswith(('.yml', '.yaml')):
driver = 'yaml_file_cat'
else:
uri = uri.rstrip('/') + '/*.y*ml'
driver = 'yaml_files_cat'
else:
# empty cat
driver = 'catalog'
if driver not in registry:
raise ValueError('Unknown catalog driver (%s), supply one of: %s'
% (driver, list(sorted(registry))))
return registry[driver](uri, **kwargs) | [
"def",
"open_catalog",
"(",
"uri",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"driver",
"=",
"kwargs",
".",
"pop",
"(",
"'driver'",
",",
"None",
")",
"if",
"driver",
"is",
"None",
":",
"if",
"uri",
":",
"if",
"(",
"(",
"isinstance",
"(",
"ur... | Create a Catalog object
Can load YAML catalog files, connect to an intake server, or create any
arbitrary Catalog subclass instance. In the general case, the user should
supply ``driver=`` with a value from the plugins registry which has a
container type of catalog. File locations can generally be remote, if
specifying a URL protocol.
The default behaviour if not specifying the driver is as follows:
- if ``uri`` is a single string ending in "yml" or "yaml", open it as a
catalog file
- if ``uri`` is a list of strings, a string containing a glob character
("*") or a string not ending in "y(a)ml", open as a set of catalog
files. In the latter case, assume it is a directory.
- if ``uri`` begins with protocol ``"intake:"``, connect to a remote
Intake server
- otherwise, create a base Catalog object without entries.
Parameters
----------
uri: str
Designator for the location of the catalog.
kwargs:
passed to subclass instance, see documentation of the individual
catalog classes. For example, ``yaml_files_cat`` (when specifying
multiple uris or a glob string) takes the additional
parameter ``flatten=True|False``, specifying whether all data sources
are merged in a single namespace, or each file becomes
a sub-catalog.
See also
--------
intake.open_yaml_files_cat, intake.open_yaml_file_cat,
intake.open_intake_remote | [
"Create",
"a",
"Catalog",
"object"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/__init__.py#L83-L152 |
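The dispatch rules in the `open_catalog` docstring above can be exercised directly; the file names and server address below are hypothetical placeholders:

```python
import intake

cat = intake.open_catalog("catalog.yml")                  # single YAML file -> yaml_file_cat
many = intake.open_catalog("conf/*.yaml")                 # glob string -> yaml_files_cat
remote = intake.open_catalog("intake://localhost:5000")   # intake: protocol -> intake_remote
empty = intake.open_catalog()                             # no uri -> empty base Catalog
print(list(cat))                                          # names of the entries in the file
```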
243,240 | intake/intake | intake/container/dataframe.py | RemoteDataFrame._persist | def _persist(source, path, **kwargs):
"""Save dataframe to local persistent store
Makes a parquet dataset out of the data using dask.dataframe.to_parquet.
This then becomes a data entry in the persisted datasets catalog.
Parameters
----------
source: a DataSource instance to save
name: str or None
Key to refer to this persisted dataset by. If not given, will
attempt to get from the source's name
kwargs: passed on to dask.dataframe.to_parquet
"""
try:
from intake_parquet import ParquetSource
except ImportError:
raise ImportError("Please install intake-parquet to use persistence"
" on dataframe container sources.")
try:
df = source.to_dask()
except NotImplementedError:
import dask.dataframe as dd
df = dd.from_pandas(source.read(), 1)
df.to_parquet(path, **kwargs)
source = ParquetSource(path, meta={})
return source | python | def _persist(source, path, **kwargs):
try:
from intake_parquet import ParquetSource
except ImportError:
raise ImportError("Please install intake-parquet to use persistence"
" on dataframe container sources.")
try:
df = source.to_dask()
except NotImplementedError:
import dask.dataframe as dd
df = dd.from_pandas(source.read(), 1)
df.to_parquet(path, **kwargs)
source = ParquetSource(path, meta={})
return source | [
"def",
"_persist",
"(",
"source",
",",
"path",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"from",
"intake_parquet",
"import",
"ParquetSource",
"except",
"ImportError",
":",
"raise",
"ImportError",
"(",
"\"Please install intake-parquet to use persistence\"",
"\" ... | Save dataframe to local persistent store
Makes a parquet dataset out of the data using dask.dataframe.to_parquet.
This then becomes a data entry in the persisted datasets catalog.
Parameters
----------
source: a DataSource instance to save
path: str
Location on the persistent store to write the parquet dataset to
kwargs: passed on to dask.dataframe.to_parquet | [
"Save",
"dataframe",
"to",
"local",
"persistent",
"store"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/container/dataframe.py#L61-L87 |
243,241 | intake/intake | intake/config.py | save_conf | def save_conf(fn=None):
"""Save current configuration to file as YAML
If not given, uses current config directory, ``confdir``, which can be
set by INTAKE_CONF_DIR.
"""
if fn is None:
fn = cfile()
try:
os.makedirs(os.path.dirname(fn))
except (OSError, IOError):
pass
with open(fn, 'w') as f:
yaml.dump(conf, f) | python | def save_conf(fn=None):
if fn is None:
fn = cfile()
try:
os.makedirs(os.path.dirname(fn))
except (OSError, IOError):
pass
with open(fn, 'w') as f:
yaml.dump(conf, f) | [
"def",
"save_conf",
"(",
"fn",
"=",
"None",
")",
":",
"if",
"fn",
"is",
"None",
":",
"fn",
"=",
"cfile",
"(",
")",
"try",
":",
"os",
".",
"makedirs",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"fn",
")",
")",
"except",
"(",
"OSError",
",",
... | Save current configuration to file as YAML
If not given, uses current config directory, ``confdir``, which can be
set by INTAKE_CONF_DIR. | [
"Save",
"current",
"configuration",
"to",
"file",
"as",
"YAML"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/config.py#L46-L59 |
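A small sketch of `save_conf` with an explicit target file, so the real `INTAKE_CONF_DIR` location is left alone; the path is a placeholder:

```python
import os
from intake.config import conf, save_conf

target = os.path.join("/tmp", "intake-demo", "conf.yaml")
save_conf(target)              # creates the directory if needed and dumps `conf` as YAML
with open(target) as f:
    print(f.read())            # the current configuration, serialised
```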
243,242 | intake/intake | intake/config.py | load_conf | def load_conf(fn=None):
"""Update global config from YAML file
If fn is None, looks in global config directory, which is either defined
by the INTAKE_CONF_DIR env-var or is ~/.intake/ .
"""
if fn is None:
fn = cfile()
if os.path.isfile(fn):
with open(fn) as f:
try:
conf.update(yaml_load(f))
except Exception as e:
logger.warning('Failure to load config file "{fn}": {e}'
''.format(fn=fn, e=e)) | python | def load_conf(fn=None):
if fn is None:
fn = cfile()
if os.path.isfile(fn):
with open(fn) as f:
try:
conf.update(yaml_load(f))
except Exception as e:
logger.warning('Failure to load config file "{fn}": {e}'
''.format(fn=fn, e=e)) | [
"def",
"load_conf",
"(",
"fn",
"=",
"None",
")",
":",
"if",
"fn",
"is",
"None",
":",
"fn",
"=",
"cfile",
"(",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"fn",
")",
":",
"with",
"open",
"(",
"fn",
")",
"as",
"f",
":",
"try",
":",
"con... | Update global config from YAML file
If fn is None, looks in global config directory, which is either defined
by the INTAKE_CONF_DIR env-var or is ~/.intake/ . | [
"Update",
"global",
"config",
"from",
"YAML",
"file"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/config.py#L62-L76 |
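`load_conf` is the counterpart of `save_conf` in the previous entry; a round-trip sketch with a placeholder path:

```python
from intake.config import conf, save_conf, load_conf

save_conf("/tmp/intake-demo/conf.yaml")    # snapshot the current settings
conf["cache_download_progress"] = False    # change something in memory only
load_conf("/tmp/intake-demo/conf.yaml")    # merge the saved values back into `conf`
```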
243,243 | intake/intake | intake/config.py | intake_path_dirs | def intake_path_dirs(path):
"""Return a list of directories from the intake path.
If a string, perhaps taken from an environment variable, then the
list of paths will be split on the character ":" for posix or ";" for
windows. Protocol indicators ("protocol://") will be ignored.
"""
if isinstance(path, (list, tuple)):
return path
import re
pattern = re.compile(";" if os.name == 'nt' else r"(?<!:):(?![:/])")
return pattern.split(path) | python | def intake_path_dirs(path):
if isinstance(path, (list, tuple)):
return path
import re
pattern = re.compile(";" if os.name == 'nt' else r"(?<!:):(?![:/])")
return pattern.split(path) | [
"def",
"intake_path_dirs",
"(",
"path",
")",
":",
"if",
"isinstance",
"(",
"path",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"return",
"path",
"import",
"re",
"pattern",
"=",
"re",
".",
"compile",
"(",
"\";\"",
"if",
"os",
".",
"name",
"==",
"... | Return a list of directories from the intake path.
If a string, perhaps taken from an environment variable, then the
list of paths will be split on the character ":" for posix or ";" for
windows. Protocol indicators ("protocol://") will be ignored. | [
"Return",
"a",
"list",
"of",
"directories",
"from",
"the",
"intake",
"path",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/config.py#L79-L90 |
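The regex in `intake_path_dirs` above splits on ":" while leaving protocol markers alone; a posix-side sketch:

```python
from intake.config import intake_path_dirs

print(intake_path_dirs(["already", "a", "list"]))       # lists/tuples pass through unchanged
print(intake_path_dirs("cats/one:cats/two"))            # ['cats/one', 'cats/two']
print(intake_path_dirs("s3://bucket/cats:local_cats"))  # ['s3://bucket/cats', 'local_cats']
```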
243,244 | intake/intake | intake/config.py | load_env | def load_env():
"""Analyse environment variables and update conf accordingly"""
# environment variables take precedence over conf file
for key, envvar in [['cache_dir', 'INTAKE_CACHE_DIR'],
['catalog_path', 'INTAKE_PATH'],
['persist_path', 'INTAKE_PERSIST_PATH']]:
if envvar in os.environ:
conf[key] = make_path_posix(os.environ[envvar])
conf['catalog_path'] = intake_path_dirs(conf['catalog_path'])
for key, envvar in [['cache_disabled', 'INTAKE_DISABLE_CACHING'],
['cache_download_progress', 'INTAKE_CACHE_PROGRESS']]:
if envvar in os.environ:
conf[key] = os.environ[envvar].lower() in ['true', 't', 'y', 'yes']
if 'INTAKE_LOG_LEVEL' in os.environ:
conf['logging'] = os.environ['INTAKE_LOG_LEVEL'] | python | def load_env():
# environment variables take precedence over conf file
for key, envvar in [['cache_dir', 'INTAKE_CACHE_DIR'],
['catalog_path', 'INTAKE_PATH'],
['persist_path', 'INTAKE_PERSIST_PATH']]:
if envvar in os.environ:
conf[key] = make_path_posix(os.environ[envvar])
conf['catalog_path'] = intake_path_dirs(conf['catalog_path'])
for key, envvar in [['cache_disabled', 'INTAKE_DISABLE_CACHING'],
['cache_download_progress', 'INTAKE_CACHE_PROGRESS']]:
if envvar in os.environ:
conf[key] = os.environ[envvar].lower() in ['true', 't', 'y', 'yes']
if 'INTAKE_LOG_LEVEL' in os.environ:
conf['logging'] = os.environ['INTAKE_LOG_LEVEL'] | [
"def",
"load_env",
"(",
")",
":",
"# environment variables take precedence over conf file",
"for",
"key",
",",
"envvar",
"in",
"[",
"[",
"'cache_dir'",
",",
"'INTAKE_CACHE_DIR'",
"]",
",",
"[",
"'catalog_path'",
",",
"'INTAKE_PATH'",
"]",
",",
"[",
"'persist_path'",... | Analyse enviroment variables and update conf accordingly | [
"Analyse",
"enviroment",
"variables",
"and",
"update",
"conf",
"accordingly"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/config.py#L93-L107 |
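A sketch of the environment variables `load_env` recognises; the values are placeholders:

```python
import os
from intake.config import conf, load_env

os.environ["INTAKE_CACHE_DIR"] = "/tmp/intake-cache"
os.environ["INTAKE_CACHE_PROGRESS"] = "false"
load_env()
print(conf["cache_dir"])                 # '/tmp/intake-cache'
print(conf["cache_download_progress"])   # False ('false' is not in the truthy set)
```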
243,245 | intake/intake | intake/gui/source/description.py | Description.source | def source(self, source):
"""When the source gets updated, update the pane object"""
BaseView.source.fset(self, source)
if self.main_pane:
self.main_pane.object = self.contents
self.label_pane.object = self.label | python | def source(self, source):
BaseView.source.fset(self, source)
if self.main_pane:
self.main_pane.object = self.contents
self.label_pane.object = self.label | [
"def",
"source",
"(",
"self",
",",
"source",
")",
":",
"BaseView",
".",
"source",
".",
"fset",
"(",
"self",
",",
"source",
")",
"if",
"self",
".",
"main_pane",
":",
"self",
".",
"main_pane",
".",
"object",
"=",
"self",
".",
"contents",
"self",
".",
... | When the source gets updated, update the pane object | [
"When",
"the",
"source",
"gets",
"updated",
"update",
"the",
"pane",
"object"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/source/description.py#L52-L57 |
243,246 | intake/intake | intake/gui/source/description.py | Description.contents | def contents(self):
"""String representation of the source's description"""
if not self._source:
return ' ' * 100 # HACK - make sure that area is big
contents = self.source.describe()
return pretty_describe(contents) | python | def contents(self):
if not self._source:
return ' ' * 100 # HACK - make sure that area is big
contents = self.source.describe()
return pretty_describe(contents) | [
"def",
"contents",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_source",
":",
"return",
"' '",
"*",
"100",
"# HACK - make sure that area is big",
"contents",
"=",
"self",
".",
"source",
".",
"describe",
"(",
")",
"return",
"pretty_describe",
"(",
"cont... | String representation of the source's description | [
"String",
"representation",
"of",
"the",
"source",
"s",
"description"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/source/description.py#L60-L65 |
243,247 | intake/intake | intake/source/utils.py | _get_parts_of_format_string | def _get_parts_of_format_string(resolved_string, literal_texts, format_specs):
"""
Inner function of reverse_format, returns the resolved value for each
field in pattern.
"""
_text = resolved_string
bits = []
if literal_texts[-1] != '' and _text.endswith(literal_texts[-1]):
_text = _text[:-len(literal_texts[-1])]
literal_texts = literal_texts[:-1]
format_specs = format_specs[:-1]
for i, literal_text in enumerate(literal_texts):
if literal_text != '':
if literal_text not in _text:
raise ValueError(("Resolved string must match pattern. "
"'{}' not found.".format(literal_text)))
bit, _text = _text.split(literal_text, 1)
if bit:
bits.append(bit)
elif i == 0:
continue
else:
try:
format_spec = _validate_format_spec(format_specs[i-1])
bits.append(_text[0:format_spec])
_text = _text[format_spec:]
except:
if i == len(format_specs) - 1:
format_spec = _validate_format_spec(format_specs[i])
bits.append(_text[:-format_spec])
bits.append(_text[-format_spec:])
_text = []
else:
_validate_format_spec(format_specs[i-1])
if _text:
bits.append(_text)
if len(bits) > len([fs for fs in format_specs if fs is not None]):
bits = bits[1:]
return bits | python | def _get_parts_of_format_string(resolved_string, literal_texts, format_specs):
_text = resolved_string
bits = []
if literal_texts[-1] != '' and _text.endswith(literal_texts[-1]):
_text = _text[:-len(literal_texts[-1])]
literal_texts = literal_texts[:-1]
format_specs = format_specs[:-1]
for i, literal_text in enumerate(literal_texts):
if literal_text != '':
if literal_text not in _text:
raise ValueError(("Resolved string must match pattern. "
"'{}' not found.".format(literal_text)))
bit, _text = _text.split(literal_text, 1)
if bit:
bits.append(bit)
elif i == 0:
continue
else:
try:
format_spec = _validate_format_spec(format_specs[i-1])
bits.append(_text[0:format_spec])
_text = _text[format_spec:]
except:
if i == len(format_specs) - 1:
format_spec = _validate_format_spec(format_specs[i])
bits.append(_text[:-format_spec])
bits.append(_text[-format_spec:])
_text = []
else:
_validate_format_spec(format_specs[i-1])
if _text:
bits.append(_text)
if len(bits) > len([fs for fs in format_specs if fs is not None]):
bits = bits[1:]
return bits | [
"def",
"_get_parts_of_format_string",
"(",
"resolved_string",
",",
"literal_texts",
",",
"format_specs",
")",
":",
"_text",
"=",
"resolved_string",
"bits",
"=",
"[",
"]",
"if",
"literal_texts",
"[",
"-",
"1",
"]",
"!=",
"''",
"and",
"_text",
".",
"endswith",
... | Inner function of reverse_format, returns the resolved value for each
field in pattern. | [
"Inner",
"function",
"of",
"reverse_format",
"returns",
"the",
"resolved",
"value",
"for",
"each",
"field",
"in",
"pattern",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/utils.py#L26-L66 |
243,248 | intake/intake | intake/source/utils.py | reverse_formats | def reverse_formats(format_string, resolved_strings):
"""
Reverse the string method format for a list of strings.
Given format_string and resolved_strings, for each resolved string
find arguments that would give
``format_string.format(**arguments) == resolved_string``.
Each item in the output corresponds to a new column: the key gives the field
name, and the value is the list of values resolved for that field, one per
entry in resolved_strings.
Parameters
----------
format_string : str
Format template string as used with str.format method
resolved_strings : list
List of strings with same pattern as format_string but with fields
filled out.
Returns
-------
args : dict
Dict of the form ``{field: [value_0, ..., value_n], ...}`` where values are in
the same order as resolved_strings, so:
``format_string.format(**{f: v[0] for f, v in args.items()}) == resolved_strings[0]``
Examples
--------
>>> paths = ['data_2014_01_03.csv', 'data_2014_02_03.csv', 'data_2015_12_03.csv']
>>> reverse_formats('data_{year}_{month}_{day}.csv', paths)
{'year': ['2014', '2014', '2015'],
'month': ['01', '02', '12'],
'day': ['03', '03', '03']}
>>> reverse_formats('data_{year:d}_{month:d}_{day:d}.csv', paths)
{'year': [2014, 2014, 2015], 'month': [1, 2, 12], 'day': [3, 3, 3]}
>>> reverse_formats('data_{date:%Y_%m_%d}.csv', paths)
{'date': [datetime.datetime(2014, 1, 3, 0, 0),
datetime.datetime(2014, 2, 3, 0, 0),
datetime.datetime(2015, 12, 3, 0, 0)]}
>>> reverse_formats('{state:2}{zip:5}', ['PA19104', 'PA19143', 'MA02534'])
{'state': ['PA', 'PA', 'MA'], 'zip': ['19104', '19143', '02534']}
See also
--------
str.format : method that this reverses
reverse_format : method for reversing just one string using a pattern
"""
from string import Formatter
fmt = Formatter()
# get the fields from the format_string
field_names = [i[1] for i in fmt.parse(format_string) if i[1]]
# initialize the args dict with an empty list for each field
args = {field_name: [] for field_name in field_names}
for resolved_string in resolved_strings:
for field, value in reverse_format(format_string, resolved_string).items():
args[field].append(value)
return args | python | def reverse_formats(format_string, resolved_strings):
from string import Formatter
fmt = Formatter()
# get the fields from the format_string
field_names = [i[1] for i in fmt.parse(format_string) if i[1]]
# initialize the args dict with an empty list for each field
args = {field_name: [] for field_name in field_names}
for resolved_string in resolved_strings:
for field, value in reverse_format(format_string, resolved_string).items():
args[field].append(value)
return args | [
"def",
"reverse_formats",
"(",
"format_string",
",",
"resolved_strings",
")",
":",
"from",
"string",
"import",
"Formatter",
"fmt",
"=",
"Formatter",
"(",
")",
"# get the fields from the format_string",
"field_names",
"=",
"[",
"i",
"[",
"1",
"]",
"for",
"i",
"in... | Reverse the string method format for a list of strings.
Given format_string and resolved_strings, for each resolved string
find arguments that would give
``format_string.format(**arguments) == resolved_string``.
Each item in the output corresponds to a new column: the key gives the field
name, and the value is the list of values resolved for that field, one per
entry in resolved_strings.
Parameters
----------
format_string : str
Format template string as used with str.format method
resolved_strings : list
List of strings with same pattern as format_string but with fields
filled out.
Returns
-------
args : dict
Dict of the form ``{field: [value_0, ..., value_n], ...}`` where values are in
the same order as resolved_strings, so:
``format_string.format(**{f: v[0] for f, v in args.items()}) == resolved_strings[0]``
Examples
--------
>>> paths = ['data_2014_01_03.csv', 'data_2014_02_03.csv', 'data_2015_12_03.csv']
>>> reverse_formats('data_{year}_{month}_{day}.csv', paths)
{'year': ['2014', '2014', '2015'],
'month': ['01', '02', '12'],
'day': ['03', '03', '03']}
>>> reverse_formats('data_{year:d}_{month:d}_{day:d}.csv', paths)
{'year': [2014, 2014, 2015], 'month': [1, 2, 12], 'day': [3, 3, 3]}
>>> reverse_formats('data_{date:%Y_%m_%d}.csv', paths)
{'date': [datetime.datetime(2014, 1, 3, 0, 0),
datetime.datetime(2014, 2, 3, 0, 0),
datetime.datetime(2015, 12, 3, 0, 0)]}
>>> reverse_formats('{state:2}{zip:5}', ['PA19104', 'PA19143', 'MA02534'])
{'state': ['PA', 'PA', 'MA'], 'zip': ['19104', '19143', '02534']}
See also
--------
str.format : method that this reverses
reverse_format : method for reversing just one string using a pattern | [
"Reverse",
"the",
"string",
"method",
"format",
"for",
"a",
"list",
"of",
"strings",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/utils.py#L69-L131 |
243,249 | intake/intake | intake/source/utils.py | reverse_format | def reverse_format(format_string, resolved_string):
"""
Reverse the string method format.
Given format_string and resolved_string, find arguments that would
give ``format_string.format(**arguments) == resolved_string``
Parameters
----------
format_string : str
Format template string as used with str.format method
resolved_string : str
String with same pattern as format_string but with fields
filled out.
Returns
-------
args : dict
Dict of the form {field_name: value} such that
``format_string.format(**args) == resolved_string``
Examples
--------
>>> reverse_format('data_{year}_{month}_{day}.csv', 'data_2014_01_03.csv')
{'year': '2014', 'month': '01', 'day': '03'}
>>> reverse_format('data_{year:d}_{month:d}_{day:d}.csv', 'data_2014_01_03.csv')
{'year': 2014, 'month': 1, 'day': 3}
>>> reverse_format('data_{date:%Y_%m_%d}.csv', 'data_2016_10_01.csv')
{'date': datetime.datetime(2016, 10, 1, 0, 0)}
>>> reverse_format('{state:2}{zip:5}', 'PA19104')
{'state': 'PA', 'zip': '19104'}
See also
--------
str.format : method that this reverses
reverse_formats : method for reversing a list of strings using one pattern
"""
from string import Formatter
from datetime import datetime
fmt = Formatter()
args = {}
# ensure that format_string is in posix format
format_string = make_path_posix(format_string)
# split the string into bits
literal_texts, field_names, format_specs, conversions = zip(*fmt.parse(format_string))
if not any(field_names):
return {}
for i, conversion in enumerate(conversions):
if conversion:
raise ValueError(('Conversion not allowed. Found on {}.'
.format(field_names[i])))
# ensure that resolved string is in posix format
resolved_string = make_path_posix(resolved_string)
# get a list of the parts that matter
bits = _get_parts_of_format_string(resolved_string, literal_texts, format_specs)
for i, (field_name, format_spec) in enumerate(zip(field_names, format_specs)):
if field_name:
try:
if format_spec.startswith('%'):
args[field_name] = datetime.strptime(bits[i], format_spec)
elif format_spec[-1] in list('bcdoxX'):
args[field_name] = int(bits[i])
elif format_spec[-1] in list('eEfFgGn'):
args[field_name] = float(bits[i])
elif format_spec[-1] == '%':
args[field_name] = float(bits[i][:-1])/100
else:
args[field_name] = fmt.format_field(bits[i], format_spec)
except:
args[field_name] = bits[i]
return args | python | def reverse_format(format_string, resolved_string):
from string import Formatter
from datetime import datetime
fmt = Formatter()
args = {}
# ensure that format_string is in posix format
format_string = make_path_posix(format_string)
# split the string into bits
literal_texts, field_names, format_specs, conversions = zip(*fmt.parse(format_string))
if not any(field_names):
return {}
for i, conversion in enumerate(conversions):
if conversion:
raise ValueError(('Conversion not allowed. Found on {}.'
.format(field_names[i])))
# ensure that resolved string is in posix format
resolved_string = make_path_posix(resolved_string)
# get a list of the parts that matter
bits = _get_parts_of_format_string(resolved_string, literal_texts, format_specs)
for i, (field_name, format_spec) in enumerate(zip(field_names, format_specs)):
if field_name:
try:
if format_spec.startswith('%'):
args[field_name] = datetime.strptime(bits[i], format_spec)
elif format_spec[-1] in list('bcdoxX'):
args[field_name] = int(bits[i])
elif format_spec[-1] in list('eEfFgGn'):
args[field_name] = float(bits[i])
elif format_spec[-1] == '%':
args[field_name] = float(bits[i][:-1])/100
else:
args[field_name] = fmt.format_field(bits[i], format_spec)
except:
args[field_name] = bits[i]
return args | [
"def",
"reverse_format",
"(",
"format_string",
",",
"resolved_string",
")",
":",
"from",
"string",
"import",
"Formatter",
"from",
"datetime",
"import",
"datetime",
"fmt",
"=",
"Formatter",
"(",
")",
"args",
"=",
"{",
"}",
"# ensure that format_string is in posix for... | Reverse the string method format.
Given format_string and resolved_string, find arguments that would
give ``format_string.format(**arguments) == resolved_string``
Parameters
----------
format_string : str
Format template string as used with str.format method
resolved_string : str
String with same pattern as format_string but with fields
filled out.
Returns
-------
args : dict
Dict of the form {field_name: value} such that
``format_string.format(**args) == resolved_string``
Examples
--------
>>> reverse_format('data_{year}_{month}_{day}.csv', 'data_2014_01_03.csv')
{'year': '2014', 'month': '01', 'day': '03'}
>>> reverse_format('data_{year:d}_{month:d}_{day:d}.csv', 'data_2014_01_03.csv')
{'year': 2014, 'month': 1, 'day': 3}
>>> reverse_format('data_{date:%Y_%m_%d}.csv', 'data_2016_10_01.csv')
{'date': datetime.datetime(2016, 10, 1, 0, 0)}
>>> reverse_format('{state:2}{zip:5}', 'PA19104')
{'state': 'PA', 'zip': '19104'}
See also
--------
str.format : method that this reverses
reverse_formats : method for reversing a list of strings using one pattern | [
"Reverse",
"the",
"string",
"method",
"format",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/utils.py#L134-L213 |
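A round-trip check for `reverse_format`, built only from the behaviour shown in its docstring and code above:

```python
from intake.source.utils import reverse_format

pattern = "data_{year}_{month}_{day}.csv"
args = reverse_format(pattern, "data_2014_01_03.csv")
print(args)                               # {'year': '2014', 'month': '01', 'day': '03'}
assert pattern.format(**args) == "data_2014_01_03.csv"

# typed fields coerce their values, so zero-padding is lost on re-formatting
typed = reverse_format("data_{year:d}_{month:d}_{day:d}.csv", "data_2014_01_03.csv")
print(typed)                              # {'year': 2014, 'month': 1, 'day': 3}
```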
243,250 | intake/intake | intake/source/utils.py | path_to_glob | def path_to_glob(path):
"""
Convert pattern style paths to glob style paths
Returns path if path is not str
Parameters
----------
path : str
Path to data optionally containing format_strings
Returns
-------
glob : str
Path with any format strings replaced with *
Examples
--------
>>> path_to_glob('{year}/{month}/{day}.csv')
'*/*/*.csv'
>>> path_to_glob('data/{year:4}{month:02}{day:02}.csv')
'data/*.csv'
>>> path_to_glob('data/*.csv')
'data/*.csv'
"""
from string import Formatter
fmt = Formatter()
if not isinstance(path, str):
return path
# Get just the real bits of the urlpath
literal_texts = [i[0] for i in fmt.parse(path)]
# Only use a star for first empty string in literal_texts
index_of_empty = [i for i, lt in enumerate(literal_texts) if lt == '' and i != 0]
glob = '*'.join([literal_texts[i] for i in range(len(literal_texts)) if i not in index_of_empty])
return glob | python | def path_to_glob(path):
from string import Formatter
fmt = Formatter()
if not isinstance(path, str):
return path
# Get just the real bits of the urlpath
literal_texts = [i[0] for i in fmt.parse(path)]
# Only use a star for first empty string in literal_texts
index_of_empty = [i for i, lt in enumerate(literal_texts) if lt == '' and i != 0]
glob = '*'.join([literal_texts[i] for i in range(len(literal_texts)) if i not in index_of_empty])
return glob | [
"def",
"path_to_glob",
"(",
"path",
")",
":",
"from",
"string",
"import",
"Formatter",
"fmt",
"=",
"Formatter",
"(",
")",
"if",
"not",
"isinstance",
"(",
"path",
",",
"str",
")",
":",
"return",
"path",
"# Get just the real bits of the urlpath",
"literal_texts",
... | Convert pattern style paths to glob style paths
Returns path if path is not str
Parameters
----------
path : str
Path to data optionally containing format_strings
Returns
-------
glob : str
Path with any format strings replaced with *
Examples
--------
>>> path_to_glob('{year}/{month}/{day}.csv')
'*/*/*.csv'
>>> path_to_glob('data/{year:4}{month:02}{day:02}.csv')
'data/*.csv'
>>> path_to_glob('data/*.csv')
'data/*.csv' | [
"Convert",
"pattern",
"style",
"paths",
"to",
"glob",
"style",
"paths"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/utils.py#L215-L255 |
243,251 | intake/intake | intake/source/utils.py | path_to_pattern | def path_to_pattern(path, metadata=None):
"""
Remove source information from path when using caching
Returns None if path is not str
Parameters
----------
path : str
Path to data optionally containing format_strings
metadata : dict, optional
Extra arguments to the class, contains any cache information
Returns
-------
pattern : str
Pattern style path stripped of everything to the left of cache regex.
"""
if not isinstance(path, str):
return
pattern = path
if metadata:
cache = metadata.get('cache')
if cache:
regex = next(c.get('regex') for c in cache if c.get('argkey') == 'urlpath')
pattern = pattern.split(regex)[-1]
return pattern | python | def path_to_pattern(path, metadata=None):
if not isinstance(path, str):
return
pattern = path
if metadata:
cache = metadata.get('cache')
if cache:
regex = next(c.get('regex') for c in cache if c.get('argkey') == 'urlpath')
pattern = pattern.split(regex)[-1]
return pattern | [
"def",
"path_to_pattern",
"(",
"path",
",",
"metadata",
"=",
"None",
")",
":",
"if",
"not",
"isinstance",
"(",
"path",
",",
"str",
")",
":",
"return",
"pattern",
"=",
"path",
"if",
"metadata",
":",
"cache",
"=",
"metadata",
".",
"get",
"(",
"'cache'",
... | Remove source information from path when using caching
Returns None if path is not str
Parameters
----------
path : str
Path to data optionally containing format_strings
metadata : dict, optional
Extra arguments to the class, contains any cache information
Returns
-------
pattern : str
Pattern style path stripped of everything to the left of cache regex. | [
"Remove",
"source",
"information",
"from",
"path",
"when",
"using",
"chaching"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/utils.py#L258-L285 |
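The metadata shape expected by `path_to_pattern` follows from the code above: a 'cache' list whose entry with `argkey == 'urlpath'` carries the regex used as the split point. The URLs here are invented:

```python
from intake.source.utils import path_to_pattern

meta = {"cache": [{"argkey": "urlpath", "regex": "example.com/files/"}]}
print(path_to_pattern("https://example.com/files/data_{day}.csv", meta))
# 'data_{day}.csv' - everything left of the regex is stripped
print(path_to_pattern("data_{day}.csv"))   # no cache metadata: returned unchanged
print(path_to_pattern(42))                 # not a str: returns None
```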
243,252 | intake/intake | intake/container/base.py | get_partition | def get_partition(url, headers, source_id, container, partition):
"""Serializable function for fetching a data source partition
Parameters
----------
url: str
Server address
headers: dict
HTTP header parameters
source_id: str
ID of the source in the server's cache (unique per user)
container: str
Type of data, like "dataframe", one of ``intake.container.container_map``
partition: serializable
Part of data to fetch, e.g., an integer for a dataframe.
"""
accepted_formats = list(serializer.format_registry.keys())
accepted_compression = list(serializer.compression_registry.keys())
payload = dict(action='read',
source_id=source_id,
accepted_formats=accepted_formats,
accepted_compression=accepted_compression)
if partition is not None:
payload['partition'] = partition
try:
resp = requests.post(urljoin(url, '/v1/source'),
data=msgpack.packb(payload, use_bin_type=True),
**headers)
if resp.status_code != 200:
raise Exception('Error reading data')
msg = msgpack.unpackb(resp.content, **unpack_kwargs)
format = msg['format']
compression = msg['compression']
compressor = serializer.compression_registry[compression]
encoder = serializer.format_registry[format]
chunk = encoder.decode(compressor.decompress(msg['data']),
container)
return chunk
finally:
if resp is not None:
resp.close() | python | def get_partition(url, headers, source_id, container, partition):
accepted_formats = list(serializer.format_registry.keys())
accepted_compression = list(serializer.compression_registry.keys())
payload = dict(action='read',
source_id=source_id,
accepted_formats=accepted_formats,
accepted_compression=accepted_compression)
if partition is not None:
payload['partition'] = partition
try:
resp = requests.post(urljoin(url, '/v1/source'),
data=msgpack.packb(payload, use_bin_type=True),
**headers)
if resp.status_code != 200:
raise Exception('Error reading data')
msg = msgpack.unpackb(resp.content, **unpack_kwargs)
format = msg['format']
compression = msg['compression']
compressor = serializer.compression_registry[compression]
encoder = serializer.format_registry[format]
chunk = encoder.decode(compressor.decompress(msg['data']),
container)
return chunk
finally:
if resp is not None:
resp.close() | [
"def",
"get_partition",
"(",
"url",
",",
"headers",
",",
"source_id",
",",
"container",
",",
"partition",
")",
":",
"accepted_formats",
"=",
"list",
"(",
"serializer",
".",
"format_registry",
".",
"keys",
"(",
")",
")",
"accepted_compression",
"=",
"list",
"... | Serializable function for fetching a data source partition
Parameters
----------
url: str
Server address
headers: dict
HTTP header parameters
source_id: str
ID of the source in the server's cache (unique per user)
container: str
Type of data, like "dataframe", one of ``intake.container.container_map``
partition: serializable
Part of data to fetch, e.g., an integer for a dataframe. | [
"Serializable",
"function",
"for",
"fetching",
"a",
"data",
"source",
"partition"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/container/base.py#L81-L124 |
243,253 | intake/intake | intake/catalog/utils.py | flatten | def flatten(iterable):
"""Flatten an arbitrarily deep list"""
# likely not used
iterable = iter(iterable)
while True:
try:
item = next(iterable)
except StopIteration:
break
if isinstance(item, six.string_types):
yield item
continue
try:
data = iter(item)
iterable = itertools.chain(data, iterable)
except:
yield item | python | def flatten(iterable):
# likely not used
iterable = iter(iterable)
while True:
try:
item = next(iterable)
except StopIteration:
break
if isinstance(item, six.string_types):
yield item
continue
try:
data = iter(item)
iterable = itertools.chain(data, iterable)
except:
yield item | [
"def",
"flatten",
"(",
"iterable",
")",
":",
"# likely not used",
"iterable",
"=",
"iter",
"(",
"iterable",
")",
"while",
"True",
":",
"try",
":",
"item",
"=",
"next",
"(",
"iterable",
")",
"except",
"StopIteration",
":",
"break",
"if",
"isinstance",
"(",
... | Flatten an arbitrarily deep list | [
"Flatten",
"an",
"arbitrarily",
"deep",
"list"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/utils.py#L20-L38 |
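`flatten` above is a generator that leaves strings intact rather than splitting them into characters; a quick sketch:

```python
from intake.catalog.utils import flatten

print(list(flatten([1, [2, [3, 4]], "ab"])))   # [1, 2, 3, 4, 'ab']
print(list(flatten([["x"], ("y", ["z"])])))    # ['x', 'y', 'z']
```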
243,254 | intake/intake | intake/catalog/utils.py | clamp | def clamp(value, lower=0, upper=sys.maxsize):
"""Clamp float between given range"""
return max(lower, min(upper, value)) | python | def clamp(value, lower=0, upper=sys.maxsize):
return max(lower, min(upper, value)) | [
"def",
"clamp",
"(",
"value",
",",
"lower",
"=",
"0",
",",
"upper",
"=",
"sys",
".",
"maxsize",
")",
":",
"return",
"max",
"(",
"lower",
",",
"min",
"(",
"upper",
",",
"value",
")",
")"
] | Clamp float between given range | [
"Clamp",
"float",
"between",
"given",
"range"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/utils.py#L50-L52 |
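A sketch of `clamp` with its default bounds (0 and sys.maxsize):

```python
from intake.catalog.utils import clamp

print(clamp(5, 0, 10))    # 5  - already within range
print(clamp(-3))          # 0  - lower bound defaults to 0
print(clamp(10 ** 20))    # sys.maxsize - upper bound defaults to sys.maxsize
```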
243,255 | intake/intake | intake/catalog/utils.py | expand_templates | def expand_templates(pars, context, return_left=False, client=False,
getenv=True, getshell=True):
"""
Render variables in context into the set of parameters with jinja2.
For variables that are not strings, nothing happens.
Parameters
----------
pars: dict
values are strings containing some jinja2 controls
context: dict
values to use while rendering
return_left: bool
whether to return the set of variables in context that were not used
in rendering parameters
Returns
-------
dict with the same keys as ``pars``, but updated values; optionally also
return set of unused parameter names.
"""
all_vars = set(context)
out = _expand(pars, context, all_vars, client, getenv, getshell)
if return_left:
return out, all_vars
return out | python | def expand_templates(pars, context, return_left=False, client=False,
getenv=True, getshell=True):
all_vars = set(context)
out = _expand(pars, context, all_vars, client, getenv, getshell)
if return_left:
return out, all_vars
return out | [
"def",
"expand_templates",
"(",
"pars",
",",
"context",
",",
"return_left",
"=",
"False",
",",
"client",
"=",
"False",
",",
"getenv",
"=",
"True",
",",
"getshell",
"=",
"True",
")",
":",
"all_vars",
"=",
"set",
"(",
"context",
")",
"out",
"=",
"_expand... | Render variables in context into the set of parameters with jinja2.
For variables that are not strings, nothing happens.
Parameters
----------
pars: dict
values are strings containing some jinja2 controls
context: dict
values to use while rendering
return_left: bool
whether to return the set of variables in context that were not used
in rendering parameters
Returns
-------
dict with the same keys as ``pars``, but updated values; optionally also
return set of unused parameter names. | [
"Render",
"variables",
"in",
"context",
"into",
"the",
"set",
"of",
"parameters",
"with",
"jinja2",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/utils.py#L109-L135 |
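A sketch of the behaviour `expand_templates` describes above; the rendering itself is delegated to the module's internal `_expand` helper, so treat the exact output as illustrative rather than guaranteed:

```python
from intake.catalog.utils import expand_templates

pars = {"urlpath": "{{ CATALOG_DIR }}/data.csv", "chunks": 1000}
context = {"CATALOG_DIR": "/srv/catalogs"}
out = expand_templates(pars, context)
# expected: {'urlpath': '/srv/catalogs/data.csv', 'chunks': 1000}
# string values are rendered with jinja2; non-string values pass through untouched
print(out)
```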
243,256 | intake/intake | intake/catalog/utils.py | merge_pars | def merge_pars(params, user_inputs, spec_pars, client=False, getenv=True,
getshell=True):
"""Produce open arguments by merging various inputs
This function is called in the context of a catalog entry, when finalising
the arguments for instantiating the corresponding data source.
The three sets of inputs to be considered are:
- the arguments section of the original spec (params)
- UserParameters associated with the entry (spec_pars)
- explicit arguments provided at instantiation time, like entry(arg=value)
(user_inputs)
Both spec_pars and user_inputs can be considered as template variables and
used in expanding string values in params.
The default value of a spec_par, if given, may have embedded env and shell
functions, which will be evaluated before use, if the default is used and
the corresponding getenv/getshell are set. Similarly, string value params
will also have access to these functions within jinja template groups,
as well as full jinja processing.
Where a key exists in both the spec_pars and the user_inputs, the
user_input wins. Where user_inputs contains keys not seen elsewhere, they
are regarded as extra kwargs to pass to the data source.
Where spec pars have the same name as keys in params, their type, max/min
and allowed fields are used to validate the final values of the
corresponding arguments.
Parameters
----------
params : dict
From the entry's original spec
user_inputs : dict
Provided by the user/calling function
spec_pars : list of UserParameters
Default and validation instances
client : bool
Whether this is all running on a client to a remote server - sets
which of the env/shell functions are in operation.
getenv : bool
Whether to allow pulling environment variables. If False, the
template blocks will pass through unevaluated
getshell : bool
Whether or not to allow executing of shell commands. If False, the
template blocks will pass through unevaluated
Returns
-------
Final parameter dict
"""
context = params.copy()
for par in spec_pars:
val = user_inputs.get(par.name, par.default)
if val is not None:
if isinstance(val, six.string_types):
val = expand_defaults(val, getenv=getenv, getshell=getshell,
client=client)
context[par.name] = par.validate(val)
context.update({k: v for k, v in user_inputs.items() if k not in context})
out, left = expand_templates(params, context, True, client, getenv,
getshell)
context = {k: v for k, v in context.items() if k in left}
for par in spec_pars:
if par.name in context:
# coerces to type
context[par.name] = par.validate(context[par.name])
left.remove(par.name)
params.update(out)
user_inputs = expand_templates(user_inputs, context, False, client, getenv,
getshell)
params.update({k: v for k, v in user_inputs.items() if k in left})
params.pop('CATALOG_DIR')
for k, v in params.copy().items():
# final validation/coercion
for sp in [p for p in spec_pars if p.name == k]:
params[k] = sp.validate(params[k])
return params | python | def merge_pars(params, user_inputs, spec_pars, client=False, getenv=True,
getshell=True):
context = params.copy()
for par in spec_pars:
val = user_inputs.get(par.name, par.default)
if val is not None:
if isinstance(val, six.string_types):
val = expand_defaults(val, getenv=getenv, getshell=getshell,
client=client)
context[par.name] = par.validate(val)
context.update({k: v for k, v in user_inputs.items() if k not in context})
out, left = expand_templates(params, context, True, client, getenv,
getshell)
context = {k: v for k, v in context.items() if k in left}
for par in spec_pars:
if par.name in context:
# coerces to type
context[par.name] = par.validate(context[par.name])
left.remove(par.name)
params.update(out)
user_inputs = expand_templates(user_inputs, context, False, client, getenv,
getshell)
params.update({k: v for k, v in user_inputs.items() if k in left})
params.pop('CATALOG_DIR')
for k, v in params.copy().items():
# final validation/coercion
for sp in [p for p in spec_pars if p.name == k]:
params[k] = sp.validate(params[k])
return params | [
"def",
"merge_pars",
"(",
"params",
",",
"user_inputs",
",",
"spec_pars",
",",
"client",
"=",
"False",
",",
"getenv",
"=",
"True",
",",
"getshell",
"=",
"True",
")",
":",
"context",
"=",
"params",
".",
"copy",
"(",
")",
"for",
"par",
"in",
"spec_pars",... | Produce open arguments by merging various inputs
This function is called in the context of a catalog entry, when finalising
the arguments for instantiating the corresponding data source.
The three sets of inputs to be considered are:
- the arguments section of the original spec (params)
- UserParameters associated with the entry (spec_pars)
- explicit arguments provided at instantiation time, like entry(arg=value)
(user_inputs)
Both spec_pars and user_inputs can be considered as template variables and
used in expanding string values in params.
The default value of a spec_par, if given, may have embedded env and shell
functions, which will be evaluated before use, if the default is used and
the corresponding getenv/getshell are set. Similarly, string value params
will also have access to these functions within jinja template groups,
as well as full jinja processing.
Where a key exists in both the spec_pars and the user_inputs, the
user_input wins. Where user_inputs contains keys not seen elsewhere, they
are regarded as extra kwargs to pass to the data source.
Where spec pars have the same name as keys in params, their type, max/min
and allowed fields are used to validate the final values of the
corresponding arguments.
Parameters
----------
params : dict
From the entry's original spec
user_inputs : dict
Provided by the user/calling function
spec_pars : list of UserParameters
Default and validation instances
client : bool
Whether this is all running on a client to a remote server - sets
which of the env/shell functions are in operation.
getenv : bool
Whether to allow pulling environment variables. If False, the
template blocks will pass through unevaluated
getshell : bool
Whether or not to allow executing of shell commands. If False, the
template blocks will pass through unevaluated
Returns
-------
Final parameter dict | [
"Produce",
"open",
"arguments",
"by",
"merging",
"various",
"inputs"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/utils.py#L177-L257 |
243,257 | intake/intake | intake/catalog/utils.py | coerce | def coerce(dtype, value):
"""
Convert a value to a specific type.
If the value is already the given type, then the original value is
returned. If the value is None, then the default value given by the
type constructor is returned. Otherwise, the type constructor converts
and returns the value.
"""
if dtype is None:
return value
if type(value).__name__ == dtype:
return value
op = COERCION_RULES[dtype]
return op() if value is None else op(value) | python | def coerce(dtype, value):
if dtype is None:
return value
if type(value).__name__ == dtype:
return value
op = COERCION_RULES[dtype]
return op() if value is None else op(value) | [
"def",
"coerce",
"(",
"dtype",
",",
"value",
")",
":",
"if",
"dtype",
"is",
"None",
":",
"return",
"value",
"if",
"type",
"(",
"value",
")",
".",
"__name__",
"==",
"dtype",
":",
"return",
"value",
"op",
"=",
"COERCION_RULES",
"[",
"dtype",
"]",
"retu... | Convert a value to a specific type.
If the value is already the given type, then the original value is
returned. If the value is None, then the default value given by the
type constructor is returned. Otherwise, the type constructor converts
and returns the value. | [
"Convert",
"a",
"value",
"to",
"a",
"specific",
"type",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/utils.py#L276-L290 |
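The first two calls below exercise only the branches visible in `coerce` above; the last one assumes `COERCION_RULES` (defined elsewhere in the module) maps 'int' to the int constructor:

```python
from intake.catalog.utils import coerce

print(coerce(None, "anything"))   # 'anything' - no dtype requested, value passed through
print(coerce("int", 3))           # 3 - already an int, returned unchanged
print(coerce("int", "7"))         # expected 7, via the COERCION_RULES lookup (assumption)
```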
243,258 | intake/intake | intake/catalog/remote.py | open_remote | def open_remote(url, entry, container, user_parameters, description, http_args,
page_size=None, auth=None, getenv=None, getshell=None):
"""Create either local direct data source or remote streamed source"""
from intake.container import container_map
if url.startswith('intake://'):
url = url[len('intake://'):]
payload = dict(action='open',
name=entry,
parameters=user_parameters,
available_plugins=list(plugin_registry.keys()))
req = requests.post(urljoin(url, '/v1/source'),
data=msgpack.packb(payload, use_bin_type=True),
**http_args)
if req.ok:
response = msgpack.unpackb(req.content, **unpack_kwargs)
if 'plugin' in response:
pl = response['plugin']
pl = [pl] if isinstance(pl, str) else pl
# Direct access
for p in pl:
if p in plugin_registry:
source = plugin_registry[p](**response['args'])
proxy = False
break
else:
proxy = True
else:
proxy = True
if proxy:
response.pop('container')
response.update({'name': entry, 'parameters': user_parameters})
if container == 'catalog':
response.update({'auth': auth,
'getenv': getenv,
'getshell': getshell,
'page_size': page_size
# TODO ttl?
# TODO storage_options?
})
source = container_map[container](url, http_args, **response)
source.description = description
return source
else:
raise Exception('Server error: %d, %s' % (req.status_code, req.reason)) | python | def open_remote(url, entry, container, user_parameters, description, http_args,
page_size=None, auth=None, getenv=None, getshell=None):
from intake.container import container_map
if url.startswith('intake://'):
url = url[len('intake://'):]
payload = dict(action='open',
name=entry,
parameters=user_parameters,
available_plugins=list(plugin_registry.keys()))
req = requests.post(urljoin(url, '/v1/source'),
data=msgpack.packb(payload, use_bin_type=True),
**http_args)
if req.ok:
response = msgpack.unpackb(req.content, **unpack_kwargs)
if 'plugin' in response:
pl = response['plugin']
pl = [pl] if isinstance(pl, str) else pl
# Direct access
for p in pl:
if p in plugin_registry:
source = plugin_registry[p](**response['args'])
proxy = False
break
else:
proxy = True
else:
proxy = True
if proxy:
response.pop('container')
response.update({'name': entry, 'parameters': user_parameters})
if container == 'catalog':
response.update({'auth': auth,
'getenv': getenv,
'getshell': getshell,
'page_size': page_size
# TODO ttl?
# TODO storage_options?
})
source = container_map[container](url, http_args, **response)
source.description = description
return source
else:
raise Exception('Server error: %d, %s' % (req.status_code, req.reason)) | [
"def",
"open_remote",
"(",
"url",
",",
"entry",
",",
"container",
",",
"user_parameters",
",",
"description",
",",
"http_args",
",",
"page_size",
"=",
"None",
",",
"auth",
"=",
"None",
",",
"getenv",
"=",
"None",
",",
"getshell",
"=",
"None",
")",
":",
... | Create either local direct data source or remote streamed source | [
"Create",
"either",
"local",
"direct",
"data",
"source",
"or",
"remote",
"streamed",
"source"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/remote.py#L92-L137 |
243,259 | intake/intake | intake/container/semistructured.py | RemoteSequenceSource._persist | def _persist(source, path, encoder=None):
"""Save list to files using encoding
encoder : None or one of str|json|pickle
None is equivalent to str
"""
import posixpath
from dask.bytes import open_files
import dask
import pickle
import json
from intake.source.textfiles import TextFilesSource
encoder = {None: str, 'str': str, 'json': json.dumps,
'pickle': pickle.dumps}[encoder]
try:
b = source.to_dask()
except NotImplementedError:
import dask.bag as db
b = db.from_sequence(source.read(), npartitions=1)
files = open_files(posixpath.join(path, 'part.*'), mode='wt',
num=b.npartitions)
dwrite = dask.delayed(write_file)
out = [dwrite(part, f, encoder)
for part, f in zip(b.to_delayed(), files)]
dask.compute(out)
s = TextFilesSource(posixpath.join(path, 'part.*'))
return s | python | def _persist(source, path, encoder=None):
import posixpath
from dask.bytes import open_files
import dask
import pickle
import json
from intake.source.textfiles import TextFilesSource
encoder = {None: str, 'str': str, 'json': json.dumps,
'pickle': pickle.dumps}[encoder]
try:
b = source.to_dask()
except NotImplementedError:
import dask.bag as db
b = db.from_sequence(source.read(), npartitions=1)
files = open_files(posixpath.join(path, 'part.*'), mode='wt',
num=b.npartitions)
dwrite = dask.delayed(write_file)
out = [dwrite(part, f, encoder)
for part, f in zip(b.to_delayed(), files)]
dask.compute(out)
s = TextFilesSource(posixpath.join(path, 'part.*'))
return s | [
"def",
"_persist",
"(",
"source",
",",
"path",
",",
"encoder",
"=",
"None",
")",
":",
"import",
"posixpath",
"from",
"dask",
".",
"bytes",
"import",
"open_files",
"import",
"dask",
"import",
"pickle",
"import",
"json",
"from",
"intake",
".",
"source",
".",... | Save list to files using encoding
encoder : None or one of str|json|pickle
None is equivalent to str | [
"Save",
"list",
"to",
"files",
"using",
"encoding"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/container/semistructured.py#L55-L81 |
243,260 | intake/intake | intake/catalog/entry.py | CatalogEntry._ipython_display_ | def _ipython_display_(self):
"""Display the entry as a rich object in an IPython session."""
contents = self.describe()
display({ # noqa: F821
'application/json': contents,
'text/plain': pretty_describe(contents)
}, metadata={
'application/json': {'root': contents["name"]}
}, raw=True) | python | def _ipython_display_(self):
contents = self.describe()
display({ # noqa: F821
'application/json': contents,
'text/plain': pretty_describe(contents)
}, metadata={
'application/json': {'root': contents["name"]}
}, raw=True) | [
"def",
"_ipython_display_",
"(",
"self",
")",
":",
"contents",
"=",
"self",
".",
"describe",
"(",
")",
"display",
"(",
"{",
"# noqa: F821",
"'application/json'",
":",
"contents",
",",
"'text/plain'",
":",
"pretty_describe",
"(",
"contents",
")",
"}",
",",
"m... | Display the entry as a rich object in an IPython session. | [
"Display",
"the",
"entry",
"as",
"a",
"rich",
"object",
"in",
"an",
"IPython",
"session",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/entry.py#L103-L111 |
243,261 | intake/intake | intake/source/discovery.py | autodiscover | def autodiscover(path=None, plugin_prefix='intake_'):
"""Scan for Intake plugin packages and return a dict of plugins.
This function searches path (or sys.path) for packages with names that
start with plugin_prefix. Those modules will be imported and scanned for
subclasses of intake.source.base.Plugin. Any subclasses found will be
instantiated and returned in a dictionary, with the plugin's name attribute
as the key.
"""
plugins = {}
for importer, name, ispkg in pkgutil.iter_modules(path=path):
if name.startswith(plugin_prefix):
t = time.time()
new_plugins = load_plugins_from_module(name)
for plugin_name, plugin in new_plugins.items():
if plugin_name in plugins:
orig_path = inspect.getfile(plugins[plugin_name])
new_path = inspect.getfile(plugin)
warnings.warn('Plugin name collision for "%s" from'
'\n %s'
'\nand'
'\n %s'
'\nKeeping plugin from first location.'
% (plugin_name, orig_path, new_path))
else:
plugins[plugin_name] = plugin
logger.debug("Import %s took: %7.2f s" % (name, time.time() - t))
return plugins | python | def autodiscover(path=None, plugin_prefix='intake_'):
plugins = {}
for importer, name, ispkg in pkgutil.iter_modules(path=path):
if name.startswith(plugin_prefix):
t = time.time()
new_plugins = load_plugins_from_module(name)
for plugin_name, plugin in new_plugins.items():
if plugin_name in plugins:
orig_path = inspect.getfile(plugins[plugin_name])
new_path = inspect.getfile(plugin)
warnings.warn('Plugin name collision for "%s" from'
'\n %s'
'\nand'
'\n %s'
'\nKeeping plugin from first location.'
% (plugin_name, orig_path, new_path))
else:
plugins[plugin_name] = plugin
logger.debug("Import %s took: %7.2f s" % (name, time.time() - t))
return plugins | [
"def",
"autodiscover",
"(",
"path",
"=",
"None",
",",
"plugin_prefix",
"=",
"'intake_'",
")",
":",
"plugins",
"=",
"{",
"}",
"for",
"importer",
",",
"name",
",",
"ispkg",
"in",
"pkgutil",
".",
"iter_modules",
"(",
"path",
"=",
"path",
")",
":",
"if",
... | Scan for Intake plugin packages and return a dict of plugins.
This function searches path (or sys.path) for packages with names that
start with plugin_prefix. Those modules will be imported and scanned for
subclasses of intake.source.base.Plugin. Any subclasses found will be
instantiated and returned in a dictionary, with the plugin's name attribute
as the key. | [
"Scan",
"for",
"Intake",
"plugin",
"packages",
"and",
"return",
"a",
"dict",
"of",
"plugins",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/discovery.py#L20-L51 |
243,262 | intake/intake | intake/source/discovery.py | load_plugins_from_module | def load_plugins_from_module(module_name):
"""Imports a module and returns dictionary of discovered Intake plugins.
Plugin classes are instantiated and added to the dictionary, keyed by the
name attribute of the plugin object.
"""
plugins = {}
try:
if module_name.endswith('.py'):
import imp
mod = imp.load_source('module.name', module_name)
else:
mod = importlib.import_module(module_name)
except Exception as e:
logger.debug("Import module <{}> failed: {}".format(module_name, e))
return {}
for _, cls in inspect.getmembers(mod, inspect.isclass):
# Don't try to register plugins imported into this module elsewhere
if issubclass(cls, (Catalog, DataSource)):
plugins[cls.name] = cls
return plugins | python | def load_plugins_from_module(module_name):
plugins = {}
try:
if module_name.endswith('.py'):
import imp
mod = imp.load_source('module.name', module_name)
else:
mod = importlib.import_module(module_name)
except Exception as e:
logger.debug("Import module <{}> failed: {}".format(module_name, e))
return {}
for _, cls in inspect.getmembers(mod, inspect.isclass):
# Don't try to register plugins imported into this module elsewhere
if issubclass(cls, (Catalog, DataSource)):
plugins[cls.name] = cls
return plugins | [
"def",
"load_plugins_from_module",
"(",
"module_name",
")",
":",
"plugins",
"=",
"{",
"}",
"try",
":",
"if",
"module_name",
".",
"endswith",
"(",
"'.py'",
")",
":",
"import",
"imp",
"mod",
"=",
"imp",
".",
"load_source",
"(",
"'module.name'",
",",
"module_... | Imports a module and returns dictionary of discovered Intake plugins.
Plugin classes are instantiated and added to the dictionary, keyed by the
name attribute of the plugin object. | [
"Imports",
"a",
"module",
"and",
"returns",
"dictionary",
"of",
"discovered",
"Intake",
"plugins",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/discovery.py#L54-L76 |
243,263 | intake/intake | intake/source/csv.py | CSVSource._set_pattern_columns | def _set_pattern_columns(self, path_column):
"""Get a column of values for each field in pattern
"""
try:
# CategoricalDtype allows specifying known categories when
# creating objects. It was added in pandas 0.21.0.
from pandas.api.types import CategoricalDtype
_HAS_CDT = True
except ImportError:
_HAS_CDT = False
col = self._dataframe[path_column]
paths = col.cat.categories
column_by_field = {field:
col.cat.codes.map(dict(enumerate(values))).astype(
"category" if not _HAS_CDT else CategoricalDtype(set(values))
) for field, values in reverse_formats(self.pattern, paths).items()
}
self._dataframe = self._dataframe.assign(**column_by_field) | python | def _set_pattern_columns(self, path_column):
try:
# CategoricalDtype allows specifying known categories when
# creating objects. It was added in pandas 0.21.0.
from pandas.api.types import CategoricalDtype
_HAS_CDT = True
except ImportError:
_HAS_CDT = False
col = self._dataframe[path_column]
paths = col.cat.categories
column_by_field = {field:
col.cat.codes.map(dict(enumerate(values))).astype(
"category" if not _HAS_CDT else CategoricalDtype(set(values))
) for field, values in reverse_formats(self.pattern, paths).items()
}
self._dataframe = self._dataframe.assign(**column_by_field) | [
"def",
"_set_pattern_columns",
"(",
"self",
",",
"path_column",
")",
":",
"try",
":",
"# CategoricalDtype allows specifying known categories when",
"# creating objects. It was added in pandas 0.21.0.",
"from",
"pandas",
".",
"api",
".",
"types",
"import",
"CategoricalDtype",
... | Get a column of values for each field in pattern | [
"Get",
"a",
"column",
"of",
"values",
"for",
"each",
"field",
"in",
"pattern"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/csv.py#L58-L77 |
243,264 | intake/intake | intake/source/csv.py | CSVSource._path_column | def _path_column(self):
"""Set ``include_path_column`` in csv_kwargs and returns path column name
"""
path_column = self._csv_kwargs.get('include_path_column')
if path_column is None:
# if path column name is not set by user, set to a unique string to
# avoid conflicts
path_column = unique_string()
self._csv_kwargs['include_path_column'] = path_column
elif isinstance(path_column, bool):
path_column = 'path'
self._csv_kwargs['include_path_column'] = path_column
return path_column | python | def _path_column(self):
path_column = self._csv_kwargs.get('include_path_column')
if path_column is None:
# if path column name is not set by user, set to a unique string to
# avoid conflicts
path_column = unique_string()
self._csv_kwargs['include_path_column'] = path_column
elif isinstance(path_column, bool):
path_column = 'path'
self._csv_kwargs['include_path_column'] = path_column
return path_column | [
"def",
"_path_column",
"(",
"self",
")",
":",
"path_column",
"=",
"self",
".",
"_csv_kwargs",
".",
"get",
"(",
"'include_path_column'",
")",
"if",
"path_column",
"is",
"None",
":",
"# if path column name is not set by user, set to a unique string to",
"# avoid conflicts",... | Set ``include_path_column`` in csv_kwargs and returns path column name | [
"Set",
"include_path_column",
"in",
"csv_kwargs",
"and",
"returns",
"path",
"column",
"name"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/csv.py#L79-L92 |
243,265 | intake/intake | intake/source/csv.py | CSVSource._open_dataset | def _open_dataset(self, urlpath):
"""Open dataset using dask and use pattern fields to set new columns
"""
import dask.dataframe
if self.pattern is None:
self._dataframe = dask.dataframe.read_csv(
urlpath, storage_options=self._storage_options,
**self._csv_kwargs)
return
if not (DASK_VERSION >= '0.19.0'):
raise ValueError("Your version of dask is '{}'. "
"The ability to include filenames in read_csv output "
"(``include_path_column``) was added in 0.19.0, so "
"pattern urlpaths are not supported.".format(DASK_VERSION))
drop_path_column = 'include_path_column' not in self._csv_kwargs
path_column = self._path_column()
self._dataframe = dask.dataframe.read_csv(
urlpath, storage_options=self._storage_options, **self._csv_kwargs)
# add the new columns to the dataframe
self._set_pattern_columns(path_column)
if drop_path_column:
self._dataframe = self._dataframe.drop([path_column], axis=1) | python | def _open_dataset(self, urlpath):
import dask.dataframe
if self.pattern is None:
self._dataframe = dask.dataframe.read_csv(
urlpath, storage_options=self._storage_options,
**self._csv_kwargs)
return
if not (DASK_VERSION >= '0.19.0'):
raise ValueError("Your version of dask is '{}'. "
"The ability to include filenames in read_csv output "
"(``include_path_column``) was added in 0.19.0, so "
"pattern urlpaths are not supported.".format(DASK_VERSION))
drop_path_column = 'include_path_column' not in self._csv_kwargs
path_column = self._path_column()
self._dataframe = dask.dataframe.read_csv(
urlpath, storage_options=self._storage_options, **self._csv_kwargs)
# add the new columns to the dataframe
self._set_pattern_columns(path_column)
if drop_path_column:
self._dataframe = self._dataframe.drop([path_column], axis=1) | [
"def",
"_open_dataset",
"(",
"self",
",",
"urlpath",
")",
":",
"import",
"dask",
".",
"dataframe",
"if",
"self",
".",
"pattern",
"is",
"None",
":",
"self",
".",
"_dataframe",
"=",
"dask",
".",
"dataframe",
".",
"read_csv",
"(",
"urlpath",
",",
"storage_o... | Open dataset using dask and use pattern fields to set new columns | [
"Open",
"dataset",
"using",
"dask",
"and",
"use",
"pattern",
"fields",
"to",
"set",
"new",
"columns"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/csv.py#L94-L121 |
243,266 | intake/intake | intake/gui/catalog/search.py | Search.do_search | def do_search(self, arg=None):
"""Do search and close panel"""
new_cats = []
for cat in self.cats:
new_cat = cat.search(self.inputs.text,
depth=self.inputs.depth)
if len(list(new_cat)) > 0:
new_cats.append(new_cat)
if len(new_cats) > 0:
self.done_callback(new_cats)
self.visible = False | python | def do_search(self, arg=None):
new_cats = []
for cat in self.cats:
new_cat = cat.search(self.inputs.text,
depth=self.inputs.depth)
if len(list(new_cat)) > 0:
new_cats.append(new_cat)
if len(new_cats) > 0:
self.done_callback(new_cats)
self.visible = False | [
"def",
"do_search",
"(",
"self",
",",
"arg",
"=",
"None",
")",
":",
"new_cats",
"=",
"[",
"]",
"for",
"cat",
"in",
"self",
".",
"cats",
":",
"new_cat",
"=",
"cat",
".",
"search",
"(",
"self",
".",
"inputs",
".",
"text",
",",
"depth",
"=",
"self",... | Do search and close panel | [
"Do",
"search",
"and",
"close",
"panel"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/catalog/search.py#L126-L136 |
243,267 | intake/intake | intake/container/ndarray.py | RemoteArray._persist | def _persist(source, path, component=None, storage_options=None,
**kwargs):
"""Save array to local persistent store
Makes a parquet dataset out of the data using zarr.
This then becomes a data entry in the persisted datasets catalog.
Only works locally for the moment.
Parameters
----------
source: a DataSource instance to save
name: str or None
Key to refer to this persisted dataset by. If not given, will
attempt to get from the source's name
kwargs: passed on to zarr array creation, see
"""
from dask.array import to_zarr, from_array
from ..source.zarr import ZarrArraySource
try:
arr = source.to_dask()
except NotImplementedError:
arr = from_array(source.read(), chunks=-1).rechunk('auto')
to_zarr(arr, path, component=None,
storage_options=storage_options, **kwargs)
source = ZarrArraySource(path, storage_options, component)
return source | python | def _persist(source, path, component=None, storage_options=None,
**kwargs):
from dask.array import to_zarr, from_array
from ..source.zarr import ZarrArraySource
try:
arr = source.to_dask()
except NotImplementedError:
arr = from_array(source.read(), chunks=-1).rechunk('auto')
to_zarr(arr, path, component=None,
storage_options=storage_options, **kwargs)
source = ZarrArraySource(path, storage_options, component)
return source | [
"def",
"_persist",
"(",
"source",
",",
"path",
",",
"component",
"=",
"None",
",",
"storage_options",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
"dask",
".",
"array",
"import",
"to_zarr",
",",
"from_array",
"from",
".",
".",
"source",
".",... | Save array to local persistent store
Makes a parquet dataset out of the data using zarr.
This then becomes a data entry in the persisted datasets catalog.
Only works locally for the moment.
Parameters
----------
source: a DataSource instance to save
name: str or None
Key to refer to this persisted dataset by. If not given, will
attempt to get from the source's name
kwargs: passed on to zarr array creation, see | [
"Save",
"array",
"to",
"local",
"persistent",
"store"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/container/ndarray.py#L68-L94 |
243,268 | intake/intake | intake/gui/source/defined_plots.py | DefinedPlots.source | def source(self, source):
"""When the source gets updated, update the the options in the selector"""
BaseView.source.fset(self, source)
if self.select:
self.select.options = self.options | python | def source(self, source):
BaseView.source.fset(self, source)
if self.select:
self.select.options = self.options | [
"def",
"source",
"(",
"self",
",",
"source",
")",
":",
"BaseView",
".",
"source",
".",
"fset",
"(",
"self",
",",
"source",
")",
"if",
"self",
".",
"select",
":",
"self",
".",
"select",
".",
"options",
"=",
"self",
".",
"options"
] | When the source gets updated, update the the options in the selector | [
"When",
"the",
"source",
"gets",
"updated",
"update",
"the",
"the",
"options",
"in",
"the",
"selector"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/source/defined_plots.py#L87-L91 |
243,269 | intake/intake | intake/source/textfiles.py | get_file | def get_file(f, decoder, read):
"""Serializable function to take an OpenFile object and read lines"""
with f as f:
if decoder is None:
return list(f)
else:
d = f.read() if read else f
out = decoder(d)
if isinstance(out, (tuple, list)):
return out
else:
return [out] | python | def get_file(f, decoder, read):
with f as f:
if decoder is None:
return list(f)
else:
d = f.read() if read else f
out = decoder(d)
if isinstance(out, (tuple, list)):
return out
else:
return [out] | [
"def",
"get_file",
"(",
"f",
",",
"decoder",
",",
"read",
")",
":",
"with",
"f",
"as",
"f",
":",
"if",
"decoder",
"is",
"None",
":",
"return",
"list",
"(",
"f",
")",
"else",
":",
"d",
"=",
"f",
".",
"read",
"(",
")",
"if",
"read",
"else",
"f"... | Serializable function to take an OpenFile object and read lines | [
"Serializable",
"function",
"to",
"take",
"an",
"OpenFile",
"object",
"and",
"read",
"lines"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/textfiles.py#L113-L124 |
243,270 | intake/intake | intake/auth/base.py | BaseAuth.get_case_insensitive | def get_case_insensitive(self, dictionary, key, default=None):
"""Case-insensitive search of a dictionary for key.
Returns the value if key match is found, otherwise default.
"""
lower_key = key.lower()
for k, v in dictionary.items():
if lower_key == k.lower():
return v
else:
return default | python | def get_case_insensitive(self, dictionary, key, default=None):
lower_key = key.lower()
for k, v in dictionary.items():
if lower_key == k.lower():
return v
else:
return default | [
"def",
"get_case_insensitive",
"(",
"self",
",",
"dictionary",
",",
"key",
",",
"default",
"=",
"None",
")",
":",
"lower_key",
"=",
"key",
".",
"lower",
"(",
")",
"for",
"k",
",",
"v",
"in",
"dictionary",
".",
"items",
"(",
")",
":",
"if",
"lower_key... | Case-insensitive search of a dictionary for key.
Returns the value if key match is found, otherwise default. | [
"Case",
"-",
"insensitive",
"search",
"of",
"a",
"dictionary",
"for",
"key",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/auth/base.py#L46-L56 |
243,271 | intake/intake | intake/gui/catalog/add.py | FileSelector.url | def url(self):
"""Path to local catalog file"""
return os.path.join(self.path, self.main.value[0]) | python | def url(self):
return os.path.join(self.path, self.main.value[0]) | [
"def",
"url",
"(",
"self",
")",
":",
"return",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"path",
",",
"self",
".",
"main",
".",
"value",
"[",
"0",
"]",
")"
] | Path to local catalog file | [
"Path",
"to",
"local",
"catalog",
"file"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/catalog/add.py#L81-L83 |
243,272 | intake/intake | intake/gui/catalog/add.py | FileSelector.validate | def validate(self, arg=None):
"""Check that inputted path is valid - set validator accordingly"""
if os.path.isdir(self.path):
self.validator.object = None
else:
self.validator.object = ICONS['error'] | python | def validate(self, arg=None):
if os.path.isdir(self.path):
self.validator.object = None
else:
self.validator.object = ICONS['error'] | [
"def",
"validate",
"(",
"self",
",",
"arg",
"=",
"None",
")",
":",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"self",
".",
"path",
")",
":",
"self",
".",
"validator",
".",
"object",
"=",
"None",
"else",
":",
"self",
".",
"validator",
".",
"objec... | Check that inputted path is valid - set validator accordingly | [
"Check",
"that",
"inputted",
"path",
"is",
"valid",
"-",
"set",
"validator",
"accordingly"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/catalog/add.py#L91-L96 |
243,273 | intake/intake | intake/gui/catalog/add.py | CatAdder.add_cat | def add_cat(self, arg=None):
"""Add cat and close panel"""
try:
self.done_callback(self.cat)
self.visible = False
except Exception as e:
self.validator.object = ICONS['error']
raise e | python | def add_cat(self, arg=None):
try:
self.done_callback(self.cat)
self.visible = False
except Exception as e:
self.validator.object = ICONS['error']
raise e | [
"def",
"add_cat",
"(",
"self",
",",
"arg",
"=",
"None",
")",
":",
"try",
":",
"self",
".",
"done_callback",
"(",
"self",
".",
"cat",
")",
"self",
".",
"visible",
"=",
"False",
"except",
"Exception",
"as",
"e",
":",
"self",
".",
"validator",
".",
"o... | Add cat and close panel | [
"Add",
"cat",
"and",
"close",
"panel"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/catalog/add.py#L248-L255 |
243,274 | intake/intake | intake/gui/catalog/add.py | CatAdder.tab_change | def tab_change(self, event):
"""When tab changes remove error, and enable widget if on url tab"""
self.remove_error()
if event.new == 1:
self.widget.disabled = False | python | def tab_change(self, event):
self.remove_error()
if event.new == 1:
self.widget.disabled = False | [
"def",
"tab_change",
"(",
"self",
",",
"event",
")",
":",
"self",
".",
"remove_error",
"(",
")",
"if",
"event",
".",
"new",
"==",
"1",
":",
"self",
".",
"widget",
".",
"disabled",
"=",
"False"
] | When tab changes remove error, and enable widget if on url tab | [
"When",
"tab",
"changes",
"remove",
"error",
"and",
"enable",
"widget",
"if",
"on",
"url",
"tab"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/catalog/add.py#L261-L265 |
243,275 | intake/intake | intake/gui/catalog/gui.py | CatGUI.callback | def callback(self, cats):
"""When a catalog is selected, enable widgets that depend on that condition
and do done_callback"""
enable = bool(cats)
if not enable:
# close search if it is visible
self.search.visible = False
enable_widget(self.search_widget, enable)
enable_widget(self.remove_widget, enable)
if self.done_callback:
self.done_callback(cats) | python | def callback(self, cats):
enable = bool(cats)
if not enable:
# close search if it is visible
self.search.visible = False
enable_widget(self.search_widget, enable)
enable_widget(self.remove_widget, enable)
if self.done_callback:
self.done_callback(cats) | [
"def",
"callback",
"(",
"self",
",",
"cats",
")",
":",
"enable",
"=",
"bool",
"(",
"cats",
")",
"if",
"not",
"enable",
":",
"# close search if it is visible",
"self",
".",
"search",
".",
"visible",
"=",
"False",
"enable_widget",
"(",
"self",
".",
"search_w... | When a catalog is selected, enable widgets that depend on that condition
and do done_callback | [
"When",
"a",
"catalog",
"is",
"selected",
"enable",
"widgets",
"that",
"depend",
"on",
"that",
"condition",
"and",
"do",
"done_callback"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/catalog/gui.py#L119-L130 |
243,276 | intake/intake | intake/gui/catalog/gui.py | CatGUI.on_click_search_widget | def on_click_search_widget(self, event):
""" When the search control is toggled, set visibility and hand down cats"""
self.search.cats = self.cats
self.search.visible = event.new
if self.search.visible:
self.search.watchers.append(
self.select.widget.link(self.search, value='cats')) | python | def on_click_search_widget(self, event):
self.search.cats = self.cats
self.search.visible = event.new
if self.search.visible:
self.search.watchers.append(
self.select.widget.link(self.search, value='cats')) | [
"def",
"on_click_search_widget",
"(",
"self",
",",
"event",
")",
":",
"self",
".",
"search",
".",
"cats",
"=",
"self",
".",
"cats",
"self",
".",
"search",
".",
"visible",
"=",
"event",
".",
"new",
"if",
"self",
".",
"search",
".",
"visible",
":",
"se... | When the search control is toggled, set visibility and hand down cats | [
"When",
"the",
"search",
"control",
"is",
"toggled",
"set",
"visibility",
"and",
"hand",
"down",
"cats"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/catalog/gui.py#L132-L138 |
243,277 | intake/intake | intake/utils.py | no_duplicates_constructor | def no_duplicates_constructor(loader, node, deep=False):
"""Check for duplicate keys while loading YAML
https://gist.github.com/pypt/94d747fe5180851196eb
"""
mapping = {}
for key_node, value_node in node.value:
key = loader.construct_object(key_node, deep=deep)
value = loader.construct_object(value_node, deep=deep)
if key in mapping:
from intake.catalog.exceptions import DuplicateKeyError
raise DuplicateKeyError("while constructing a mapping",
node.start_mark,
"found duplicate key (%s)" % key,
key_node.start_mark)
mapping[key] = value
return loader.construct_mapping(node, deep) | python | def no_duplicates_constructor(loader, node, deep=False):
mapping = {}
for key_node, value_node in node.value:
key = loader.construct_object(key_node, deep=deep)
value = loader.construct_object(value_node, deep=deep)
if key in mapping:
from intake.catalog.exceptions import DuplicateKeyError
raise DuplicateKeyError("while constructing a mapping",
node.start_mark,
"found duplicate key (%s)" % key,
key_node.start_mark)
mapping[key] = value
return loader.construct_mapping(node, deep) | [
"def",
"no_duplicates_constructor",
"(",
"loader",
",",
"node",
",",
"deep",
"=",
"False",
")",
":",
"mapping",
"=",
"{",
"}",
"for",
"key_node",
",",
"value_node",
"in",
"node",
".",
"value",
":",
"key",
"=",
"loader",
".",
"construct_object",
"(",
"key... | Check for duplicate keys while loading YAML
https://gist.github.com/pypt/94d747fe5180851196eb | [
"Check",
"for",
"duplicate",
"keys",
"while",
"loading",
"YAML"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/utils.py#L20-L39 |
243,278 | intake/intake | intake/utils.py | classname | def classname(ob):
"""Get the object's class's name as package.module.Class"""
import inspect
if inspect.isclass(ob):
return '.'.join([ob.__module__, ob.__name__])
else:
return '.'.join([ob.__class__.__module__, ob.__class__.__name__]) | python | def classname(ob):
import inspect
if inspect.isclass(ob):
return '.'.join([ob.__module__, ob.__name__])
else:
return '.'.join([ob.__class__.__module__, ob.__class__.__name__]) | [
"def",
"classname",
"(",
"ob",
")",
":",
"import",
"inspect",
"if",
"inspect",
".",
"isclass",
"(",
"ob",
")",
":",
"return",
"'.'",
".",
"join",
"(",
"[",
"ob",
".",
"__module__",
",",
"ob",
".",
"__name__",
"]",
")",
"else",
":",
"return",
"'.'",... | Get the object's class's name as package.module.Class | [
"Get",
"the",
"object",
"s",
"class",
"s",
"name",
"as",
"package",
".",
"module",
".",
"Class"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/utils.py#L62-L68 |
243,279 | intake/intake | intake/utils.py | pretty_describe | def pretty_describe(object, nestedness=0, indent=2):
"""Maintain dict ordering - but make string version prettier"""
if not isinstance(object, dict):
return str(object)
sep = f'\n{" " * nestedness * indent}'
out = sep.join((f'{k}: {pretty_describe(v, nestedness + 1)}' for k, v in object.items()))
if nestedness > 0 and out:
return f'{sep}{out}'
return out | python | def pretty_describe(object, nestedness=0, indent=2):
if not isinstance(object, dict):
return str(object)
sep = f'\n{" " * nestedness * indent}'
out = sep.join((f'{k}: {pretty_describe(v, nestedness + 1)}' for k, v in object.items()))
if nestedness > 0 and out:
return f'{sep}{out}'
return out | [
"def",
"pretty_describe",
"(",
"object",
",",
"nestedness",
"=",
"0",
",",
"indent",
"=",
"2",
")",
":",
"if",
"not",
"isinstance",
"(",
"object",
",",
"dict",
")",
":",
"return",
"str",
"(",
"object",
")",
"sep",
"=",
"f'\\n{\" \" * nestedness * indent}'"... | Maintain dict ordering - but make string version prettier | [
"Maintain",
"dict",
"ordering",
"-",
"but",
"make",
"string",
"version",
"prettier"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/utils.py#L125-L133 |
243,280 | intake/intake | intake/gui/gui.py | GUI.add | def add(self, *args, **kwargs):
"""Add to list of cats"""
return self.cat.select.add(*args, **kwargs) | python | def add(self, *args, **kwargs):
return self.cat.select.add(*args, **kwargs) | [
"def",
"add",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"self",
".",
"cat",
".",
"select",
".",
"add",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | Add to list of cats | [
"Add",
"to",
"list",
"of",
"cats"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/gui.py#L74-L76 |
243,281 | intake/intake | intake/gui/base.py | coerce_to_list | def coerce_to_list(items, preprocess=None):
"""Given an instance or list, coerce to list.
With optional preprocessing.
"""
if not isinstance(items, list):
items = [items]
if preprocess:
items = list(map(preprocess, items))
return items | python | def coerce_to_list(items, preprocess=None):
if not isinstance(items, list):
items = [items]
if preprocess:
items = list(map(preprocess, items))
return items | [
"def",
"coerce_to_list",
"(",
"items",
",",
"preprocess",
"=",
"None",
")",
":",
"if",
"not",
"isinstance",
"(",
"items",
",",
"list",
")",
":",
"items",
"=",
"[",
"items",
"]",
"if",
"preprocess",
":",
"items",
"=",
"list",
"(",
"map",
"(",
"preproc... | Given an instance or list, coerce to list.
With optional preprocessing. | [
"Given",
"an",
"instance",
"or",
"list",
"coerce",
"to",
"list",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/base.py#L25-L34 |
243,282 | intake/intake | intake/gui/base.py | Base._repr_mimebundle_ | def _repr_mimebundle_(self, *args, **kwargs):
"""Display in a notebook or a server"""
try:
if self.logo:
p = pn.Row(
self.logo_panel,
self.panel,
margin=0)
return p._repr_mimebundle_(*args, **kwargs)
else:
return self.panel._repr_mimebundle_(*args, **kwargs)
except:
raise RuntimeError("Panel does not seem to be set up properly") | python | def _repr_mimebundle_(self, *args, **kwargs):
try:
if self.logo:
p = pn.Row(
self.logo_panel,
self.panel,
margin=0)
return p._repr_mimebundle_(*args, **kwargs)
else:
return self.panel._repr_mimebundle_(*args, **kwargs)
except:
raise RuntimeError("Panel does not seem to be set up properly") | [
"def",
"_repr_mimebundle_",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"if",
"self",
".",
"logo",
":",
"p",
"=",
"pn",
".",
"Row",
"(",
"self",
".",
"logo_panel",
",",
"self",
".",
"panel",
",",
"margin",
"=",
... | Display in a notebook or a server | [
"Display",
"in",
"a",
"notebook",
"or",
"a",
"server"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/base.py#L78-L90 |
243,283 | intake/intake | intake/gui/base.py | Base.unwatch | def unwatch(self):
"""Get rid of any lingering watchers and remove from list"""
if self.watchers is not None:
unwatched = []
for watcher in self.watchers:
watcher.inst.param.unwatch(watcher)
unwatched.append(watcher)
self.watchers = [w for w in self.watchers if w not in unwatched] | python | def unwatch(self):
if self.watchers is not None:
unwatched = []
for watcher in self.watchers:
watcher.inst.param.unwatch(watcher)
unwatched.append(watcher)
self.watchers = [w for w in self.watchers if w not in unwatched] | [
"def",
"unwatch",
"(",
"self",
")",
":",
"if",
"self",
".",
"watchers",
"is",
"not",
"None",
":",
"unwatched",
"=",
"[",
"]",
"for",
"watcher",
"in",
"self",
".",
"watchers",
":",
"watcher",
".",
"inst",
".",
"param",
".",
"unwatch",
"(",
"watcher",
... | Get rid of any lingering watchers and remove from list | [
"Get",
"rid",
"of",
"any",
"lingering",
"watchers",
"and",
"remove",
"from",
"list"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/base.py#L114-L121 |
243,284 | intake/intake | intake/gui/base.py | BaseSelector._create_options | def _create_options(self, items):
"""Helper method to create options from list, or instance.
Applies preprocess method if available to create a uniform
output
"""
return OrderedDict(map(lambda x: (x.name, x),
coerce_to_list(items, self.preprocess))) | python | def _create_options(self, items):
return OrderedDict(map(lambda x: (x.name, x),
coerce_to_list(items, self.preprocess))) | [
"def",
"_create_options",
"(",
"self",
",",
"items",
")",
":",
"return",
"OrderedDict",
"(",
"map",
"(",
"lambda",
"x",
":",
"(",
"x",
".",
"name",
",",
"x",
")",
",",
"coerce_to_list",
"(",
"items",
",",
"self",
".",
"preprocess",
")",
")",
")"
] | Helper method to create options from list, or instance.
Applies preprocess method if available to create a uniform
output | [
"Helper",
"method",
"to",
"create",
"options",
"from",
"list",
"or",
"instance",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/base.py#L175-L182 |
243,285 | intake/intake | intake/gui/base.py | BaseSelector.options | def options(self, new):
"""Set options from list, or instance of named item
Over-writes old options
"""
options = self._create_options(new)
if self.widget.value:
self.widget.set_param(options=options, value=list(options.values())[:1])
else:
self.widget.options = options
self.widget.value = list(options.values())[:1] | python | def options(self, new):
options = self._create_options(new)
if self.widget.value:
self.widget.set_param(options=options, value=list(options.values())[:1])
else:
self.widget.options = options
self.widget.value = list(options.values())[:1] | [
"def",
"options",
"(",
"self",
",",
"new",
")",
":",
"options",
"=",
"self",
".",
"_create_options",
"(",
"new",
")",
"if",
"self",
".",
"widget",
".",
"value",
":",
"self",
".",
"widget",
".",
"set_param",
"(",
"options",
"=",
"options",
",",
"value... | Set options from list, or instance of named item
Over-writes old options | [
"Set",
"options",
"from",
"list",
"or",
"instance",
"of",
"named",
"item"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/base.py#L190-L200 |
243,286 | intake/intake | intake/gui/base.py | BaseSelector.add | def add(self, items):
"""Add items to options"""
options = self._create_options(items)
for k, v in options.items():
if k in self.labels and v not in self.items:
options.pop(k)
count = 0
while f'{k}_{count}' in self.labels:
count += 1
options[f'{k}_{count}'] = v
self.widget.options.update(options)
self.widget.param.trigger('options')
self.widget.value = list(options.values())[:1] | python | def add(self, items):
options = self._create_options(items)
for k, v in options.items():
if k in self.labels and v not in self.items:
options.pop(k)
count = 0
while f'{k}_{count}' in self.labels:
count += 1
options[f'{k}_{count}'] = v
self.widget.options.update(options)
self.widget.param.trigger('options')
self.widget.value = list(options.values())[:1] | [
"def",
"add",
"(",
"self",
",",
"items",
")",
":",
"options",
"=",
"self",
".",
"_create_options",
"(",
"items",
")",
"for",
"k",
",",
"v",
"in",
"options",
".",
"items",
"(",
")",
":",
"if",
"k",
"in",
"self",
".",
"labels",
"and",
"v",
"not",
... | Add items to options | [
"Add",
"items",
"to",
"options"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/base.py#L202-L214 |
243,287 | intake/intake | intake/gui/base.py | BaseSelector.remove | def remove(self, items):
"""Remove items from options"""
items = coerce_to_list(items)
new_options = {k: v for k, v in self.options.items() if v not in items}
self.widget.options = new_options
self.widget.param.trigger('options') | python | def remove(self, items):
items = coerce_to_list(items)
new_options = {k: v for k, v in self.options.items() if v not in items}
self.widget.options = new_options
self.widget.param.trigger('options') | [
"def",
"remove",
"(",
"self",
",",
"items",
")",
":",
"items",
"=",
"coerce_to_list",
"(",
"items",
")",
"new_options",
"=",
"{",
"k",
":",
"v",
"for",
"k",
",",
"v",
"in",
"self",
".",
"options",
".",
"items",
"(",
")",
"if",
"v",
"not",
"in",
... | Remove items from options | [
"Remove",
"items",
"from",
"options"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/base.py#L216-L221 |
243,288 | intake/intake | intake/gui/base.py | BaseSelector.selected | def selected(self, new):
"""Set selected from list or instance of object or name.
Over-writes existing selection
"""
def preprocess(item):
if isinstance(item, str):
return self.options[item]
return item
items = coerce_to_list(new, preprocess)
self.widget.value = items | python | def selected(self, new):
def preprocess(item):
if isinstance(item, str):
return self.options[item]
return item
items = coerce_to_list(new, preprocess)
self.widget.value = items | [
"def",
"selected",
"(",
"self",
",",
"new",
")",
":",
"def",
"preprocess",
"(",
"item",
")",
":",
"if",
"isinstance",
"(",
"item",
",",
"str",
")",
":",
"return",
"self",
".",
"options",
"[",
"item",
"]",
"return",
"item",
"items",
"=",
"coerce_to_li... | Set selected from list or instance of object or name.
Over-writes existing selection | [
"Set",
"selected",
"from",
"list",
"or",
"instance",
"of",
"object",
"or",
"name",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/base.py#L229-L239 |
243,289 | intake/intake | intake/gui/base.py | BaseView.source | def source(self, source):
"""When the source gets updated, update the select widget"""
if isinstance(source, list):
# if source is a list, get first item or None
source = source[0] if len(source) > 0 else None
self._source = source | python | def source(self, source):
if isinstance(source, list):
# if source is a list, get first item or None
source = source[0] if len(source) > 0 else None
self._source = source | [
"def",
"source",
"(",
"self",
",",
"source",
")",
":",
"if",
"isinstance",
"(",
"source",
",",
"list",
")",
":",
"# if source is a list, get first item or None",
"source",
"=",
"source",
"[",
"0",
"]",
"if",
"len",
"(",
"source",
")",
">",
"0",
"else",
"... | When the source gets updated, update the select widget | [
"When",
"the",
"source",
"gets",
"updated",
"update",
"the",
"select",
"widget"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/base.py#L268-L273 |
243,290 | intake/intake | intake/gui/source/gui.py | SourceGUI.callback | def callback(self, sources):
"""When a source is selected, enable widgets that depend on that condition
and do done_callback"""
enable = bool(sources)
if not enable:
self.plot_widget.value = False
enable_widget(self.plot_widget, enable)
if self.done_callback:
self.done_callback(sources) | python | def callback(self, sources):
enable = bool(sources)
if not enable:
self.plot_widget.value = False
enable_widget(self.plot_widget, enable)
if self.done_callback:
self.done_callback(sources) | [
"def",
"callback",
"(",
"self",
",",
"sources",
")",
":",
"enable",
"=",
"bool",
"(",
"sources",
")",
"if",
"not",
"enable",
":",
"self",
".",
"plot_widget",
".",
"value",
"=",
"False",
"enable_widget",
"(",
"self",
".",
"plot_widget",
",",
"enable",
"... | When a source is selected, enable widgets that depend on that condition
and do done_callback | [
"When",
"a",
"source",
"is",
"selected",
"enable",
"widgets",
"that",
"depend",
"on",
"that",
"condition",
"and",
"do",
"done_callback"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/source/gui.py#L112-L121 |
243,291 | intake/intake | intake/gui/source/gui.py | SourceGUI.on_click_plot_widget | def on_click_plot_widget(self, event):
""" When the plot control is toggled, set visibility and hand down source"""
self.plot.source = self.sources
self.plot.visible = event.new
if self.plot.visible:
self.plot.watchers.append(
self.select.widget.link(self.plot, value='source')) | python | def on_click_plot_widget(self, event):
self.plot.source = self.sources
self.plot.visible = event.new
if self.plot.visible:
self.plot.watchers.append(
self.select.widget.link(self.plot, value='source')) | [
"def",
"on_click_plot_widget",
"(",
"self",
",",
"event",
")",
":",
"self",
".",
"plot",
".",
"source",
"=",
"self",
".",
"sources",
"self",
".",
"plot",
".",
"visible",
"=",
"event",
".",
"new",
"if",
"self",
".",
"plot",
".",
"visible",
":",
"self"... | When the plot control is toggled, set visibility and hand down source | [
"When",
"the",
"plot",
"control",
"is",
"toggled",
"set",
"visibility",
"and",
"hand",
"down",
"source"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/source/gui.py#L123-L129 |
243,292 | intake/intake | intake/source/cache.py | sanitize_path | def sanitize_path(path):
"""Utility for cleaning up paths."""
storage_option = infer_storage_options(path)
protocol = storage_option['protocol']
if protocol in ('http', 'https'):
# Most FSs remove the protocol but not HTTPFS. We need to strip
# it to match properly.
path = os.path.normpath(path.replace("{}://".format(protocol), ''))
elif protocol == 'file':
# Remove trailing slashes from file paths.
path = os.path.normpath(path)
# Remove colons
path = path.replace(':', '')
# Otherwise we just make sure that path is posix
return make_path_posix(path) | python | def sanitize_path(path):
storage_option = infer_storage_options(path)
protocol = storage_option['protocol']
if protocol in ('http', 'https'):
# Most FSs remove the protocol but not HTTPFS. We need to strip
# it to match properly.
path = os.path.normpath(path.replace("{}://".format(protocol), ''))
elif protocol == 'file':
# Remove trailing slashes from file paths.
path = os.path.normpath(path)
# Remove colons
path = path.replace(':', '')
# Otherwise we just make sure that path is posix
return make_path_posix(path) | [
"def",
"sanitize_path",
"(",
"path",
")",
":",
"storage_option",
"=",
"infer_storage_options",
"(",
"path",
")",
"protocol",
"=",
"storage_option",
"[",
"'protocol'",
"]",
"if",
"protocol",
"in",
"(",
"'http'",
",",
"'https'",
")",
":",
"# Most FSs remove the pr... | Utility for cleaning up paths. | [
"Utility",
"for",
"cleaning",
"up",
"paths",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/cache.py#L27-L43 |
243,293 | intake/intake | intake/source/cache.py | _download | def _download(file_in, file_out, blocksize, output=False):
"""Read from input and write to output file in blocks"""
with warnings.catch_warnings():
warnings.filterwarnings('ignore')
if output:
try:
from tqdm.autonotebook import tqdm
except ImportError:
logger.warn("Cache progress bar requires tqdm to be installed:"
" conda/pip install tqdm")
output = False
if output:
try:
file_size = file_in.fs.size(file_in.path)
pbar_disabled = False
except ValueError as err:
logger.debug("File system error requesting size: {}".format(err))
file_size = 0
pbar_disabled = True
for i in range(100):
if i not in display:
display.add(i)
out = i
break
pbar = tqdm(total=file_size // 2 ** 20, leave=False,
disable=pbar_disabled,
position=out, desc=os.path.basename(file_out.path),
mininterval=0.1,
bar_format=r'{n}/|/{l_bar}')
logger.debug("Caching {}".format(file_in.path))
with file_in as f1:
with file_out as f2:
data = True
while data:
data = f1.read(blocksize)
f2.write(data)
if output:
pbar.update(len(data) // 2**20)
if output:
try:
pbar.update(pbar.total - pbar.n) # force to full
pbar.close()
except Exception as e:
logger.debug('tqdm exception: %s' % e)
finally:
display.remove(out) | python | def _download(file_in, file_out, blocksize, output=False):
with warnings.catch_warnings():
warnings.filterwarnings('ignore')
if output:
try:
from tqdm.autonotebook import tqdm
except ImportError:
logger.warn("Cache progress bar requires tqdm to be installed:"
" conda/pip install tqdm")
output = False
if output:
try:
file_size = file_in.fs.size(file_in.path)
pbar_disabled = False
except ValueError as err:
logger.debug("File system error requesting size: {}".format(err))
file_size = 0
pbar_disabled = True
for i in range(100):
if i not in display:
display.add(i)
out = i
break
pbar = tqdm(total=file_size // 2 ** 20, leave=False,
disable=pbar_disabled,
position=out, desc=os.path.basename(file_out.path),
mininterval=0.1,
bar_format=r'{n}/|/{l_bar}')
logger.debug("Caching {}".format(file_in.path))
with file_in as f1:
with file_out as f2:
data = True
while data:
data = f1.read(blocksize)
f2.write(data)
if output:
pbar.update(len(data) // 2**20)
if output:
try:
pbar.update(pbar.total - pbar.n) # force to full
pbar.close()
except Exception as e:
logger.debug('tqdm exception: %s' % e)
finally:
display.remove(out) | [
"def",
"_download",
"(",
"file_in",
",",
"file_out",
",",
"blocksize",
",",
"output",
"=",
"False",
")",
":",
"with",
"warnings",
".",
"catch_warnings",
"(",
")",
":",
"warnings",
".",
"filterwarnings",
"(",
"'ignore'",
")",
"if",
"output",
":",
"try",
"... | Read from input and write to output file in blocks | [
"Read",
"from",
"input",
"and",
"write",
"to",
"output",
"file",
"in",
"blocks"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/cache.py#L259-L306 |
243,294 | intake/intake | intake/source/cache.py | make_caches | def make_caches(driver, specs, catdir=None, cache_dir=None, storage_options={}):
"""
Creates Cache objects from the cache_specs provided in the catalog yaml file
Parameters
----------
driver: str
Name of the plugin that can load catalog entry
specs: list
Specification for caching the data source.
"""
if specs is None:
return []
return [registry.get(spec['type'], FileCache)(
driver, spec, catdir=catdir, cache_dir=cache_dir,
storage_options=storage_options)
for spec in specs] | python | def make_caches(driver, specs, catdir=None, cache_dir=None, storage_options={}):
if specs is None:
return []
return [registry.get(spec['type'], FileCache)(
driver, spec, catdir=catdir, cache_dir=cache_dir,
storage_options=storage_options)
for spec in specs] | [
"def",
"make_caches",
"(",
"driver",
",",
"specs",
",",
"catdir",
"=",
"None",
",",
"cache_dir",
"=",
"None",
",",
"storage_options",
"=",
"{",
"}",
")",
":",
"if",
"specs",
"is",
"None",
":",
"return",
"[",
"]",
"return",
"[",
"registry",
".",
"get"... | Creates Cache objects from the cache_specs provided in the catalog yaml file
Parameters
----------
driver: str
Name of the plugin that can load catalog entry
specs: list
Specification for caching the data source. | [
"Creates",
"Cache",
"objects",
"from",
"the",
"cache_specs",
"provided",
"in",
"the",
"catalog",
"yaml",
"file"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/cache.py#L540-L557 |
243,295 | intake/intake | intake/source/cache.py | BaseCache.load | def load(self, urlpath, output=None, **kwargs):
"""
Downloads data from a given url, generates a hashed filename,
logs metadata, and caches it locally.
Parameters
----------
urlpath: str, location of data
May be a local path, or remote path if including a protocol specifier
such as ``'s3://'``. May include glob wildcards.
output: bool
Whether to show progress bars; turn off for testing
Returns
-------
List of local cache_paths to be opened instead of the remote file(s). If
caching is disable, the urlpath is returned.
"""
if conf.get('cache_disabled', False):
return [urlpath]
self.output = output if output is not None else conf.get(
'cache_download_progress', True)
cache_paths = self._from_metadata(urlpath)
if cache_paths is None:
files_in, files_out = self._make_files(urlpath)
self._load(files_in, files_out, urlpath)
cache_paths = self._from_metadata(urlpath)
return cache_paths | python | def load(self, urlpath, output=None, **kwargs):
if conf.get('cache_disabled', False):
return [urlpath]
self.output = output if output is not None else conf.get(
'cache_download_progress', True)
cache_paths = self._from_metadata(urlpath)
if cache_paths is None:
files_in, files_out = self._make_files(urlpath)
self._load(files_in, files_out, urlpath)
cache_paths = self._from_metadata(urlpath)
return cache_paths | [
"def",
"load",
"(",
"self",
",",
"urlpath",
",",
"output",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"conf",
".",
"get",
"(",
"'cache_disabled'",
",",
"False",
")",
":",
"return",
"[",
"urlpath",
"]",
"self",
".",
"output",
"=",
"output... | Downloads data from a given url, generates a hashed filename,
logs metadata, and caches it locally.
Parameters
----------
urlpath: str, location of data
May be a local path, or remote path if including a protocol specifier
such as ``'s3://'``. May include glob wildcards.
output: bool
Whether to show progress bars; turn off for testing
Returns
-------
List of local cache_paths to be opened instead of the remote file(s). If
caching is disable, the urlpath is returned. | [
"Downloads",
"data",
"from",
"a",
"given",
"url",
"generates",
"a",
"hashed",
"filename",
"logs",
"metadata",
"and",
"caches",
"it",
"locally",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/cache.py#L133-L162 |
243,296 | intake/intake | intake/source/cache.py | BaseCache._load | def _load(self, files_in, files_out, urlpath, meta=True):
"""Download a set of files"""
import dask
out = []
outnames = []
for file_in, file_out in zip(files_in, files_out):
cache_path = file_out.path
outnames.append(cache_path)
# If `_munge_path` did not find a match we want to avoid
# writing to the urlpath.
if cache_path == urlpath:
continue
if not os.path.isfile(cache_path):
logger.debug("Caching file: {}".format(file_in.path))
logger.debug("Original path: {}".format(urlpath))
logger.debug("Cached at: {}".format(cache_path))
if meta:
self._log_metadata(urlpath, file_in.path, cache_path)
ddown = dask.delayed(_download)
out.append(ddown(file_in, file_out, self.blocksize,
self.output))
dask.compute(*out)
return outnames | python | def _load(self, files_in, files_out, urlpath, meta=True):
import dask
out = []
outnames = []
for file_in, file_out in zip(files_in, files_out):
cache_path = file_out.path
outnames.append(cache_path)
# If `_munge_path` did not find a match we want to avoid
# writing to the urlpath.
if cache_path == urlpath:
continue
if not os.path.isfile(cache_path):
logger.debug("Caching file: {}".format(file_in.path))
logger.debug("Original path: {}".format(urlpath))
logger.debug("Cached at: {}".format(cache_path))
if meta:
self._log_metadata(urlpath, file_in.path, cache_path)
ddown = dask.delayed(_download)
out.append(ddown(file_in, file_out, self.blocksize,
self.output))
dask.compute(*out)
return outnames | [
"def",
"_load",
"(",
"self",
",",
"files_in",
",",
"files_out",
",",
"urlpath",
",",
"meta",
"=",
"True",
")",
":",
"import",
"dask",
"out",
"=",
"[",
"]",
"outnames",
"=",
"[",
"]",
"for",
"file_in",
",",
"file_out",
"in",
"zip",
"(",
"files_in",
... | Download a set of files | [
"Download",
"a",
"set",
"of",
"files"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/cache.py#L170-L194 |
243,297 | intake/intake | intake/source/cache.py | BaseCache.clear_cache | def clear_cache(self, urlpath):
"""
Clears cache and metadata for a given urlpath.
Parameters
----------
urlpath: str, location of data
May be a local path, or remote path if including a protocol specifier
such as ``'s3://'``. May include glob wildcards.
"""
cache_entries = self._metadata.pop(urlpath, []) # ignore if missing
for cache_entry in cache_entries:
try:
os.remove(cache_entry['cache_path'])
except (OSError, IOError):
pass
try:
fn = os.path.dirname(cache_entry['cache_path'])
os.rmdir(fn)
except (OSError, IOError):
logger.debug("Failed to remove cache directory: %s" % fn) | python | def clear_cache(self, urlpath):
cache_entries = self._metadata.pop(urlpath, []) # ignore if missing
for cache_entry in cache_entries:
try:
os.remove(cache_entry['cache_path'])
except (OSError, IOError):
pass
try:
fn = os.path.dirname(cache_entry['cache_path'])
os.rmdir(fn)
except (OSError, IOError):
logger.debug("Failed to remove cache directory: %s" % fn) | [
"def",
"clear_cache",
"(",
"self",
",",
"urlpath",
")",
":",
"cache_entries",
"=",
"self",
".",
"_metadata",
".",
"pop",
"(",
"urlpath",
",",
"[",
"]",
")",
"# ignore if missing",
"for",
"cache_entry",
"in",
"cache_entries",
":",
"try",
":",
"os",
".",
"... | Clears cache and metadata for a given urlpath.
Parameters
----------
urlpath: str, location of data
May be a local path, or remote path if including a protocol specifier
such as ``'s3://'``. May include glob wildcards. | [
"Clears",
"cache",
"and",
"metadata",
"for",
"a",
"given",
"urlpath",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/cache.py#L215-L236 |
243,298 | intake/intake | intake/source/cache.py | BaseCache.clear_all | def clear_all(self):
"""
Clears all cache and metadata.
"""
for urlpath in self._metadata.keys():
self.clear_cache(urlpath)
# Safely clean up anything else.
if not os.path.isdir(self._cache_dir):
return
for subdir in os.listdir(self._cache_dir):
try:
fn = posixpath.join(self._cache_dir, subdir)
if os.path.isdir(fn):
shutil.rmtree(fn)
if os.path.isfile(fn):
os.remove(fn)
except (OSError, IOError) as e:
logger.warning(str(e)) | python | def clear_all(self):
for urlpath in self._metadata.keys():
self.clear_cache(urlpath)
# Safely clean up anything else.
if not os.path.isdir(self._cache_dir):
return
for subdir in os.listdir(self._cache_dir):
try:
fn = posixpath.join(self._cache_dir, subdir)
if os.path.isdir(fn):
shutil.rmtree(fn)
if os.path.isfile(fn):
os.remove(fn)
except (OSError, IOError) as e:
logger.warning(str(e)) | [
"def",
"clear_all",
"(",
"self",
")",
":",
"for",
"urlpath",
"in",
"self",
".",
"_metadata",
".",
"keys",
"(",
")",
":",
"self",
".",
"clear_cache",
"(",
"urlpath",
")",
"# Safely clean up anything else.",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(... | Clears all cache and metadata. | [
"Clears",
"all",
"cache",
"and",
"metadata",
"."
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/cache.py#L238-L256 |
243,299 | mottosso/Qt.py | membership.py | write_json | def write_json(dictionary, filename):
"""Write dictionary to JSON"""
with open(filename, 'w') as data_file:
json.dump(dictionary, data_file, indent=4, sort_keys=True)
print('--> Wrote ' + os.path.basename(filename)) | python | def write_json(dictionary, filename):
with open(filename, 'w') as data_file:
json.dump(dictionary, data_file, indent=4, sort_keys=True)
print('--> Wrote ' + os.path.basename(filename)) | [
"def",
"write_json",
"(",
"dictionary",
",",
"filename",
")",
":",
"with",
"open",
"(",
"filename",
",",
"'w'",
")",
"as",
"data_file",
":",
"json",
".",
"dump",
"(",
"dictionary",
",",
"data_file",
",",
"indent",
"=",
"4",
",",
"sort_keys",
"=",
"True... | Write dictionary to JSON | [
"Write",
"dictionary",
"to",
"JSON"
] | d88a0c1762ad90d1965008cc14c53504bbcc0061 | https://github.com/mottosso/Qt.py/blob/d88a0c1762ad90d1965008cc14c53504bbcc0061/membership.py#L35-L39 |