repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
listlengths
20
707
docstring
stringlengths
3
17.3k
docstring_tokens
listlengths
3
222
sha
stringlengths
40
40
url
stringlengths
87
242
partition
stringclasses
1 value
idx
int64
0
252k
intake/intake
intake/source/discovery.py
load_plugins_from_module
def load_plugins_from_module(module_name): """Imports a module and returns dictionary of discovered Intake plugins. Plugin classes are instantiated and added to the dictionary, keyed by the name attribute of the plugin object. """ plugins = {} try: if module_name.endswith('.py'): import imp mod = imp.load_source('module.name', module_name) else: mod = importlib.import_module(module_name) except Exception as e: logger.debug("Import module <{}> failed: {}".format(module_name, e)) return {} for _, cls in inspect.getmembers(mod, inspect.isclass): # Don't try to register plugins imported into this module elsewhere if issubclass(cls, (Catalog, DataSource)): plugins[cls.name] = cls return plugins
python
def load_plugins_from_module(module_name):
    """Imports a module and returns dictionary of discovered Intake plugins.

    Plugin classes are added to the dictionary, keyed by the ``name``
    attribute of the plugin class.

    Parameters
    ----------
    module_name : str
        Either a dotted module path (``"package.module"``) or a path to a
        ``.py`` file on disk.

    Returns
    -------
    dict
        Mapping of plugin name to plugin class; empty if the import fails.
    """
    plugins = {}
    try:
        if module_name.endswith('.py'):
            # Load directly from a file path. ``imp.load_source`` is
            # deprecated (removed in Python 3.12); use importlib instead.
            import importlib.util
            spec = importlib.util.spec_from_file_location('module.name',
                                                          module_name)
            mod = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(mod)
        else:
            mod = importlib.import_module(module_name)
    except Exception as e:
        # Best-effort discovery: a broken module must not break startup.
        logger.debug("Import module <{}> failed: {}".format(module_name, e))
        return {}
    for _, cls in inspect.getmembers(mod, inspect.isclass):
        # Don't try to register plugins imported into this module elsewhere
        if issubclass(cls, (Catalog, DataSource)):
            plugins[cls.name] = cls
    return plugins
[ "def", "load_plugins_from_module", "(", "module_name", ")", ":", "plugins", "=", "{", "}", "try", ":", "if", "module_name", ".", "endswith", "(", "'.py'", ")", ":", "import", "imp", "mod", "=", "imp", ".", "load_source", "(", "'module.name'", ",", "module_...
Imports a module and returns dictionary of discovered Intake plugins. Plugin classes are instantiated and added to the dictionary, keyed by the name attribute of the plugin object.
[ "Imports", "a", "module", "and", "returns", "dictionary", "of", "discovered", "Intake", "plugins", "." ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/discovery.py#L54-L76
train
224,300
intake/intake
intake/source/csv.py
CSVSource._set_pattern_columns
def _set_pattern_columns(self, path_column): """Get a column of values for each field in pattern """ try: # CategoricalDtype allows specifying known categories when # creating objects. It was added in pandas 0.21.0. from pandas.api.types import CategoricalDtype _HAS_CDT = True except ImportError: _HAS_CDT = False col = self._dataframe[path_column] paths = col.cat.categories column_by_field = {field: col.cat.codes.map(dict(enumerate(values))).astype( "category" if not _HAS_CDT else CategoricalDtype(set(values)) ) for field, values in reverse_formats(self.pattern, paths).items() } self._dataframe = self._dataframe.assign(**column_by_field)
python
def _set_pattern_columns(self, path_column):
    """Get a column of values for each field in pattern.

    Expands the categorical file-path column ``path_column`` of
    ``self._dataframe`` into one new categorical column per pattern field,
    assigning the result back onto ``self._dataframe``.
    """
    try:
        # CategoricalDtype allows specifying known categories when
        # creating objects. It was added in pandas 0.21.0.
        from pandas.api.types import CategoricalDtype
        _HAS_CDT = True
    except ImportError:
        _HAS_CDT = False
    # The path column is categorical: ``.categories`` holds the distinct
    # paths, ``.codes`` maps each row to its path's index.
    col = self._dataframe[path_column]
    paths = col.cat.categories
    # reverse_formats (project helper) yields {field: values}, with one
    # value per path, aligned with ``paths`` — which is why mapping each
    # row's path code through enumerate(values) recovers the field value.
    column_by_field = {field:
                       col.cat.codes.map(dict(enumerate(values))).astype(
                           # Older pandas: plain "category"; newer pandas:
                           # declare the known categories up front.
                           "category" if not _HAS_CDT else CategoricalDtype(set(values))
                       )
                       for field, values in reverse_formats(self.pattern, paths).items()
                       }
    self._dataframe = self._dataframe.assign(**column_by_field)
[ "def", "_set_pattern_columns", "(", "self", ",", "path_column", ")", ":", "try", ":", "# CategoricalDtype allows specifying known categories when", "# creating objects. It was added in pandas 0.21.0.", "from", "pandas", ".", "api", ".", "types", "import", "CategoricalDtype", ...
Get a column of values for each field in pattern
[ "Get", "a", "column", "of", "values", "for", "each", "field", "in", "pattern" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/csv.py#L58-L77
train
224,301
intake/intake
intake/source/csv.py
CSVSource._path_column
def _path_column(self): """Set ``include_path_column`` in csv_kwargs and returns path column name """ path_column = self._csv_kwargs.get('include_path_column') if path_column is None: # if path column name is not set by user, set to a unique string to # avoid conflicts path_column = unique_string() self._csv_kwargs['include_path_column'] = path_column elif isinstance(path_column, bool): path_column = 'path' self._csv_kwargs['include_path_column'] = path_column return path_column
python
def _path_column(self): """Set ``include_path_column`` in csv_kwargs and returns path column name """ path_column = self._csv_kwargs.get('include_path_column') if path_column is None: # if path column name is not set by user, set to a unique string to # avoid conflicts path_column = unique_string() self._csv_kwargs['include_path_column'] = path_column elif isinstance(path_column, bool): path_column = 'path' self._csv_kwargs['include_path_column'] = path_column return path_column
[ "def", "_path_column", "(", "self", ")", ":", "path_column", "=", "self", ".", "_csv_kwargs", ".", "get", "(", "'include_path_column'", ")", "if", "path_column", "is", "None", ":", "# if path column name is not set by user, set to a unique string to", "# avoid conflicts",...
Set ``include_path_column`` in csv_kwargs and returns path column name
[ "Set", "include_path_column", "in", "csv_kwargs", "and", "returns", "path", "column", "name" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/csv.py#L79-L92
train
224,302
intake/intake
intake/source/csv.py
CSVSource._open_dataset
def _open_dataset(self, urlpath): """Open dataset using dask and use pattern fields to set new columns """ import dask.dataframe if self.pattern is None: self._dataframe = dask.dataframe.read_csv( urlpath, storage_options=self._storage_options, **self._csv_kwargs) return if not (DASK_VERSION >= '0.19.0'): raise ValueError("Your version of dask is '{}'. " "The ability to include filenames in read_csv output " "(``include_path_column``) was added in 0.19.0, so " "pattern urlpaths are not supported.".format(DASK_VERSION)) drop_path_column = 'include_path_column' not in self._csv_kwargs path_column = self._path_column() self._dataframe = dask.dataframe.read_csv( urlpath, storage_options=self._storage_options, **self._csv_kwargs) # add the new columns to the dataframe self._set_pattern_columns(path_column) if drop_path_column: self._dataframe = self._dataframe.drop([path_column], axis=1)
python
def _open_dataset(self, urlpath):
    """Open dataset using dask and use pattern fields to set new columns.

    Parameters
    ----------
    urlpath : str or list
        Location(s) of the CSV data, passed through to
        ``dask.dataframe.read_csv``.
    """
    import dask.dataframe
    if self.pattern is None:
        # No path pattern: plain read, nothing else to do.
        self._dataframe = dask.dataframe.read_csv(
            urlpath, storage_options=self._storage_options, **self._csv_kwargs)
        return

    if not (DASK_VERSION >= '0.19.0'):
        # NOTE(review): this compares against a str — confirm DASK_VERSION
        # is a version object rather than a plain string, otherwise e.g.
        # '0.9.0' would lexically compare >= '0.19.0' and wrongly pass.
        raise ValueError("Your version of dask is '{}'. "
            "The ability to include filenames in read_csv output "
            "(``include_path_column``) was added in 0.19.0, so "
            "pattern urlpaths are not supported.".format(DASK_VERSION))

    # If the user did not request the path column themselves, it is only
    # needed internally and is dropped again at the end.
    drop_path_column = 'include_path_column' not in self._csv_kwargs
    path_column = self._path_column()

    self._dataframe = dask.dataframe.read_csv(
        urlpath, storage_options=self._storage_options, **self._csv_kwargs)

    # add the new columns to the dataframe
    self._set_pattern_columns(path_column)

    if drop_path_column:
        self._dataframe = self._dataframe.drop([path_column], axis=1)
[ "def", "_open_dataset", "(", "self", ",", "urlpath", ")", ":", "import", "dask", ".", "dataframe", "if", "self", ".", "pattern", "is", "None", ":", "self", ".", "_dataframe", "=", "dask", ".", "dataframe", ".", "read_csv", "(", "urlpath", ",", "storage_o...
Open dataset using dask and use pattern fields to set new columns
[ "Open", "dataset", "using", "dask", "and", "use", "pattern", "fields", "to", "set", "new", "columns" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/csv.py#L94-L121
train
224,303
intake/intake
intake/gui/catalog/search.py
Search.do_search
def do_search(self, arg=None): """Do search and close panel""" new_cats = [] for cat in self.cats: new_cat = cat.search(self.inputs.text, depth=self.inputs.depth) if len(list(new_cat)) > 0: new_cats.append(new_cat) if len(new_cats) > 0: self.done_callback(new_cats) self.visible = False
python
def do_search(self, arg=None):
    """Run the search over all catalogs, forward any non-empty results,
    and hide the search panel."""
    results = [
        cat.search(self.inputs.text, depth=self.inputs.depth)
        for cat in self.cats
    ]
    non_empty = [res for res in results if len(list(res)) > 0]
    if non_empty:
        self.done_callback(non_empty)
    self.visible = False
[ "def", "do_search", "(", "self", ",", "arg", "=", "None", ")", ":", "new_cats", "=", "[", "]", "for", "cat", "in", "self", ".", "cats", ":", "new_cat", "=", "cat", ".", "search", "(", "self", ".", "inputs", ".", "text", ",", "depth", "=", "self",...
Do search and close panel
[ "Do", "search", "and", "close", "panel" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/catalog/search.py#L126-L136
train
224,304
intake/intake
intake/container/ndarray.py
RemoteArray._persist
def _persist(source, path, component=None, storage_options=None, **kwargs): """Save array to local persistent store Makes a parquet dataset out of the data using zarr. This then becomes a data entry in the persisted datasets catalog. Only works locally for the moment. Parameters ---------- source: a DataSource instance to save name: str or None Key to refer to this persisted dataset by. If not given, will attempt to get from the source's name kwargs: passed on to zarr array creation, see """ from dask.array import to_zarr, from_array from ..source.zarr import ZarrArraySource try: arr = source.to_dask() except NotImplementedError: arr = from_array(source.read(), chunks=-1).rechunk('auto') to_zarr(arr, path, component=None, storage_options=storage_options, **kwargs) source = ZarrArraySource(path, storage_options, component) return source
python
def _persist(source, path, component=None, storage_options=None, **kwargs):
    """Save array to a local persistent store.

    Writes the array out as a zarr dataset at ``path``; this then becomes
    a data entry in the persisted datasets catalog. Only works locally
    for the moment.

    Parameters
    ----------
    source : DataSource instance
        The source whose array data should be persisted.
    path : str
        Location to write the zarr data to.
    component : str or None
        Path within the zarr hierarchy to store the data at.
    storage_options : dict or None
        Passed on for both writing and the returned source.
    kwargs : passed on to zarr array creation, see ``dask.array.to_zarr``

    Returns
    -------
    ZarrArraySource
        Source pointing at the newly written data.
    """
    from dask.array import to_zarr, from_array
    from ..source.zarr import ZarrArraySource
    try:
        arr = source.to_dask()
    except NotImplementedError:
        # Source has no dask representation; load eagerly and wrap.
        arr = from_array(source.read(), chunks=-1).rechunk('auto')
    # BUG FIX: ``component=None`` was previously hard-coded here, so the
    # ``component`` argument was ignored on write while still being used
    # for the returned source.
    to_zarr(arr, path, component=component, storage_options=storage_options,
            **kwargs)
    return ZarrArraySource(path, storage_options, component)
[ "def", "_persist", "(", "source", ",", "path", ",", "component", "=", "None", ",", "storage_options", "=", "None", ",", "*", "*", "kwargs", ")", ":", "from", "dask", ".", "array", "import", "to_zarr", ",", "from_array", "from", ".", ".", "source", ".",...
Save array to local persistent store Makes a parquet dataset out of the data using zarr. This then becomes a data entry in the persisted datasets catalog. Only works locally for the moment. Parameters ---------- source: a DataSource instance to save name: str or None Key to refer to this persisted dataset by. If not given, will attempt to get from the source's name kwargs: passed on to zarr array creation, see
[ "Save", "array", "to", "local", "persistent", "store" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/container/ndarray.py#L68-L94
train
224,305
intake/intake
intake/gui/source/defined_plots.py
DefinedPlots.source
def source(self, source): """When the source gets updated, update the the options in the selector""" BaseView.source.fset(self, source) if self.select: self.select.options = self.options
python
def source(self, source):
    """Setter: propagate the new source, then refresh the selector's
    options if a selector exists."""
    BaseView.source.fset(self, source)
    if not self.select:
        return
    self.select.options = self.options
[ "def", "source", "(", "self", ",", "source", ")", ":", "BaseView", ".", "source", ".", "fset", "(", "self", ",", "source", ")", "if", "self", ".", "select", ":", "self", ".", "select", ".", "options", "=", "self", ".", "options" ]
When the source gets updated, update the the options in the selector
[ "When", "the", "source", "gets", "updated", "update", "the", "the", "options", "in", "the", "selector" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/source/defined_plots.py#L87-L91
train
224,306
intake/intake
intake/source/textfiles.py
get_file
def get_file(f, decoder, read): """Serializable function to take an OpenFile object and read lines""" with f as f: if decoder is None: return list(f) else: d = f.read() if read else f out = decoder(d) if isinstance(out, (tuple, list)): return out else: return [out]
python
def get_file(f, decoder, read):
    """Serializable helper: open an OpenFile object and return its
    contents as a list (or tuple).

    Without a decoder, the open file's lines are returned. Otherwise the
    decoder is applied to ``f.read()`` when ``read`` is true, or to the
    open file object itself, and a non-sequence result is wrapped in a
    list.
    """
    with f as fh:
        if decoder is None:
            return list(fh)
        payload = fh.read() if read else fh
        decoded = decoder(payload)
        if isinstance(decoded, (tuple, list)):
            return decoded
        return [decoded]
[ "def", "get_file", "(", "f", ",", "decoder", ",", "read", ")", ":", "with", "f", "as", "f", ":", "if", "decoder", "is", "None", ":", "return", "list", "(", "f", ")", "else", ":", "d", "=", "f", ".", "read", "(", ")", "if", "read", "else", "f"...
Serializable function to take an OpenFile object and read lines
[ "Serializable", "function", "to", "take", "an", "OpenFile", "object", "and", "read", "lines" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/textfiles.py#L113-L124
train
224,307
intake/intake
intake/auth/base.py
BaseAuth.get_case_insensitive
def get_case_insensitive(self, dictionary, key, default=None): """Case-insensitive search of a dictionary for key. Returns the value if key match is found, otherwise default. """ lower_key = key.lower() for k, v in dictionary.items(): if lower_key == k.lower(): return v else: return default
python
def get_case_insensitive(self, dictionary, key, default=None):
    """Look up ``key`` in ``dictionary`` ignoring case.

    Returns the value of the first case-insensitive match; ``default``
    when no key matches.
    """
    needle = key.lower()
    for candidate, value in dictionary.items():
        if candidate.lower() == needle:
            return value
    return default
[ "def", "get_case_insensitive", "(", "self", ",", "dictionary", ",", "key", ",", "default", "=", "None", ")", ":", "lower_key", "=", "key", ".", "lower", "(", ")", "for", "k", ",", "v", "in", "dictionary", ".", "items", "(", ")", ":", "if", "lower_key...
Case-insensitive search of a dictionary for key. Returns the value if key match is found, otherwise default.
[ "Case", "-", "insensitive", "search", "of", "a", "dictionary", "for", "key", "." ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/auth/base.py#L46-L56
train
224,308
intake/intake
intake/gui/catalog/add.py
FileSelector.url
def url(self): """Path to local catalog file""" return os.path.join(self.path, self.main.value[0])
python
def url(self):
    """Path to the currently selected local catalog file."""
    selected = self.main.value[0]
    return os.path.join(self.path, selected)
[ "def", "url", "(", "self", ")", ":", "return", "os", ".", "path", ".", "join", "(", "self", ".", "path", ",", "self", ".", "main", ".", "value", "[", "0", "]", ")" ]
Path to local catalog file
[ "Path", "to", "local", "catalog", "file" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/catalog/add.py#L81-L83
train
224,309
intake/intake
intake/gui/catalog/add.py
FileSelector.validate
def validate(self, arg=None): """Check that inputted path is valid - set validator accordingly""" if os.path.isdir(self.path): self.validator.object = None else: self.validator.object = ICONS['error']
python
def validate(self, arg=None):
    """Show the error icon when the entered path is not a directory;
    clear it otherwise."""
    is_dir = os.path.isdir(self.path)
    self.validator.object = None if is_dir else ICONS['error']
[ "def", "validate", "(", "self", ",", "arg", "=", "None", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "self", ".", "path", ")", ":", "self", ".", "validator", ".", "object", "=", "None", "else", ":", "self", ".", "validator", ".", "objec...
Check that inputted path is valid - set validator accordingly
[ "Check", "that", "inputted", "path", "is", "valid", "-", "set", "validator", "accordingly" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/catalog/add.py#L91-L96
train
224,310
intake/intake
intake/gui/catalog/add.py
CatAdder.add_cat
def add_cat(self, arg=None): """Add cat and close panel""" try: self.done_callback(self.cat) self.visible = False except Exception as e: self.validator.object = ICONS['error'] raise e
python
def add_cat(self, arg=None):
    """Hand the selected catalog to ``done_callback`` and close the panel.

    Parameters
    ----------
    arg : unused
        Present so the method can be wired directly as a widget callback.

    Raises
    ------
    Exception
        Whatever ``done_callback`` raises; the error icon is shown first.
    """
    try:
        self.done_callback(self.cat)
        self.visible = False
    except Exception:
        self.validator.object = ICONS['error']
        # Bare ``raise`` preserves the original traceback; ``raise e``
        # would restart it at this frame.
        raise
[ "def", "add_cat", "(", "self", ",", "arg", "=", "None", ")", ":", "try", ":", "self", ".", "done_callback", "(", "self", ".", "cat", ")", "self", ".", "visible", "=", "False", "except", "Exception", "as", "e", ":", "self", ".", "validator", ".", "o...
Add cat and close panel
[ "Add", "cat", "and", "close", "panel" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/catalog/add.py#L248-L255
train
224,311
intake/intake
intake/gui/catalog/add.py
CatAdder.tab_change
def tab_change(self, event): """When tab changes remove error, and enable widget if on url tab""" self.remove_error() if event.new == 1: self.widget.disabled = False
python
def tab_change(self, event):
    """On tab switch: clear any error display; enable the widget when
    the url tab (index 1) is selected."""
    self.remove_error()
    on_url_tab = event.new == 1
    if on_url_tab:
        self.widget.disabled = False
[ "def", "tab_change", "(", "self", ",", "event", ")", ":", "self", ".", "remove_error", "(", ")", "if", "event", ".", "new", "==", "1", ":", "self", ".", "widget", ".", "disabled", "=", "False" ]
When tab changes remove error, and enable widget if on url tab
[ "When", "tab", "changes", "remove", "error", "and", "enable", "widget", "if", "on", "url", "tab" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/catalog/add.py#L261-L265
train
224,312
intake/intake
intake/gui/catalog/gui.py
CatGUI.callback
def callback(self, cats): """When a catalog is selected, enable widgets that depend on that condition and do done_callback""" enable = bool(cats) if not enable: # close search if it is visible self.search.visible = False enable_widget(self.search_widget, enable) enable_widget(self.remove_widget, enable) if self.done_callback: self.done_callback(cats)
python
def callback(self, cats):
    """React to a catalog selection: toggle dependent widgets and forward
    the selection.

    With no selection, the search panel is hidden and the search/remove
    widgets are disabled; the selection is always handed to
    ``done_callback`` when one is registered.
    """
    has_selection = bool(cats)
    if not has_selection:
        # close search if it is visible
        self.search.visible = False
    for widget in (self.search_widget, self.remove_widget):
        enable_widget(widget, has_selection)
    if self.done_callback:
        self.done_callback(cats)
[ "def", "callback", "(", "self", ",", "cats", ")", ":", "enable", "=", "bool", "(", "cats", ")", "if", "not", "enable", ":", "# close search if it is visible", "self", ".", "search", ".", "visible", "=", "False", "enable_widget", "(", "self", ".", "search_w...
When a catalog is selected, enable widgets that depend on that condition and do done_callback
[ "When", "a", "catalog", "is", "selected", "enable", "widgets", "that", "depend", "on", "that", "condition", "and", "do", "done_callback" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/catalog/gui.py#L119-L130
train
224,313
intake/intake
intake/gui/catalog/gui.py
CatGUI.on_click_search_widget
def on_click_search_widget(self, event): """ When the search control is toggled, set visibility and hand down cats""" self.search.cats = self.cats self.search.visible = event.new if self.search.visible: self.search.watchers.append( self.select.widget.link(self.search, value='cats'))
python
def on_click_search_widget(self, event):
    """Toggle the search panel's visibility and hand the current
    catalogs down to it, linking selection changes while visible."""
    self.search.cats = self.cats
    self.search.visible = event.new
    if not self.search.visible:
        return
    watcher = self.select.widget.link(self.search, value='cats')
    self.search.watchers.append(watcher)
[ "def", "on_click_search_widget", "(", "self", ",", "event", ")", ":", "self", ".", "search", ".", "cats", "=", "self", ".", "cats", "self", ".", "search", ".", "visible", "=", "event", ".", "new", "if", "self", ".", "search", ".", "visible", ":", "se...
When the search control is toggled, set visibility and hand down cats
[ "When", "the", "search", "control", "is", "toggled", "set", "visibility", "and", "hand", "down", "cats" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/catalog/gui.py#L132-L138
train
224,314
intake/intake
intake/utils.py
no_duplicates_constructor
def no_duplicates_constructor(loader, node, deep=False): """Check for duplicate keys while loading YAML https://gist.github.com/pypt/94d747fe5180851196eb """ mapping = {} for key_node, value_node in node.value: key = loader.construct_object(key_node, deep=deep) value = loader.construct_object(value_node, deep=deep) if key in mapping: from intake.catalog.exceptions import DuplicateKeyError raise DuplicateKeyError("while constructing a mapping", node.start_mark, "found duplicate key (%s)" % key, key_node.start_mark) mapping[key] = value return loader.construct_mapping(node, deep)
python
def no_duplicates_constructor(loader, node, deep=False):
    """Check for duplicate keys while loading YAML.

    https://gist.github.com/pypt/94d747fe5180851196eb

    Parameters
    ----------
    loader : yaml loader instance
    node : yaml mapping node being constructed
    deep : bool
        Passed through to ``construct_object``/``construct_mapping``.

    Raises
    ------
    DuplicateKeyError
        If the same key appears more than once in the mapping node.
    """
    # First pass: construct each key/value pair purely to detect duplicate
    # keys, which PyYAML's default constructor would silently collapse.
    mapping = {}
    for key_node, value_node in node.value:
        key = loader.construct_object(key_node, deep=deep)
        value = loader.construct_object(value_node, deep=deep)
        if key in mapping:
            # Local import — presumably to avoid an import cycle at module
            # load time; confirm against intake.catalog.exceptions.
            from intake.catalog.exceptions import DuplicateKeyError
            raise DuplicateKeyError("while constructing a mapping",
                                    node.start_mark,
                                    "found duplicate key (%s)" % key,
                                    key_node.start_mark)
        mapping[key] = value
    # No duplicates found: delegate to the default mapping construction.
    return loader.construct_mapping(node, deep)
[ "def", "no_duplicates_constructor", "(", "loader", ",", "node", ",", "deep", "=", "False", ")", ":", "mapping", "=", "{", "}", "for", "key_node", ",", "value_node", "in", "node", ".", "value", ":", "key", "=", "loader", ".", "construct_object", "(", "key...
Check for duplicate keys while loading YAML https://gist.github.com/pypt/94d747fe5180851196eb
[ "Check", "for", "duplicate", "keys", "while", "loading", "YAML" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/utils.py#L20-L39
train
224,315
intake/intake
intake/utils.py
classname
def classname(ob): """Get the object's class's name as package.module.Class""" import inspect if inspect.isclass(ob): return '.'.join([ob.__module__, ob.__name__]) else: return '.'.join([ob.__class__.__module__, ob.__class__.__name__])
python
def classname(ob):
    """Return ``package.module.Class`` for a class, or for an instance's
    class."""
    import inspect
    cls = ob if inspect.isclass(ob) else ob.__class__
    return f'{cls.__module__}.{cls.__name__}'
[ "def", "classname", "(", "ob", ")", ":", "import", "inspect", "if", "inspect", ".", "isclass", "(", "ob", ")", ":", "return", "'.'", ".", "join", "(", "[", "ob", ".", "__module__", ",", "ob", ".", "__name__", "]", ")", "else", ":", "return", "'.'",...
Get the object's class's name as package.module.Class
[ "Get", "the", "object", "s", "class", "s", "name", "as", "package", ".", "module", ".", "Class" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/utils.py#L62-L68
train
224,316
intake/intake
intake/utils.py
pretty_describe
def pretty_describe(object, nestedness=0, indent=2): """Maintain dict ordering - but make string version prettier""" if not isinstance(object, dict): return str(object) sep = f'\n{" " * nestedness * indent}' out = sep.join((f'{k}: {pretty_describe(v, nestedness + 1)}' for k, v in object.items())) if nestedness > 0 and out: return f'{sep}{out}' return out
python
def pretty_describe(object, nestedness=0, indent=2):
    """Render a (possibly nested) dict as an indented multi-line string,
    preserving key order; non-dict values are simply ``str()``-ed."""
    if not isinstance(object, dict):
        return str(object)
    sep = '\n' + ' ' * (nestedness * indent)
    lines = [f'{key}: {pretty_describe(value, nestedness + 1)}'
             for key, value in object.items()]
    out = sep.join(lines)
    if nestedness > 0 and out:
        # Nested non-empty dicts start on their own indented line.
        out = f'{sep}{out}'
    return out
[ "def", "pretty_describe", "(", "object", ",", "nestedness", "=", "0", ",", "indent", "=", "2", ")", ":", "if", "not", "isinstance", "(", "object", ",", "dict", ")", ":", "return", "str", "(", "object", ")", "sep", "=", "f'\\n{\" \" * nestedness * indent}'"...
Maintain dict ordering - but make string version prettier
[ "Maintain", "dict", "ordering", "-", "but", "make", "string", "version", "prettier" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/utils.py#L125-L133
train
224,317
intake/intake
intake/gui/gui.py
GUI.add
def add(self, *args, **kwargs): """Add to list of cats""" return self.cat.select.add(*args, **kwargs)
python
def add(self, *args, **kwargs):
    """Append catalog(s) to the list of cats, delegating to the catalog
    selector."""
    selector = self.cat.select
    return selector.add(*args, **kwargs)
[ "def", "add", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "cat", ".", "select", ".", "add", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
Add to list of cats
[ "Add", "to", "list", "of", "cats" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/gui.py#L74-L76
train
224,318
intake/intake
intake/gui/base.py
coerce_to_list
def coerce_to_list(items, preprocess=None): """Given an instance or list, coerce to list. With optional preprocessing. """ if not isinstance(items, list): items = [items] if preprocess: items = list(map(preprocess, items)) return items
python
def coerce_to_list(items, preprocess=None):
    """Wrap a lone instance in a list; optionally map ``preprocess`` over
    the result."""
    listed = items if isinstance(items, list) else [items]
    if preprocess is None:
        return listed
    return [preprocess(item) for item in listed]
[ "def", "coerce_to_list", "(", "items", ",", "preprocess", "=", "None", ")", ":", "if", "not", "isinstance", "(", "items", ",", "list", ")", ":", "items", "=", "[", "items", "]", "if", "preprocess", ":", "items", "=", "list", "(", "map", "(", "preproc...
Given an instance or list, coerce to list. With optional preprocessing.
[ "Given", "an", "instance", "or", "list", "coerce", "to", "list", "." ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/base.py#L25-L34
train
224,319
intake/intake
intake/gui/base.py
Base._repr_mimebundle_
def _repr_mimebundle_(self, *args, **kwargs): """Display in a notebook or a server""" try: if self.logo: p = pn.Row( self.logo_panel, self.panel, margin=0) return p._repr_mimebundle_(*args, **kwargs) else: return self.panel._repr_mimebundle_(*args, **kwargs) except: raise RuntimeError("Panel does not seem to be set up properly")
python
def _repr_mimebundle_(self, *args, **kwargs): """Display in a notebook or a server""" try: if self.logo: p = pn.Row( self.logo_panel, self.panel, margin=0) return p._repr_mimebundle_(*args, **kwargs) else: return self.panel._repr_mimebundle_(*args, **kwargs) except: raise RuntimeError("Panel does not seem to be set up properly")
[ "def", "_repr_mimebundle_", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ":", "if", "self", ".", "logo", ":", "p", "=", "pn", ".", "Row", "(", "self", ".", "logo_panel", ",", "self", ".", "panel", ",", "margin", "=", ...
Display in a notebook or a server
[ "Display", "in", "a", "notebook", "or", "a", "server" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/base.py#L78-L90
train
224,320
intake/intake
intake/gui/base.py
Base.unwatch
def unwatch(self): """Get rid of any lingering watchers and remove from list""" if self.watchers is not None: unwatched = [] for watcher in self.watchers: watcher.inst.param.unwatch(watcher) unwatched.append(watcher) self.watchers = [w for w in self.watchers if w not in unwatched]
python
def unwatch(self):
    """Detach any lingering watchers and remove them from the list."""
    if self.watchers is None:
        return
    detached = []
    for watcher in self.watchers:
        watcher.inst.param.unwatch(watcher)
        detached.append(watcher)
    # Keep only watchers that were not detached above.
    self.watchers = [w for w in self.watchers if w not in detached]
[ "def", "unwatch", "(", "self", ")", ":", "if", "self", ".", "watchers", "is", "not", "None", ":", "unwatched", "=", "[", "]", "for", "watcher", "in", "self", ".", "watchers", ":", "watcher", ".", "inst", ".", "param", ".", "unwatch", "(", "watcher", ...
Get rid of any lingering watchers and remove from list
[ "Get", "rid", "of", "any", "lingering", "watchers", "and", "remove", "from", "list" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/base.py#L114-L121
train
224,321
intake/intake
intake/gui/base.py
BaseSelector._create_options
def _create_options(self, items): """Helper method to create options from list, or instance. Applies preprocess method if available to create a uniform output """ return OrderedDict(map(lambda x: (x.name, x), coerce_to_list(items, self.preprocess)))
python
def _create_options(self, items):
    """Build an OrderedDict of ``name -> item`` from a list or a single
    instance, first running ``self.preprocess`` over the inputs for a
    uniform shape."""
    processed = coerce_to_list(items, self.preprocess)
    return OrderedDict((entry.name, entry) for entry in processed)
[ "def", "_create_options", "(", "self", ",", "items", ")", ":", "return", "OrderedDict", "(", "map", "(", "lambda", "x", ":", "(", "x", ".", "name", ",", "x", ")", ",", "coerce_to_list", "(", "items", ",", "self", ".", "preprocess", ")", ")", ")" ]
Helper method to create options from list, or instance. Applies preprocess method if available to create a uniform output
[ "Helper", "method", "to", "create", "options", "from", "list", "or", "instance", "." ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/base.py#L175-L182
train
224,322
intake/intake
intake/gui/base.py
BaseSelector.options
def options(self, new): """Set options from list, or instance of named item Over-writes old options """ options = self._create_options(new) if self.widget.value: self.widget.set_param(options=options, value=list(options.values())[:1]) else: self.widget.options = options self.widget.value = list(options.values())[:1]
python
def options(self, new): """Set options from list, or instance of named item Over-writes old options """ options = self._create_options(new) if self.widget.value: self.widget.set_param(options=options, value=list(options.values())[:1]) else: self.widget.options = options self.widget.value = list(options.values())[:1]
[ "def", "options", "(", "self", ",", "new", ")", ":", "options", "=", "self", ".", "_create_options", "(", "new", ")", "if", "self", ".", "widget", ".", "value", ":", "self", ".", "widget", ".", "set_param", "(", "options", "=", "options", ",", "value...
Set options from list, or instance of named item Over-writes old options
[ "Set", "options", "from", "list", "or", "instance", "of", "named", "item" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/base.py#L190-L200
train
224,323
intake/intake
intake/gui/base.py
BaseSelector.add
def add(self, items): """Add items to options""" options = self._create_options(items) for k, v in options.items(): if k in self.labels and v not in self.items: options.pop(k) count = 0 while f'{k}_{count}' in self.labels: count += 1 options[f'{k}_{count}'] = v self.widget.options.update(options) self.widget.param.trigger('options') self.widget.value = list(options.values())[:1]
python
def add(self, items): """Add items to options""" options = self._create_options(items) for k, v in options.items(): if k in self.labels and v not in self.items: options.pop(k) count = 0 while f'{k}_{count}' in self.labels: count += 1 options[f'{k}_{count}'] = v self.widget.options.update(options) self.widget.param.trigger('options') self.widget.value = list(options.values())[:1]
[ "def", "add", "(", "self", ",", "items", ")", ":", "options", "=", "self", ".", "_create_options", "(", "items", ")", "for", "k", ",", "v", "in", "options", ".", "items", "(", ")", ":", "if", "k", "in", "self", ".", "labels", "and", "v", "not", ...
Add items to options
[ "Add", "items", "to", "options" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/base.py#L202-L214
train
224,324
intake/intake
intake/gui/base.py
BaseSelector.remove
def remove(self, items): """Remove items from options""" items = coerce_to_list(items) new_options = {k: v for k, v in self.options.items() if v not in items} self.widget.options = new_options self.widget.param.trigger('options')
python
def remove(self, items): """Remove items from options""" items = coerce_to_list(items) new_options = {k: v for k, v in self.options.items() if v not in items} self.widget.options = new_options self.widget.param.trigger('options')
[ "def", "remove", "(", "self", ",", "items", ")", ":", "items", "=", "coerce_to_list", "(", "items", ")", "new_options", "=", "{", "k", ":", "v", "for", "k", ",", "v", "in", "self", ".", "options", ".", "items", "(", ")", "if", "v", "not", "in", ...
Remove items from options
[ "Remove", "items", "from", "options" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/base.py#L216-L221
train
224,325
intake/intake
intake/gui/base.py
BaseSelector.selected
def selected(self, new): """Set selected from list or instance of object or name. Over-writes existing selection """ def preprocess(item): if isinstance(item, str): return self.options[item] return item items = coerce_to_list(new, preprocess) self.widget.value = items
python
def selected(self, new): """Set selected from list or instance of object or name. Over-writes existing selection """ def preprocess(item): if isinstance(item, str): return self.options[item] return item items = coerce_to_list(new, preprocess) self.widget.value = items
[ "def", "selected", "(", "self", ",", "new", ")", ":", "def", "preprocess", "(", "item", ")", ":", "if", "isinstance", "(", "item", ",", "str", ")", ":", "return", "self", ".", "options", "[", "item", "]", "return", "item", "items", "=", "coerce_to_li...
Set selected from list or instance of object or name. Over-writes existing selection
[ "Set", "selected", "from", "list", "or", "instance", "of", "object", "or", "name", "." ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/base.py#L229-L239
train
224,326
intake/intake
intake/gui/base.py
BaseView.source
def source(self, source): """When the source gets updated, update the select widget""" if isinstance(source, list): # if source is a list, get first item or None source = source[0] if len(source) > 0 else None self._source = source
python
def source(self, source): """When the source gets updated, update the select widget""" if isinstance(source, list): # if source is a list, get first item or None source = source[0] if len(source) > 0 else None self._source = source
[ "def", "source", "(", "self", ",", "source", ")", ":", "if", "isinstance", "(", "source", ",", "list", ")", ":", "# if source is a list, get first item or None", "source", "=", "source", "[", "0", "]", "if", "len", "(", "source", ")", ">", "0", "else", "...
When the source gets updated, update the select widget
[ "When", "the", "source", "gets", "updated", "update", "the", "select", "widget" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/base.py#L268-L273
train
224,327
intake/intake
intake/gui/source/gui.py
SourceGUI.callback
def callback(self, sources): """When a source is selected, enable widgets that depend on that condition and do done_callback""" enable = bool(sources) if not enable: self.plot_widget.value = False enable_widget(self.plot_widget, enable) if self.done_callback: self.done_callback(sources)
python
def callback(self, sources): """When a source is selected, enable widgets that depend on that condition and do done_callback""" enable = bool(sources) if not enable: self.plot_widget.value = False enable_widget(self.plot_widget, enable) if self.done_callback: self.done_callback(sources)
[ "def", "callback", "(", "self", ",", "sources", ")", ":", "enable", "=", "bool", "(", "sources", ")", "if", "not", "enable", ":", "self", ".", "plot_widget", ".", "value", "=", "False", "enable_widget", "(", "self", ".", "plot_widget", ",", "enable", "...
When a source is selected, enable widgets that depend on that condition and do done_callback
[ "When", "a", "source", "is", "selected", "enable", "widgets", "that", "depend", "on", "that", "condition", "and", "do", "done_callback" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/source/gui.py#L112-L121
train
224,328
intake/intake
intake/gui/source/gui.py
SourceGUI.on_click_plot_widget
def on_click_plot_widget(self, event): """ When the plot control is toggled, set visibility and hand down source""" self.plot.source = self.sources self.plot.visible = event.new if self.plot.visible: self.plot.watchers.append( self.select.widget.link(self.plot, value='source'))
python
def on_click_plot_widget(self, event): """ When the plot control is toggled, set visibility and hand down source""" self.plot.source = self.sources self.plot.visible = event.new if self.plot.visible: self.plot.watchers.append( self.select.widget.link(self.plot, value='source'))
[ "def", "on_click_plot_widget", "(", "self", ",", "event", ")", ":", "self", ".", "plot", ".", "source", "=", "self", ".", "sources", "self", ".", "plot", ".", "visible", "=", "event", ".", "new", "if", "self", ".", "plot", ".", "visible", ":", "self"...
When the plot control is toggled, set visibility and hand down source
[ "When", "the", "plot", "control", "is", "toggled", "set", "visibility", "and", "hand", "down", "source" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/gui/source/gui.py#L123-L129
train
224,329
intake/intake
intake/source/cache.py
sanitize_path
def sanitize_path(path): """Utility for cleaning up paths.""" storage_option = infer_storage_options(path) protocol = storage_option['protocol'] if protocol in ('http', 'https'): # Most FSs remove the protocol but not HTTPFS. We need to strip # it to match properly. path = os.path.normpath(path.replace("{}://".format(protocol), '')) elif protocol == 'file': # Remove trailing slashes from file paths. path = os.path.normpath(path) # Remove colons path = path.replace(':', '') # Otherwise we just make sure that path is posix return make_path_posix(path)
python
def sanitize_path(path): """Utility for cleaning up paths.""" storage_option = infer_storage_options(path) protocol = storage_option['protocol'] if protocol in ('http', 'https'): # Most FSs remove the protocol but not HTTPFS. We need to strip # it to match properly. path = os.path.normpath(path.replace("{}://".format(protocol), '')) elif protocol == 'file': # Remove trailing slashes from file paths. path = os.path.normpath(path) # Remove colons path = path.replace(':', '') # Otherwise we just make sure that path is posix return make_path_posix(path)
[ "def", "sanitize_path", "(", "path", ")", ":", "storage_option", "=", "infer_storage_options", "(", "path", ")", "protocol", "=", "storage_option", "[", "'protocol'", "]", "if", "protocol", "in", "(", "'http'", ",", "'https'", ")", ":", "# Most FSs remove the pr...
Utility for cleaning up paths.
[ "Utility", "for", "cleaning", "up", "paths", "." ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/cache.py#L27-L43
train
224,330
intake/intake
intake/source/cache.py
_download
def _download(file_in, file_out, blocksize, output=False): """Read from input and write to output file in blocks""" with warnings.catch_warnings(): warnings.filterwarnings('ignore') if output: try: from tqdm.autonotebook import tqdm except ImportError: logger.warn("Cache progress bar requires tqdm to be installed:" " conda/pip install tqdm") output = False if output: try: file_size = file_in.fs.size(file_in.path) pbar_disabled = False except ValueError as err: logger.debug("File system error requesting size: {}".format(err)) file_size = 0 pbar_disabled = True for i in range(100): if i not in display: display.add(i) out = i break pbar = tqdm(total=file_size // 2 ** 20, leave=False, disable=pbar_disabled, position=out, desc=os.path.basename(file_out.path), mininterval=0.1, bar_format=r'{n}/|/{l_bar}') logger.debug("Caching {}".format(file_in.path)) with file_in as f1: with file_out as f2: data = True while data: data = f1.read(blocksize) f2.write(data) if output: pbar.update(len(data) // 2**20) if output: try: pbar.update(pbar.total - pbar.n) # force to full pbar.close() except Exception as e: logger.debug('tqdm exception: %s' % e) finally: display.remove(out)
python
def _download(file_in, file_out, blocksize, output=False): """Read from input and write to output file in blocks""" with warnings.catch_warnings(): warnings.filterwarnings('ignore') if output: try: from tqdm.autonotebook import tqdm except ImportError: logger.warn("Cache progress bar requires tqdm to be installed:" " conda/pip install tqdm") output = False if output: try: file_size = file_in.fs.size(file_in.path) pbar_disabled = False except ValueError as err: logger.debug("File system error requesting size: {}".format(err)) file_size = 0 pbar_disabled = True for i in range(100): if i not in display: display.add(i) out = i break pbar = tqdm(total=file_size // 2 ** 20, leave=False, disable=pbar_disabled, position=out, desc=os.path.basename(file_out.path), mininterval=0.1, bar_format=r'{n}/|/{l_bar}') logger.debug("Caching {}".format(file_in.path)) with file_in as f1: with file_out as f2: data = True while data: data = f1.read(blocksize) f2.write(data) if output: pbar.update(len(data) // 2**20) if output: try: pbar.update(pbar.total - pbar.n) # force to full pbar.close() except Exception as e: logger.debug('tqdm exception: %s' % e) finally: display.remove(out)
[ "def", "_download", "(", "file_in", ",", "file_out", ",", "blocksize", ",", "output", "=", "False", ")", ":", "with", "warnings", ".", "catch_warnings", "(", ")", ":", "warnings", ".", "filterwarnings", "(", "'ignore'", ")", "if", "output", ":", "try", "...
Read from input and write to output file in blocks
[ "Read", "from", "input", "and", "write", "to", "output", "file", "in", "blocks" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/cache.py#L259-L306
train
224,331
intake/intake
intake/source/cache.py
make_caches
def make_caches(driver, specs, catdir=None, cache_dir=None, storage_options={}): """ Creates Cache objects from the cache_specs provided in the catalog yaml file Parameters ---------- driver: str Name of the plugin that can load catalog entry specs: list Specification for caching the data source. """ if specs is None: return [] return [registry.get(spec['type'], FileCache)( driver, spec, catdir=catdir, cache_dir=cache_dir, storage_options=storage_options) for spec in specs]
python
def make_caches(driver, specs, catdir=None, cache_dir=None, storage_options={}): """ Creates Cache objects from the cache_specs provided in the catalog yaml file Parameters ---------- driver: str Name of the plugin that can load catalog entry specs: list Specification for caching the data source. """ if specs is None: return [] return [registry.get(spec['type'], FileCache)( driver, spec, catdir=catdir, cache_dir=cache_dir, storage_options=storage_options) for spec in specs]
[ "def", "make_caches", "(", "driver", ",", "specs", ",", "catdir", "=", "None", ",", "cache_dir", "=", "None", ",", "storage_options", "=", "{", "}", ")", ":", "if", "specs", "is", "None", ":", "return", "[", "]", "return", "[", "registry", ".", "get"...
Creates Cache objects from the cache_specs provided in the catalog yaml file Parameters ---------- driver: str Name of the plugin that can load catalog entry specs: list Specification for caching the data source.
[ "Creates", "Cache", "objects", "from", "the", "cache_specs", "provided", "in", "the", "catalog", "yaml", "file" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/cache.py#L540-L557
train
224,332
intake/intake
intake/source/cache.py
BaseCache.load
def load(self, urlpath, output=None, **kwargs): """ Downloads data from a given url, generates a hashed filename, logs metadata, and caches it locally. Parameters ---------- urlpath: str, location of data May be a local path, or remote path if including a protocol specifier such as ``'s3://'``. May include glob wildcards. output: bool Whether to show progress bars; turn off for testing Returns ------- List of local cache_paths to be opened instead of the remote file(s). If caching is disable, the urlpath is returned. """ if conf.get('cache_disabled', False): return [urlpath] self.output = output if output is not None else conf.get( 'cache_download_progress', True) cache_paths = self._from_metadata(urlpath) if cache_paths is None: files_in, files_out = self._make_files(urlpath) self._load(files_in, files_out, urlpath) cache_paths = self._from_metadata(urlpath) return cache_paths
python
def load(self, urlpath, output=None, **kwargs): """ Downloads data from a given url, generates a hashed filename, logs metadata, and caches it locally. Parameters ---------- urlpath: str, location of data May be a local path, or remote path if including a protocol specifier such as ``'s3://'``. May include glob wildcards. output: bool Whether to show progress bars; turn off for testing Returns ------- List of local cache_paths to be opened instead of the remote file(s). If caching is disable, the urlpath is returned. """ if conf.get('cache_disabled', False): return [urlpath] self.output = output if output is not None else conf.get( 'cache_download_progress', True) cache_paths = self._from_metadata(urlpath) if cache_paths is None: files_in, files_out = self._make_files(urlpath) self._load(files_in, files_out, urlpath) cache_paths = self._from_metadata(urlpath) return cache_paths
[ "def", "load", "(", "self", ",", "urlpath", ",", "output", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "conf", ".", "get", "(", "'cache_disabled'", ",", "False", ")", ":", "return", "[", "urlpath", "]", "self", ".", "output", "=", "output...
Downloads data from a given url, generates a hashed filename, logs metadata, and caches it locally. Parameters ---------- urlpath: str, location of data May be a local path, or remote path if including a protocol specifier such as ``'s3://'``. May include glob wildcards. output: bool Whether to show progress bars; turn off for testing Returns ------- List of local cache_paths to be opened instead of the remote file(s). If caching is disable, the urlpath is returned.
[ "Downloads", "data", "from", "a", "given", "url", "generates", "a", "hashed", "filename", "logs", "metadata", "and", "caches", "it", "locally", "." ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/cache.py#L133-L162
train
224,333
intake/intake
intake/source/cache.py
BaseCache._load
def _load(self, files_in, files_out, urlpath, meta=True): """Download a set of files""" import dask out = [] outnames = [] for file_in, file_out in zip(files_in, files_out): cache_path = file_out.path outnames.append(cache_path) # If `_munge_path` did not find a match we want to avoid # writing to the urlpath. if cache_path == urlpath: continue if not os.path.isfile(cache_path): logger.debug("Caching file: {}".format(file_in.path)) logger.debug("Original path: {}".format(urlpath)) logger.debug("Cached at: {}".format(cache_path)) if meta: self._log_metadata(urlpath, file_in.path, cache_path) ddown = dask.delayed(_download) out.append(ddown(file_in, file_out, self.blocksize, self.output)) dask.compute(*out) return outnames
python
def _load(self, files_in, files_out, urlpath, meta=True): """Download a set of files""" import dask out = [] outnames = [] for file_in, file_out in zip(files_in, files_out): cache_path = file_out.path outnames.append(cache_path) # If `_munge_path` did not find a match we want to avoid # writing to the urlpath. if cache_path == urlpath: continue if not os.path.isfile(cache_path): logger.debug("Caching file: {}".format(file_in.path)) logger.debug("Original path: {}".format(urlpath)) logger.debug("Cached at: {}".format(cache_path)) if meta: self._log_metadata(urlpath, file_in.path, cache_path) ddown = dask.delayed(_download) out.append(ddown(file_in, file_out, self.blocksize, self.output)) dask.compute(*out) return outnames
[ "def", "_load", "(", "self", ",", "files_in", ",", "files_out", ",", "urlpath", ",", "meta", "=", "True", ")", ":", "import", "dask", "out", "=", "[", "]", "outnames", "=", "[", "]", "for", "file_in", ",", "file_out", "in", "zip", "(", "files_in", ...
Download a set of files
[ "Download", "a", "set", "of", "files" ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/cache.py#L170-L194
train
224,334
intake/intake
intake/source/cache.py
BaseCache.clear_cache
def clear_cache(self, urlpath): """ Clears cache and metadata for a given urlpath. Parameters ---------- urlpath: str, location of data May be a local path, or remote path if including a protocol specifier such as ``'s3://'``. May include glob wildcards. """ cache_entries = self._metadata.pop(urlpath, []) # ignore if missing for cache_entry in cache_entries: try: os.remove(cache_entry['cache_path']) except (OSError, IOError): pass try: fn = os.path.dirname(cache_entry['cache_path']) os.rmdir(fn) except (OSError, IOError): logger.debug("Failed to remove cache directory: %s" % fn)
python
def clear_cache(self, urlpath): """ Clears cache and metadata for a given urlpath. Parameters ---------- urlpath: str, location of data May be a local path, or remote path if including a protocol specifier such as ``'s3://'``. May include glob wildcards. """ cache_entries = self._metadata.pop(urlpath, []) # ignore if missing for cache_entry in cache_entries: try: os.remove(cache_entry['cache_path']) except (OSError, IOError): pass try: fn = os.path.dirname(cache_entry['cache_path']) os.rmdir(fn) except (OSError, IOError): logger.debug("Failed to remove cache directory: %s" % fn)
[ "def", "clear_cache", "(", "self", ",", "urlpath", ")", ":", "cache_entries", "=", "self", ".", "_metadata", ".", "pop", "(", "urlpath", ",", "[", "]", ")", "# ignore if missing", "for", "cache_entry", "in", "cache_entries", ":", "try", ":", "os", ".", "...
Clears cache and metadata for a given urlpath. Parameters ---------- urlpath: str, location of data May be a local path, or remote path if including a protocol specifier such as ``'s3://'``. May include glob wildcards.
[ "Clears", "cache", "and", "metadata", "for", "a", "given", "urlpath", "." ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/cache.py#L215-L236
train
224,335
intake/intake
intake/source/cache.py
BaseCache.clear_all
def clear_all(self): """ Clears all cache and metadata. """ for urlpath in self._metadata.keys(): self.clear_cache(urlpath) # Safely clean up anything else. if not os.path.isdir(self._cache_dir): return for subdir in os.listdir(self._cache_dir): try: fn = posixpath.join(self._cache_dir, subdir) if os.path.isdir(fn): shutil.rmtree(fn) if os.path.isfile(fn): os.remove(fn) except (OSError, IOError) as e: logger.warning(str(e))
python
def clear_all(self): """ Clears all cache and metadata. """ for urlpath in self._metadata.keys(): self.clear_cache(urlpath) # Safely clean up anything else. if not os.path.isdir(self._cache_dir): return for subdir in os.listdir(self._cache_dir): try: fn = posixpath.join(self._cache_dir, subdir) if os.path.isdir(fn): shutil.rmtree(fn) if os.path.isfile(fn): os.remove(fn) except (OSError, IOError) as e: logger.warning(str(e))
[ "def", "clear_all", "(", "self", ")", ":", "for", "urlpath", "in", "self", ".", "_metadata", ".", "keys", "(", ")", ":", "self", ".", "clear_cache", "(", "urlpath", ")", "# Safely clean up anything else.", "if", "not", "os", ".", "path", ".", "isdir", "(...
Clears all cache and metadata.
[ "Clears", "all", "cache", "and", "metadata", "." ]
277b96bfdee39d8a3048ea5408c6d6716d568336
https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/source/cache.py#L238-L256
train
224,336
mottosso/Qt.py
membership.py
write_json
def write_json(dictionary, filename): """Write dictionary to JSON""" with open(filename, 'w') as data_file: json.dump(dictionary, data_file, indent=4, sort_keys=True) print('--> Wrote ' + os.path.basename(filename))
python
def write_json(dictionary, filename): """Write dictionary to JSON""" with open(filename, 'w') as data_file: json.dump(dictionary, data_file, indent=4, sort_keys=True) print('--> Wrote ' + os.path.basename(filename))
[ "def", "write_json", "(", "dictionary", ",", "filename", ")", ":", "with", "open", "(", "filename", ",", "'w'", ")", "as", "data_file", ":", "json", ".", "dump", "(", "dictionary", ",", "data_file", ",", "indent", "=", "4", ",", "sort_keys", "=", "True...
Write dictionary to JSON
[ "Write", "dictionary", "to", "JSON" ]
d88a0c1762ad90d1965008cc14c53504bbcc0061
https://github.com/mottosso/Qt.py/blob/d88a0c1762ad90d1965008cc14c53504bbcc0061/membership.py#L35-L39
train
224,337
mottosso/Qt.py
membership.py
compare
def compare(dicts): """Compare by iteration""" common_members = {} common_keys = reduce(lambda x, y: x & y, map(dict.keys, dicts)) for k in common_keys: common_members[k] = list( reduce(lambda x, y: x & y, [set(d[k]) for d in dicts])) return common_members
python
def compare(dicts): """Compare by iteration""" common_members = {} common_keys = reduce(lambda x, y: x & y, map(dict.keys, dicts)) for k in common_keys: common_members[k] = list( reduce(lambda x, y: x & y, [set(d[k]) for d in dicts])) return common_members
[ "def", "compare", "(", "dicts", ")", ":", "common_members", "=", "{", "}", "common_keys", "=", "reduce", "(", "lambda", "x", ",", "y", ":", "x", "&", "y", ",", "map", "(", "dict", ".", "keys", ",", "dicts", ")", ")", "for", "k", "in", "common_key...
Compare by iteration
[ "Compare", "by", "iteration" ]
d88a0c1762ad90d1965008cc14c53504bbcc0061
https://github.com/mottosso/Qt.py/blob/d88a0c1762ad90d1965008cc14c53504bbcc0061/membership.py#L42-L51
train
224,338
mottosso/Qt.py
membership.py
sort_common_members
def sort_common_members(): """Sorts the keys and members""" filename = PREFIX + '/common_members.json' sorted_json_data = {} json_data = read_json(filename) all_keys = [] for key, value in json_data.items(): all_keys.append(key) sorted_keys = sorted(all_keys) for key in sorted_keys: if len(json_data[key]) > 0: # Only add modules which have common members sorted_json_data[key] = sorted(json_data[key]) print('--> Sorted/cleaned ' + os.path.basename(filename)) write_json(sorted_json_data, filename)
python
def sort_common_members(): """Sorts the keys and members""" filename = PREFIX + '/common_members.json' sorted_json_data = {} json_data = read_json(filename) all_keys = [] for key, value in json_data.items(): all_keys.append(key) sorted_keys = sorted(all_keys) for key in sorted_keys: if len(json_data[key]) > 0: # Only add modules which have common members sorted_json_data[key] = sorted(json_data[key]) print('--> Sorted/cleaned ' + os.path.basename(filename)) write_json(sorted_json_data, filename)
[ "def", "sort_common_members", "(", ")", ":", "filename", "=", "PREFIX", "+", "'/common_members.json'", "sorted_json_data", "=", "{", "}", "json_data", "=", "read_json", "(", "filename", ")", "all_keys", "=", "[", "]", "for", "key", ",", "value", "in", "json_...
Sorts the keys and members
[ "Sorts", "the", "keys", "and", "members" ]
d88a0c1762ad90d1965008cc14c53504bbcc0061
https://github.com/mottosso/Qt.py/blob/d88a0c1762ad90d1965008cc14c53504bbcc0061/membership.py#L77-L96
train
224,339
mottosso/Qt.py
membership.py
generate_common_members
def generate_common_members(): """Generate JSON with commonly shared members""" pyside = read_json(PREFIX + '/PySide.json') pyside2 = read_json(PREFIX + '/PySide2.json') pyqt4 = read_json(PREFIX + '/PyQt4.json') pyqt5 = read_json(PREFIX + '/PyQt5.json') dicts = [pyside, pyside2, pyqt4, pyqt5] common_members = compare(dicts) write_json(common_members, PREFIX + '/common_members.json')
python
def generate_common_members(): """Generate JSON with commonly shared members""" pyside = read_json(PREFIX + '/PySide.json') pyside2 = read_json(PREFIX + '/PySide2.json') pyqt4 = read_json(PREFIX + '/PyQt4.json') pyqt5 = read_json(PREFIX + '/PyQt5.json') dicts = [pyside, pyside2, pyqt4, pyqt5] common_members = compare(dicts) write_json(common_members, PREFIX + '/common_members.json')
[ "def", "generate_common_members", "(", ")", ":", "pyside", "=", "read_json", "(", "PREFIX", "+", "'/PySide.json'", ")", "pyside2", "=", "read_json", "(", "PREFIX", "+", "'/PySide2.json'", ")", "pyqt4", "=", "read_json", "(", "PREFIX", "+", "'/PyQt4.json'", ")"...
Generate JSON with commonly shared members
[ "Generate", "JSON", "with", "commonly", "shared", "members" ]
d88a0c1762ad90d1965008cc14c53504bbcc0061
https://github.com/mottosso/Qt.py/blob/d88a0c1762ad90d1965008cc14c53504bbcc0061/membership.py#L99-L109
train
224,340
mottosso/Qt.py
caveats.py
parse
def parse(fname): """Return blocks of code as list of dicts Arguments: fname (str): Relative name of caveats file """ blocks = list() with io.open(fname, "r", encoding="utf-8") as f: in_block = False current_block = None current_header = "" for line in f: # Doctests are within a quadruple hashtag header. if line.startswith("#### "): current_header = line.rstrip() # The actuat test is within a fenced block. if line.startswith("```"): in_block = False if in_block: current_block.append(line) if line.startswith("```python"): in_block = True current_block = list() current_block.append(current_header) blocks.append(current_block) tests = list() for block in blocks: header = ( block[0].strip("# ") # Remove Markdown .rstrip() # Remove newline .lower() # PEP08 ) # Remove unsupported characters header = re.sub(r"\W", "_", header) # Adding "untested" anywhere in the first line of # the doctest excludes it from the test. if "untested" in block[1].lower(): continue data = re.sub(" ", "", block[1]) # Remove spaces data = ( data.strip("#") .rstrip() # Remove newline .split(",") ) binding, doctest_version = (data + [None])[:2] # Run tests on both Python 2 and 3, unless explicitly stated if doctest_version is not None: if doctest_version not in ("Python2", "Python3"): raise SyntaxError( "Invalid Python version:\n%s\n" "Python version must follow binding, e.g.\n" "# PyQt5, Python3" % doctest_version) active_version = "Python%i" % sys.version_info[0] if doctest_version != active_version: continue tests.append({ "header": header, "binding": binding, "body": block[2:] }) return tests
python
def parse(fname): """Return blocks of code as list of dicts Arguments: fname (str): Relative name of caveats file """ blocks = list() with io.open(fname, "r", encoding="utf-8") as f: in_block = False current_block = None current_header = "" for line in f: # Doctests are within a quadruple hashtag header. if line.startswith("#### "): current_header = line.rstrip() # The actuat test is within a fenced block. if line.startswith("```"): in_block = False if in_block: current_block.append(line) if line.startswith("```python"): in_block = True current_block = list() current_block.append(current_header) blocks.append(current_block) tests = list() for block in blocks: header = ( block[0].strip("# ") # Remove Markdown .rstrip() # Remove newline .lower() # PEP08 ) # Remove unsupported characters header = re.sub(r"\W", "_", header) # Adding "untested" anywhere in the first line of # the doctest excludes it from the test. if "untested" in block[1].lower(): continue data = re.sub(" ", "", block[1]) # Remove spaces data = ( data.strip("#") .rstrip() # Remove newline .split(",") ) binding, doctest_version = (data + [None])[:2] # Run tests on both Python 2 and 3, unless explicitly stated if doctest_version is not None: if doctest_version not in ("Python2", "Python3"): raise SyntaxError( "Invalid Python version:\n%s\n" "Python version must follow binding, e.g.\n" "# PyQt5, Python3" % doctest_version) active_version = "Python%i" % sys.version_info[0] if doctest_version != active_version: continue tests.append({ "header": header, "binding": binding, "body": block[2:] }) return tests
[ "def", "parse", "(", "fname", ")", ":", "blocks", "=", "list", "(", ")", "with", "io", ".", "open", "(", "fname", ",", "\"r\"", ",", "encoding", "=", "\"utf-8\"", ")", "as", "f", ":", "in_block", "=", "False", "current_block", "=", "None", "current_h...
Return blocks of code as list of dicts Arguments: fname (str): Relative name of caveats file
[ "Return", "blocks", "of", "code", "as", "list", "of", "dicts" ]
d88a0c1762ad90d1965008cc14c53504bbcc0061
https://github.com/mottosso/Qt.py/blob/d88a0c1762ad90d1965008cc14c53504bbcc0061/caveats.py#L6-L82
train
224,341
mottosso/Qt.py
Qt.py
_qInstallMessageHandler
def _qInstallMessageHandler(handler): """Install a message handler that works in all bindings Args: handler: A function that takes 3 arguments, or None """ def messageOutputHandler(*args): # In Qt4 bindings, message handlers are passed 2 arguments # In Qt5 bindings, message handlers are passed 3 arguments # The first argument is a QtMsgType # The last argument is the message to be printed # The Middle argument (if passed) is a QMessageLogContext if len(args) == 3: msgType, logContext, msg = args elif len(args) == 2: msgType, msg = args logContext = None else: raise TypeError( "handler expected 2 or 3 arguments, got {0}".format(len(args))) if isinstance(msg, bytes): # In python 3, some bindings pass a bytestring, which cannot be # used elsewhere. Decoding a python 2 or 3 bytestring object will # consistently return a unicode object. msg = msg.decode() handler(msgType, logContext, msg) passObject = messageOutputHandler if handler else handler if Qt.IsPySide or Qt.IsPyQt4: return Qt._QtCore.qInstallMsgHandler(passObject) elif Qt.IsPySide2 or Qt.IsPyQt5: return Qt._QtCore.qInstallMessageHandler(passObject)
python
def _qInstallMessageHandler(handler): """Install a message handler that works in all bindings Args: handler: A function that takes 3 arguments, or None """ def messageOutputHandler(*args): # In Qt4 bindings, message handlers are passed 2 arguments # In Qt5 bindings, message handlers are passed 3 arguments # The first argument is a QtMsgType # The last argument is the message to be printed # The Middle argument (if passed) is a QMessageLogContext if len(args) == 3: msgType, logContext, msg = args elif len(args) == 2: msgType, msg = args logContext = None else: raise TypeError( "handler expected 2 or 3 arguments, got {0}".format(len(args))) if isinstance(msg, bytes): # In python 3, some bindings pass a bytestring, which cannot be # used elsewhere. Decoding a python 2 or 3 bytestring object will # consistently return a unicode object. msg = msg.decode() handler(msgType, logContext, msg) passObject = messageOutputHandler if handler else handler if Qt.IsPySide or Qt.IsPyQt4: return Qt._QtCore.qInstallMsgHandler(passObject) elif Qt.IsPySide2 or Qt.IsPyQt5: return Qt._QtCore.qInstallMessageHandler(passObject)
[ "def", "_qInstallMessageHandler", "(", "handler", ")", ":", "def", "messageOutputHandler", "(", "*", "args", ")", ":", "# In Qt4 bindings, message handlers are passed 2 arguments", "# In Qt5 bindings, message handlers are passed 3 arguments", "# The first argument is a QtMsgType", "#...
Install a message handler that works in all bindings Args: handler: A function that takes 3 arguments, or None
[ "Install", "a", "message", "handler", "that", "works", "in", "all", "bindings" ]
d88a0c1762ad90d1965008cc14c53504bbcc0061
https://github.com/mottosso/Qt.py/blob/d88a0c1762ad90d1965008cc14c53504bbcc0061/Qt.py#L683-L716
train
224,342
mottosso/Qt.py
Qt.py
_import_sub_module
def _import_sub_module(module, name): """import_sub_module will mimic the function of importlib.import_module""" module = __import__(module.__name__ + "." + name) for level in name.split("."): module = getattr(module, level) return module
python
def _import_sub_module(module, name): """import_sub_module will mimic the function of importlib.import_module""" module = __import__(module.__name__ + "." + name) for level in name.split("."): module = getattr(module, level) return module
[ "def", "_import_sub_module", "(", "module", ",", "name", ")", ":", "module", "=", "__import__", "(", "module", ".", "__name__", "+", "\".\"", "+", "name", ")", "for", "level", "in", "name", ".", "split", "(", "\".\"", ")", ":", "module", "=", "getattr"...
import_sub_module will mimic the function of importlib.import_module
[ "import_sub_module", "will", "mimic", "the", "function", "of", "importlib", ".", "import_module" ]
d88a0c1762ad90d1965008cc14c53504bbcc0061
https://github.com/mottosso/Qt.py/blob/d88a0c1762ad90d1965008cc14c53504bbcc0061/Qt.py#L1191-L1196
train
224,343
mottosso/Qt.py
Qt.py
_setup
def _setup(module, extras): """Install common submodules""" Qt.__binding__ = module.__name__ for name in list(_common_members) + extras: try: submodule = _import_sub_module( module, name) except ImportError: try: # For extra modules like sip and shiboken that may not be # children of the binding. submodule = __import__(name) except ImportError: continue setattr(Qt, "_" + name, submodule) if name not in extras: # Store reference to original binding, # but don't store speciality modules # such as uic or QtUiTools setattr(Qt, name, _new_module(name))
python
def _setup(module, extras): """Install common submodules""" Qt.__binding__ = module.__name__ for name in list(_common_members) + extras: try: submodule = _import_sub_module( module, name) except ImportError: try: # For extra modules like sip and shiboken that may not be # children of the binding. submodule = __import__(name) except ImportError: continue setattr(Qt, "_" + name, submodule) if name not in extras: # Store reference to original binding, # but don't store speciality modules # such as uic or QtUiTools setattr(Qt, name, _new_module(name))
[ "def", "_setup", "(", "module", ",", "extras", ")", ":", "Qt", ".", "__binding__", "=", "module", ".", "__name__", "for", "name", "in", "list", "(", "_common_members", ")", "+", "extras", ":", "try", ":", "submodule", "=", "_import_sub_module", "(", "mod...
Install common submodules
[ "Install", "common", "submodules" ]
d88a0c1762ad90d1965008cc14c53504bbcc0061
https://github.com/mottosso/Qt.py/blob/d88a0c1762ad90d1965008cc14c53504bbcc0061/Qt.py#L1199-L1222
train
224,344
mottosso/Qt.py
Qt.py
_build_compatibility_members
def _build_compatibility_members(binding, decorators=None): """Apply `binding` to QtCompat Arguments: binding (str): Top level binding in _compatibility_members. decorators (dict, optional): Provides the ability to decorate the original Qt methods when needed by a binding. This can be used to change the returned value to a standard value. The key should be the classname, the value is a dict where the keys are the target method names, and the values are the decorator functions. """ decorators = decorators or dict() # Allow optional site-level customization of the compatibility members. # This method does not need to be implemented in QtSiteConfig. try: import QtSiteConfig except ImportError: pass else: if hasattr(QtSiteConfig, 'update_compatibility_decorators'): QtSiteConfig.update_compatibility_decorators(binding, decorators) _QtCompat = type("QtCompat", (object,), {}) for classname, bindings in _compatibility_members[binding].items(): attrs = {} for target, binding in bindings.items(): namespaces = binding.split('.') try: src_object = getattr(Qt, "_" + namespaces[0]) except AttributeError as e: _log("QtCompat: AttributeError: %s" % e) # Skip reassignment of non-existing members. # This can happen if a request was made to # rename a member that didn't exist, for example # if QtWidgets isn't available on the target platform. continue # Walk down any remaining namespace getting the object assuming # that if the first namespace exists the rest will exist. for namespace in namespaces[1:]: src_object = getattr(src_object, namespace) # decorate the Qt method if a decorator was provided. if target in decorators.get(classname, []): # staticmethod must be called on the decorated method to # prevent a TypeError being raised when the decorated method # is called. 
src_object = staticmethod( decorators[classname][target](src_object)) attrs[target] = src_object # Create the QtCompat class and install it into the namespace compat_class = type(classname, (_QtCompat,), attrs) setattr(Qt.QtCompat, classname, compat_class)
python
def _build_compatibility_members(binding, decorators=None): """Apply `binding` to QtCompat Arguments: binding (str): Top level binding in _compatibility_members. decorators (dict, optional): Provides the ability to decorate the original Qt methods when needed by a binding. This can be used to change the returned value to a standard value. The key should be the classname, the value is a dict where the keys are the target method names, and the values are the decorator functions. """ decorators = decorators or dict() # Allow optional site-level customization of the compatibility members. # This method does not need to be implemented in QtSiteConfig. try: import QtSiteConfig except ImportError: pass else: if hasattr(QtSiteConfig, 'update_compatibility_decorators'): QtSiteConfig.update_compatibility_decorators(binding, decorators) _QtCompat = type("QtCompat", (object,), {}) for classname, bindings in _compatibility_members[binding].items(): attrs = {} for target, binding in bindings.items(): namespaces = binding.split('.') try: src_object = getattr(Qt, "_" + namespaces[0]) except AttributeError as e: _log("QtCompat: AttributeError: %s" % e) # Skip reassignment of non-existing members. # This can happen if a request was made to # rename a member that didn't exist, for example # if QtWidgets isn't available on the target platform. continue # Walk down any remaining namespace getting the object assuming # that if the first namespace exists the rest will exist. for namespace in namespaces[1:]: src_object = getattr(src_object, namespace) # decorate the Qt method if a decorator was provided. if target in decorators.get(classname, []): # staticmethod must be called on the decorated method to # prevent a TypeError being raised when the decorated method # is called. 
src_object = staticmethod( decorators[classname][target](src_object)) attrs[target] = src_object # Create the QtCompat class and install it into the namespace compat_class = type(classname, (_QtCompat,), attrs) setattr(Qt.QtCompat, classname, compat_class)
[ "def", "_build_compatibility_members", "(", "binding", ",", "decorators", "=", "None", ")", ":", "decorators", "=", "decorators", "or", "dict", "(", ")", "# Allow optional site-level customization of the compatibility members.", "# This method does not need to be implemented in Q...
Apply `binding` to QtCompat Arguments: binding (str): Top level binding in _compatibility_members. decorators (dict, optional): Provides the ability to decorate the original Qt methods when needed by a binding. This can be used to change the returned value to a standard value. The key should be the classname, the value is a dict where the keys are the target method names, and the values are the decorator functions.
[ "Apply", "binding", "to", "QtCompat" ]
d88a0c1762ad90d1965008cc14c53504bbcc0061
https://github.com/mottosso/Qt.py/blob/d88a0c1762ad90d1965008cc14c53504bbcc0061/Qt.py#L1297-L1355
train
224,345
mottosso/Qt.py
Qt.py
_convert
def _convert(lines): """Convert compiled .ui file from PySide2 to Qt.py Arguments: lines (list): Each line of of .ui file Usage: >> with open("myui.py") as f: .. lines = _convert(f.readlines()) """ def parse(line): line = line.replace("from PySide2 import", "from Qt import QtCompat,") line = line.replace("QtWidgets.QApplication.translate", "QtCompat.translate") if "QtCore.SIGNAL" in line: raise NotImplementedError("QtCore.SIGNAL is missing from PyQt5 " "and so Qt.py does not support it: you " "should avoid defining signals inside " "your ui files.") return line parsed = list() for line in lines: line = parse(line) parsed.append(line) return parsed
python
def _convert(lines): """Convert compiled .ui file from PySide2 to Qt.py Arguments: lines (list): Each line of of .ui file Usage: >> with open("myui.py") as f: .. lines = _convert(f.readlines()) """ def parse(line): line = line.replace("from PySide2 import", "from Qt import QtCompat,") line = line.replace("QtWidgets.QApplication.translate", "QtCompat.translate") if "QtCore.SIGNAL" in line: raise NotImplementedError("QtCore.SIGNAL is missing from PyQt5 " "and so Qt.py does not support it: you " "should avoid defining signals inside " "your ui files.") return line parsed = list() for line in lines: line = parse(line) parsed.append(line) return parsed
[ "def", "_convert", "(", "lines", ")", ":", "def", "parse", "(", "line", ")", ":", "line", "=", "line", ".", "replace", "(", "\"from PySide2 import\"", ",", "\"from Qt import QtCompat,\"", ")", "line", "=", "line", ".", "replace", "(", "\"QtWidgets.QApplication...
Convert compiled .ui file from PySide2 to Qt.py Arguments: lines (list): Each line of of .ui file Usage: >> with open("myui.py") as f: .. lines = _convert(f.readlines())
[ "Convert", "compiled", ".", "ui", "file", "from", "PySide2", "to", "Qt", ".", "py" ]
d88a0c1762ad90d1965008cc14c53504bbcc0061
https://github.com/mottosso/Qt.py/blob/d88a0c1762ad90d1965008cc14c53504bbcc0061/Qt.py#L1595-L1623
train
224,346
mottosso/Qt.py
examples/QtSiteConfig/QtSiteConfig.py
update_compatibility_decorators
def update_compatibility_decorators(binding, decorators): """ This optional function is called by Qt.py to modify the decorators applied to QtCompat namespace objects. Arguments: binding (str): The Qt binding being wrapped by Qt.py decorators (dict): Maps specific decorator functions to QtCompat namespace methods. See Qt._build_compatibility_members for more info. """ def _widgetDecorator(some_function): def wrapper(*args, **kwargs): ret = some_function(*args, **kwargs) # Modifies the returned value so we can test that the # decorator works. return "Test: {}".format(ret) # preserve docstring and name of original function wrapper.__doc__ = some_function.__doc__ wrapper.__name__ = some_function.__name__ return wrapper # Assign a different decorator for the same method name on each class def _mainWindowDecorator(some_function): def wrapper(*args, **kwargs): ret = some_function(*args, **kwargs) # Modifies the returned value so we can test that the # decorator works. return "QMainWindow Test: {}".format(ret) # preserve docstring and name of original function wrapper.__doc__ = some_function.__doc__ wrapper.__name__ = some_function.__name__ return wrapper decorators.setdefault("QWidget", {})["windowTitleDecorator"] = ( _widgetDecorator ) decorators.setdefault("QMainWindow", {})["windowTitleDecorator"] = ( _mainWindowDecorator )
python
def update_compatibility_decorators(binding, decorators): """ This optional function is called by Qt.py to modify the decorators applied to QtCompat namespace objects. Arguments: binding (str): The Qt binding being wrapped by Qt.py decorators (dict): Maps specific decorator functions to QtCompat namespace methods. See Qt._build_compatibility_members for more info. """ def _widgetDecorator(some_function): def wrapper(*args, **kwargs): ret = some_function(*args, **kwargs) # Modifies the returned value so we can test that the # decorator works. return "Test: {}".format(ret) # preserve docstring and name of original function wrapper.__doc__ = some_function.__doc__ wrapper.__name__ = some_function.__name__ return wrapper # Assign a different decorator for the same method name on each class def _mainWindowDecorator(some_function): def wrapper(*args, **kwargs): ret = some_function(*args, **kwargs) # Modifies the returned value so we can test that the # decorator works. return "QMainWindow Test: {}".format(ret) # preserve docstring and name of original function wrapper.__doc__ = some_function.__doc__ wrapper.__name__ = some_function.__name__ return wrapper decorators.setdefault("QWidget", {})["windowTitleDecorator"] = ( _widgetDecorator ) decorators.setdefault("QMainWindow", {})["windowTitleDecorator"] = ( _mainWindowDecorator )
[ "def", "update_compatibility_decorators", "(", "binding", ",", "decorators", ")", ":", "def", "_widgetDecorator", "(", "some_function", ")", ":", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "ret", "=", "some_function", "(", "*", ...
This optional function is called by Qt.py to modify the decorators applied to QtCompat namespace objects. Arguments: binding (str): The Qt binding being wrapped by Qt.py decorators (dict): Maps specific decorator functions to QtCompat namespace methods. See Qt._build_compatibility_members for more info.
[ "This", "optional", "function", "is", "called", "by", "Qt", ".", "py", "to", "modify", "the", "decorators", "applied", "to", "QtCompat", "namespace", "objects", "." ]
d88a0c1762ad90d1965008cc14c53504bbcc0061
https://github.com/mottosso/Qt.py/blob/d88a0c1762ad90d1965008cc14c53504bbcc0061/examples/QtSiteConfig/QtSiteConfig.py#L53-L91
train
224,347
mottosso/Qt.py
examples/loadUi/baseinstance2.py
load_ui_type
def load_ui_type(uifile): """Pyside equivalent for the loadUiType function in PyQt. From the PyQt4 documentation: Load a Qt Designer .ui file and return a tuple of the generated form class and the Qt base class. These can then be used to create any number of instances of the user interface without having to parse the .ui file more than once. Note: Pyside lacks the "loadUiType" command, so we have to convert the ui file to py code in-memory first and then execute it in a special frame to retrieve the form_class. Args: uifile (str): Absolute path to .ui file Returns: tuple: the generated form class, the Qt base class """ import pysideuic import xml.etree.ElementTree as ElementTree from cStringIO import StringIO parsed = ElementTree.parse(uifile) widget_class = parsed.find('widget').get('class') form_class = parsed.find('class').text with open(uifile, 'r') as f: o = StringIO() frame = {} pysideuic.compileUi(f, o, indent=0) pyc = compile(o.getvalue(), '<string>', 'exec') exec(pyc) in frame # Fetch the base_class and form class based on their type in # the xml from designer form_class = frame['Ui_%s' % form_class] base_class = eval('QtWidgets.%s' % widget_class) return form_class, base_class
python
def load_ui_type(uifile): """Pyside equivalent for the loadUiType function in PyQt. From the PyQt4 documentation: Load a Qt Designer .ui file and return a tuple of the generated form class and the Qt base class. These can then be used to create any number of instances of the user interface without having to parse the .ui file more than once. Note: Pyside lacks the "loadUiType" command, so we have to convert the ui file to py code in-memory first and then execute it in a special frame to retrieve the form_class. Args: uifile (str): Absolute path to .ui file Returns: tuple: the generated form class, the Qt base class """ import pysideuic import xml.etree.ElementTree as ElementTree from cStringIO import StringIO parsed = ElementTree.parse(uifile) widget_class = parsed.find('widget').get('class') form_class = parsed.find('class').text with open(uifile, 'r') as f: o = StringIO() frame = {} pysideuic.compileUi(f, o, indent=0) pyc = compile(o.getvalue(), '<string>', 'exec') exec(pyc) in frame # Fetch the base_class and form class based on their type in # the xml from designer form_class = frame['Ui_%s' % form_class] base_class = eval('QtWidgets.%s' % widget_class) return form_class, base_class
[ "def", "load_ui_type", "(", "uifile", ")", ":", "import", "pysideuic", "import", "xml", ".", "etree", ".", "ElementTree", "as", "ElementTree", "from", "cStringIO", "import", "StringIO", "parsed", "=", "ElementTree", ".", "parse", "(", "uifile", ")", "widget_cl...
Pyside equivalent for the loadUiType function in PyQt. From the PyQt4 documentation: Load a Qt Designer .ui file and return a tuple of the generated form class and the Qt base class. These can then be used to create any number of instances of the user interface without having to parse the .ui file more than once. Note: Pyside lacks the "loadUiType" command, so we have to convert the ui file to py code in-memory first and then execute it in a special frame to retrieve the form_class. Args: uifile (str): Absolute path to .ui file Returns: tuple: the generated form class, the Qt base class
[ "Pyside", "equivalent", "for", "the", "loadUiType", "function", "in", "PyQt", "." ]
d88a0c1762ad90d1965008cc14c53504bbcc0061
https://github.com/mottosso/Qt.py/blob/d88a0c1762ad90d1965008cc14c53504bbcc0061/examples/loadUi/baseinstance2.py#L10-L51
train
224,348
mottosso/Qt.py
examples/loadUi/baseinstance2.py
pyside_load_ui
def pyside_load_ui(uifile, base_instance=None): """Provide PyQt4.uic.loadUi functionality to PySide Args: uifile (str): Absolute path to .ui file base_instance (QWidget): The widget into which UI widgets are loaded Note: pysideuic is required for this to work with PySide. This seems to work correctly in Maya as well as outside of it as opposed to other implementations which involve overriding QUiLoader. Returns: QWidget: the base instance """ form_class, base_class = load_ui_type(uifile) if not base_instance: typeName = form_class.__name__ finalType = type(typeName, (form_class, base_class), {}) base_instance = finalType() else: if not isinstance(base_instance, base_class): raise RuntimeError( 'The base_instance passed to loadUi does not inherit from' ' needed base type (%s)' % type(base_class)) typeName = type(base_instance).__name__ base_instance.__class__ = type(typeName, (form_class, type(base_instance)), {}) base_instance.setupUi(base_instance) return base_instance
python
def pyside_load_ui(uifile, base_instance=None): """Provide PyQt4.uic.loadUi functionality to PySide Args: uifile (str): Absolute path to .ui file base_instance (QWidget): The widget into which UI widgets are loaded Note: pysideuic is required for this to work with PySide. This seems to work correctly in Maya as well as outside of it as opposed to other implementations which involve overriding QUiLoader. Returns: QWidget: the base instance """ form_class, base_class = load_ui_type(uifile) if not base_instance: typeName = form_class.__name__ finalType = type(typeName, (form_class, base_class), {}) base_instance = finalType() else: if not isinstance(base_instance, base_class): raise RuntimeError( 'The base_instance passed to loadUi does not inherit from' ' needed base type (%s)' % type(base_class)) typeName = type(base_instance).__name__ base_instance.__class__ = type(typeName, (form_class, type(base_instance)), {}) base_instance.setupUi(base_instance) return base_instance
[ "def", "pyside_load_ui", "(", "uifile", ",", "base_instance", "=", "None", ")", ":", "form_class", ",", "base_class", "=", "load_ui_type", "(", "uifile", ")", "if", "not", "base_instance", ":", "typeName", "=", "form_class", ".", "__name__", "finalType", "=", ...
Provide PyQt4.uic.loadUi functionality to PySide Args: uifile (str): Absolute path to .ui file base_instance (QWidget): The widget into which UI widgets are loaded Note: pysideuic is required for this to work with PySide. This seems to work correctly in Maya as well as outside of it as opposed to other implementations which involve overriding QUiLoader. Returns: QWidget: the base instance
[ "Provide", "PyQt4", ".", "uic", ".", "loadUi", "functionality", "to", "PySide" ]
d88a0c1762ad90d1965008cc14c53504bbcc0061
https://github.com/mottosso/Qt.py/blob/d88a0c1762ad90d1965008cc14c53504bbcc0061/examples/loadUi/baseinstance2.py#L54-L89
train
224,349
Azure/azure-cosmos-python
samples/IndexManagement/Program.py
ExplicitlyExcludeFromIndex
def ExplicitlyExcludeFromIndex(client, database_id): """ The default index policy on a DocumentContainer will AUTOMATICALLY index ALL documents added. There may be scenarios where you want to exclude a specific doc from the index even though all other documents are being indexed automatically. This method demonstrates how to use an index directive to control this """ try: DeleteContainerIfExists(client, database_id, COLLECTION_ID) database_link = GetDatabaseLink(database_id) # collections = Query_Entities(client, 'collection', parent_link = database_link) # print(collections) # Create a collection with default index policy (i.e. automatic = true) created_Container = client.CreateContainer(database_link, {"id" : COLLECTION_ID}) print(created_Container) print("\n" + "-" * 25 + "\n1. Collection created with index policy") print_dictionary_items(created_Container["indexingPolicy"]) # Create a document and query on it immediately. # Will work as automatic indexing is still True collection_link = GetContainerLink(database_id, COLLECTION_ID) doc = client.CreateItem(collection_link, { "id" : "doc1", "orderId" : "order1" }) print("\n" + "-" * 25 + "Document doc1 created with order1" + "-" * 25) print(doc) query = { "query": "SELECT * FROM r WHERE r.orderId=@orderNo", "parameters": [ { "name":"@orderNo", "value": "order1" } ] } QueryDocumentsWithCustomQuery(client, collection_link, query) # Now, create a document but this time explictly exclude it from the collection using IndexingDirective # Then query for that document # Shoud NOT find it, because we excluded it from the index # BUT, the document is there and doing a ReadDocument by Id will prove it doc2 = client.CreateItem(collection_link, { "id" : "doc2", "orderId" : "order2" }, {'indexingDirective' : documents.IndexingDirective.Exclude}) print("\n" + "-" * 25 + "Document doc2 created with order2" + "-" * 25) print(doc2) query = { "query": "SELECT * FROM r WHERE r.orderId=@orderNo", "parameters": [ { "name":"@orderNo", 
"value": "order2" } ] } QueryDocumentsWithCustomQuery(client, collection_link, query) docRead = client.ReadItem(GetDocumentLink(database_id, COLLECTION_ID, "doc2")) print("Document read by ID: \n", docRead["id"]) # Cleanup client.DeleteContainer(collection_link) print("\n") except errors.HTTPFailure as e: if e.status_code == 409: print("Entity already exists") elif e.status_code == 404: print("Entity doesn't exist") else: raise
python
def ExplicitlyExcludeFromIndex(client, database_id): """ The default index policy on a DocumentContainer will AUTOMATICALLY index ALL documents added. There may be scenarios where you want to exclude a specific doc from the index even though all other documents are being indexed automatically. This method demonstrates how to use an index directive to control this """ try: DeleteContainerIfExists(client, database_id, COLLECTION_ID) database_link = GetDatabaseLink(database_id) # collections = Query_Entities(client, 'collection', parent_link = database_link) # print(collections) # Create a collection with default index policy (i.e. automatic = true) created_Container = client.CreateContainer(database_link, {"id" : COLLECTION_ID}) print(created_Container) print("\n" + "-" * 25 + "\n1. Collection created with index policy") print_dictionary_items(created_Container["indexingPolicy"]) # Create a document and query on it immediately. # Will work as automatic indexing is still True collection_link = GetContainerLink(database_id, COLLECTION_ID) doc = client.CreateItem(collection_link, { "id" : "doc1", "orderId" : "order1" }) print("\n" + "-" * 25 + "Document doc1 created with order1" + "-" * 25) print(doc) query = { "query": "SELECT * FROM r WHERE r.orderId=@orderNo", "parameters": [ { "name":"@orderNo", "value": "order1" } ] } QueryDocumentsWithCustomQuery(client, collection_link, query) # Now, create a document but this time explictly exclude it from the collection using IndexingDirective # Then query for that document # Shoud NOT find it, because we excluded it from the index # BUT, the document is there and doing a ReadDocument by Id will prove it doc2 = client.CreateItem(collection_link, { "id" : "doc2", "orderId" : "order2" }, {'indexingDirective' : documents.IndexingDirective.Exclude}) print("\n" + "-" * 25 + "Document doc2 created with order2" + "-" * 25) print(doc2) query = { "query": "SELECT * FROM r WHERE r.orderId=@orderNo", "parameters": [ { "name":"@orderNo", 
"value": "order2" } ] } QueryDocumentsWithCustomQuery(client, collection_link, query) docRead = client.ReadItem(GetDocumentLink(database_id, COLLECTION_ID, "doc2")) print("Document read by ID: \n", docRead["id"]) # Cleanup client.DeleteContainer(collection_link) print("\n") except errors.HTTPFailure as e: if e.status_code == 409: print("Entity already exists") elif e.status_code == 404: print("Entity doesn't exist") else: raise
[ "def", "ExplicitlyExcludeFromIndex", "(", "client", ",", "database_id", ")", ":", "try", ":", "DeleteContainerIfExists", "(", "client", ",", "database_id", ",", "COLLECTION_ID", ")", "database_link", "=", "GetDatabaseLink", "(", "database_id", ")", "# collections = Qu...
The default index policy on a DocumentContainer will AUTOMATICALLY index ALL documents added. There may be scenarios where you want to exclude a specific doc from the index even though all other documents are being indexed automatically. This method demonstrates how to use an index directive to control this
[ "The", "default", "index", "policy", "on", "a", "DocumentContainer", "will", "AUTOMATICALLY", "index", "ALL", "documents", "added", ".", "There", "may", "be", "scenarios", "where", "you", "want", "to", "exclude", "a", "specific", "doc", "from", "the", "index",...
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/samples/IndexManagement/Program.py#L171-L231
train
224,350
Azure/azure-cosmos-python
samples/IndexManagement/Program.py
ExcludePathsFromIndex
def ExcludePathsFromIndex(client, database_id): """The default behavior is for Cosmos to index every attribute in every document automatically. There are times when a document contains large amounts of information, in deeply nested structures that you know you will never search on. In extreme cases like this, you can exclude paths from the index to save on storage cost, improve write performance and also improve read performance because the index is smaller This method demonstrates how to set excludedPaths within indexingPolicy """ try: DeleteContainerIfExists(client, database_id, COLLECTION_ID) database_link = GetDatabaseLink(database_id) # collections = Query_Entities(client, 'collection', parent_link = database_link) # print(collections) doc_with_nested_structures = { "id" : "doc1", "foo" : "bar", "metaData" : "meta", "subDoc" : { "searchable" : "searchable", "nonSearchable" : "value" }, "excludedNode" : { "subExcluded" : "something", "subExcludedNode" : { "someProperty" : "value" } } } collection_to_create = { "id" : COLLECTION_ID , "indexingPolicy" : { "includedPaths" : [ {'path' : "/*"} ], # Special mandatory path of "/*" required to denote include entire tree "excludedPaths" : [ {'path' : "/metaData/*"}, # exclude metaData node, and anything under it {'path' : "/subDoc/nonSearchable/*"}, # exclude ONLY a part of subDoc {'path' : "/\"excludedNode\"/*"} # exclude excludedNode node, and anything under it ] } } print(collection_to_create) print(doc_with_nested_structures) # Create a collection with the defined properties # The effect of the above IndexingPolicy is that only id, foo, and the subDoc/searchable are indexed created_Container = client.CreateContainer(database_link, collection_to_create) print(created_Container) print("\n" + "-" * 25 + "\n4. 
Collection created with index policy") print_dictionary_items(created_Container["indexingPolicy"]) # The effect of the above IndexingPolicy is that only id, foo, and the subDoc/searchable are indexed collection_link = GetContainerLink(database_id, COLLECTION_ID) doc = client.CreateItem(collection_link, doc_with_nested_structures) print("\n" + "-" * 25 + "Document doc1 created with nested structures" + "-" * 25) print(doc) # Querying for a document on either metaData or /subDoc/subSubDoc/someProperty > fail because these paths were excluded and they raise a BadRequest(400) Exception query = {"query": "SELECT * FROM r WHERE r.metaData=@desiredValue", "parameters" : [{ "name":"@desiredValue", "value": "meta" }]} QueryDocumentsWithCustomQuery(client, collection_link, query) query = {"query": "SELECT * FROM r WHERE r.subDoc.nonSearchable=@desiredValue", "parameters" : [{ "name":"@desiredValue", "value": "value" }]} QueryDocumentsWithCustomQuery(client, collection_link, query) query = {"query": "SELECT * FROM r WHERE r.excludedNode.subExcludedNode.someProperty=@desiredValue", "parameters" : [{ "name":"@desiredValue", "value": "value" }]} QueryDocumentsWithCustomQuery(client, collection_link, query) # Querying for a document using foo, or even subDoc/searchable > succeed because they were not excluded query = {"query": "SELECT * FROM r WHERE r.foo=@desiredValue", "parameters" : [{ "name":"@desiredValue", "value": "bar" }]} QueryDocumentsWithCustomQuery(client, collection_link, query) query = {"query": "SELECT * FROM r WHERE r.subDoc.searchable=@desiredValue", "parameters" : [{ "name":"@desiredValue", "value": "searchable" }]} QueryDocumentsWithCustomQuery(client, collection_link, query) # Cleanup client.DeleteContainer(collection_link) print("\n") except errors.HTTPFailure as e: if e.status_code == 409: print("Entity already exists") elif e.status_code == 404: print("Entity doesn't exist") else: raise
python
def ExcludePathsFromIndex(client, database_id): """The default behavior is for Cosmos to index every attribute in every document automatically. There are times when a document contains large amounts of information, in deeply nested structures that you know you will never search on. In extreme cases like this, you can exclude paths from the index to save on storage cost, improve write performance and also improve read performance because the index is smaller This method demonstrates how to set excludedPaths within indexingPolicy """ try: DeleteContainerIfExists(client, database_id, COLLECTION_ID) database_link = GetDatabaseLink(database_id) # collections = Query_Entities(client, 'collection', parent_link = database_link) # print(collections) doc_with_nested_structures = { "id" : "doc1", "foo" : "bar", "metaData" : "meta", "subDoc" : { "searchable" : "searchable", "nonSearchable" : "value" }, "excludedNode" : { "subExcluded" : "something", "subExcludedNode" : { "someProperty" : "value" } } } collection_to_create = { "id" : COLLECTION_ID , "indexingPolicy" : { "includedPaths" : [ {'path' : "/*"} ], # Special mandatory path of "/*" required to denote include entire tree "excludedPaths" : [ {'path' : "/metaData/*"}, # exclude metaData node, and anything under it {'path' : "/subDoc/nonSearchable/*"}, # exclude ONLY a part of subDoc {'path' : "/\"excludedNode\"/*"} # exclude excludedNode node, and anything under it ] } } print(collection_to_create) print(doc_with_nested_structures) # Create a collection with the defined properties # The effect of the above IndexingPolicy is that only id, foo, and the subDoc/searchable are indexed created_Container = client.CreateContainer(database_link, collection_to_create) print(created_Container) print("\n" + "-" * 25 + "\n4. 
Collection created with index policy") print_dictionary_items(created_Container["indexingPolicy"]) # The effect of the above IndexingPolicy is that only id, foo, and the subDoc/searchable are indexed collection_link = GetContainerLink(database_id, COLLECTION_ID) doc = client.CreateItem(collection_link, doc_with_nested_structures) print("\n" + "-" * 25 + "Document doc1 created with nested structures" + "-" * 25) print(doc) # Querying for a document on either metaData or /subDoc/subSubDoc/someProperty > fail because these paths were excluded and they raise a BadRequest(400) Exception query = {"query": "SELECT * FROM r WHERE r.metaData=@desiredValue", "parameters" : [{ "name":"@desiredValue", "value": "meta" }]} QueryDocumentsWithCustomQuery(client, collection_link, query) query = {"query": "SELECT * FROM r WHERE r.subDoc.nonSearchable=@desiredValue", "parameters" : [{ "name":"@desiredValue", "value": "value" }]} QueryDocumentsWithCustomQuery(client, collection_link, query) query = {"query": "SELECT * FROM r WHERE r.excludedNode.subExcludedNode.someProperty=@desiredValue", "parameters" : [{ "name":"@desiredValue", "value": "value" }]} QueryDocumentsWithCustomQuery(client, collection_link, query) # Querying for a document using foo, or even subDoc/searchable > succeed because they were not excluded query = {"query": "SELECT * FROM r WHERE r.foo=@desiredValue", "parameters" : [{ "name":"@desiredValue", "value": "bar" }]} QueryDocumentsWithCustomQuery(client, collection_link, query) query = {"query": "SELECT * FROM r WHERE r.subDoc.searchable=@desiredValue", "parameters" : [{ "name":"@desiredValue", "value": "searchable" }]} QueryDocumentsWithCustomQuery(client, collection_link, query) # Cleanup client.DeleteContainer(collection_link) print("\n") except errors.HTTPFailure as e: if e.status_code == 409: print("Entity already exists") elif e.status_code == 404: print("Entity doesn't exist") else: raise
[ "def", "ExcludePathsFromIndex", "(", "client", ",", "database_id", ")", ":", "try", ":", "DeleteContainerIfExists", "(", "client", ",", "database_id", ",", "COLLECTION_ID", ")", "database_link", "=", "GetDatabaseLink", "(", "database_id", ")", "# collections = Query_E...
The default behavior is for Cosmos to index every attribute in every document automatically. There are times when a document contains large amounts of information, in deeply nested structures that you know you will never search on. In extreme cases like this, you can exclude paths from the index to save on storage cost, improve write performance and also improve read performance because the index is smaller This method demonstrates how to set excludedPaths within indexingPolicy
[ "The", "default", "behavior", "is", "for", "Cosmos", "to", "index", "every", "attribute", "in", "every", "document", "automatically", ".", "There", "are", "times", "when", "a", "document", "contains", "large", "amounts", "of", "information", "in", "deeply", "n...
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/samples/IndexManagement/Program.py#L294-L367
train
224,351
Azure/azure-cosmos-python
samples/IndexManagement/Program.py
UseRangeIndexesOnStrings
def UseRangeIndexesOnStrings(client, database_id): """Showing how range queries can be performed even on strings. """ try: DeleteContainerIfExists(client, database_id, COLLECTION_ID) database_link = GetDatabaseLink(database_id) # collections = Query_Entities(client, 'collection', parent_link = database_link) # print(collections) # Use range indexes on strings # This is how you can specify a range index on strings (and numbers) for all properties. # This is the recommended indexing policy for collections. i.e. precision -1 #indexingPolicy = { # 'indexingPolicy': { # 'includedPaths': [ # { # 'indexes': [ # { # 'kind': documents.IndexKind.Range, # 'dataType': documents.DataType.String, # 'precision': -1 # } # ] # } # ] # } #} # For demo purposes, we are going to use the default (range on numbers, hash on strings) for the whole document (/* ) # and just include a range index on strings for the "region". collection_definition = { 'id': COLLECTION_ID, 'indexingPolicy': { 'includedPaths': [ { 'path': '/region/?', 'indexes': [ { 'kind': documents.IndexKind.Range, 'dataType': documents.DataType.String, 'precision': -1 } ] }, { 'path': '/*' } ] } } created_Container = client.CreateContainer(database_link, collection_definition) print(created_Container) print("\n" + "-" * 25 + "\n6. Collection created with index policy") print_dictionary_items(created_Container["indexingPolicy"]) collection_link = GetContainerLink(database_id, COLLECTION_ID) client.CreateItem(collection_link, { "id" : "doc1", "region" : "USA" }) client.CreateItem(collection_link, { "id" : "doc2", "region" : "UK" }) client.CreateItem(collection_link, { "id" : "doc3", "region" : "Armenia" }) client.CreateItem(collection_link, { "id" : "doc4", "region" : "Egypt" }) # Now ordering against region is allowed. 
You can run the following query query = { "query" : "SELECT * FROM r ORDER BY r.region" } message = "Documents ordered by region" QueryDocumentsWithCustomQuery(client, collection_link, query, message) # You can also perform filters against string comparison like >= 'UK'. Note that you can perform a prefix query, # the equivalent of LIKE 'U%' (is >= 'U' AND < 'U') query = { "query" : "SELECT * FROM r WHERE r.region >= 'U'" } message = "Documents with region begining with U" QueryDocumentsWithCustomQuery(client, collection_link, query, message) # Cleanup client.DeleteContainer(collection_link) print("\n") except errors.HTTPFailure as e: if e.status_code == 409: print("Entity already exists") elif e.status_code == 404: print("Entity doesn't exist") else: raise
python
def UseRangeIndexesOnStrings(client, database_id): """Showing how range queries can be performed even on strings. """ try: DeleteContainerIfExists(client, database_id, COLLECTION_ID) database_link = GetDatabaseLink(database_id) # collections = Query_Entities(client, 'collection', parent_link = database_link) # print(collections) # Use range indexes on strings # This is how you can specify a range index on strings (and numbers) for all properties. # This is the recommended indexing policy for collections. i.e. precision -1 #indexingPolicy = { # 'indexingPolicy': { # 'includedPaths': [ # { # 'indexes': [ # { # 'kind': documents.IndexKind.Range, # 'dataType': documents.DataType.String, # 'precision': -1 # } # ] # } # ] # } #} # For demo purposes, we are going to use the default (range on numbers, hash on strings) for the whole document (/* ) # and just include a range index on strings for the "region". collection_definition = { 'id': COLLECTION_ID, 'indexingPolicy': { 'includedPaths': [ { 'path': '/region/?', 'indexes': [ { 'kind': documents.IndexKind.Range, 'dataType': documents.DataType.String, 'precision': -1 } ] }, { 'path': '/*' } ] } } created_Container = client.CreateContainer(database_link, collection_definition) print(created_Container) print("\n" + "-" * 25 + "\n6. Collection created with index policy") print_dictionary_items(created_Container["indexingPolicy"]) collection_link = GetContainerLink(database_id, COLLECTION_ID) client.CreateItem(collection_link, { "id" : "doc1", "region" : "USA" }) client.CreateItem(collection_link, { "id" : "doc2", "region" : "UK" }) client.CreateItem(collection_link, { "id" : "doc3", "region" : "Armenia" }) client.CreateItem(collection_link, { "id" : "doc4", "region" : "Egypt" }) # Now ordering against region is allowed. 
You can run the following query query = { "query" : "SELECT * FROM r ORDER BY r.region" } message = "Documents ordered by region" QueryDocumentsWithCustomQuery(client, collection_link, query, message) # You can also perform filters against string comparison like >= 'UK'. Note that you can perform a prefix query, # the equivalent of LIKE 'U%' (is >= 'U' AND < 'U') query = { "query" : "SELECT * FROM r WHERE r.region >= 'U'" } message = "Documents with region begining with U" QueryDocumentsWithCustomQuery(client, collection_link, query, message) # Cleanup client.DeleteContainer(collection_link) print("\n") except errors.HTTPFailure as e: if e.status_code == 409: print("Entity already exists") elif e.status_code == 404: print("Entity doesn't exist") else: raise
[ "def", "UseRangeIndexesOnStrings", "(", "client", ",", "database_id", ")", ":", "try", ":", "DeleteContainerIfExists", "(", "client", ",", "database_id", ",", "COLLECTION_ID", ")", "database_link", "=", "GetDatabaseLink", "(", "database_id", ")", "# collections = Quer...
Showing how range queries can be performed even on strings.
[ "Showing", "how", "range", "queries", "can", "be", "performed", "even", "on", "strings", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/samples/IndexManagement/Program.py#L428-L512
train
224,352
Azure/azure-cosmos-python
azure/cosmos/range_partition_resolver.py
RangePartitionResolver.ResolveForCreate
def ResolveForCreate(self, document): """Resolves the collection for creating the document based on the partition key. :param dict document: The document to be created. :return: Collection Self link or Name based link which should handle the Create operation. :rtype: str """ if document is None: raise ValueError("document is None.") partition_key = self.partition_key_extractor(document) containing_range = self._GetContainingRange(partition_key) if containing_range is None: raise ValueError("A containing range for " + str(partition_key) + " doesn't exist in the partition map.") return self.partition_map.get(containing_range)
python
def ResolveForCreate(self, document): """Resolves the collection for creating the document based on the partition key. :param dict document: The document to be created. :return: Collection Self link or Name based link which should handle the Create operation. :rtype: str """ if document is None: raise ValueError("document is None.") partition_key = self.partition_key_extractor(document) containing_range = self._GetContainingRange(partition_key) if containing_range is None: raise ValueError("A containing range for " + str(partition_key) + " doesn't exist in the partition map.") return self.partition_map.get(containing_range)
[ "def", "ResolveForCreate", "(", "self", ",", "document", ")", ":", "if", "document", "is", "None", ":", "raise", "ValueError", "(", "\"document is None.\"", ")", "partition_key", "=", "self", ".", "partition_key_extractor", "(", "document", ")", "containing_range"...
Resolves the collection for creating the document based on the partition key. :param dict document: The document to be created. :return: Collection Self link or Name based link which should handle the Create operation. :rtype: str
[ "Resolves", "the", "collection", "for", "creating", "the", "document", "based", "on", "the", "partition", "key", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/range_partition_resolver.py#L46-L66
train
224,353
Azure/azure-cosmos-python
azure/cosmos/range_partition_resolver.py
RangePartitionResolver._GetContainingRange
def _GetContainingRange(self, partition_key): """Gets the containing range based on the partition key. """ for keyrange in self.partition_map.keys(): if keyrange.Contains(partition_key): return keyrange return None
python
def _GetContainingRange(self, partition_key): """Gets the containing range based on the partition key. """ for keyrange in self.partition_map.keys(): if keyrange.Contains(partition_key): return keyrange return None
[ "def", "_GetContainingRange", "(", "self", ",", "partition_key", ")", ":", "for", "keyrange", "in", "self", ".", "partition_map", ".", "keys", "(", ")", ":", "if", "keyrange", ".", "Contains", "(", "partition_key", ")", ":", "return", "keyrange", "return", ...
Gets the containing range based on the partition key.
[ "Gets", "the", "containing", "range", "based", "on", "the", "partition", "key", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/range_partition_resolver.py#L87-L94
train
224,354
Azure/azure-cosmos-python
azure/cosmos/range_partition_resolver.py
RangePartitionResolver._GetIntersectingRanges
def _GetIntersectingRanges(self, partition_key): """Gets the intersecting ranges based on the partition key. """ partitionkey_ranges = set() intersecting_ranges = set() if partition_key is None: return list(self.partition_map.keys()) if isinstance(partition_key, prange.Range): partitionkey_ranges.add(partition_key) elif isinstance(partition_key, list): for key in partition_key: if key is None: return list(self.partition_map.keys()) elif isinstance(key, prange.Range): partitionkey_ranges.add(key) else: partitionkey_ranges.add(prange.Range(key, key)) else: partitionkey_ranges.add(prange.Range(partition_key, partition_key)) for partitionKeyRange in partitionkey_ranges: for keyrange in self.partition_map.keys(): if keyrange.Intersect(partitionKeyRange): intersecting_ranges.add(keyrange) return intersecting_ranges
python
def _GetIntersectingRanges(self, partition_key): """Gets the intersecting ranges based on the partition key. """ partitionkey_ranges = set() intersecting_ranges = set() if partition_key is None: return list(self.partition_map.keys()) if isinstance(partition_key, prange.Range): partitionkey_ranges.add(partition_key) elif isinstance(partition_key, list): for key in partition_key: if key is None: return list(self.partition_map.keys()) elif isinstance(key, prange.Range): partitionkey_ranges.add(key) else: partitionkey_ranges.add(prange.Range(key, key)) else: partitionkey_ranges.add(prange.Range(partition_key, partition_key)) for partitionKeyRange in partitionkey_ranges: for keyrange in self.partition_map.keys(): if keyrange.Intersect(partitionKeyRange): intersecting_ranges.add(keyrange) return intersecting_ranges
[ "def", "_GetIntersectingRanges", "(", "self", ",", "partition_key", ")", ":", "partitionkey_ranges", "=", "set", "(", ")", "intersecting_ranges", "=", "set", "(", ")", "if", "partition_key", "is", "None", ":", "return", "list", "(", "self", ".", "partition_map...
Gets the intersecting ranges based on the partition key.
[ "Gets", "the", "intersecting", "ranges", "based", "on", "the", "partition", "key", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/range_partition_resolver.py#L96-L123
train
224,355
Azure/azure-cosmos-python
azure/cosmos/query_iterable.py
QueryIterable._create_execution_context
def _create_execution_context(self): """instantiates the internal query execution context based. """ if hasattr(self, '_database_link'): # client side partitioning query return base_execution_context._MultiCollectionQueryExecutionContext(self._client, self._options, self._database_link, self._query, self._partition_key) else: # return execution_dispatcher._ProxyQueryExecutionContext(self._client, self._collection_link, self._query, self._options, self._fetch_function)
python
def _create_execution_context(self): """instantiates the internal query execution context based. """ if hasattr(self, '_database_link'): # client side partitioning query return base_execution_context._MultiCollectionQueryExecutionContext(self._client, self._options, self._database_link, self._query, self._partition_key) else: # return execution_dispatcher._ProxyQueryExecutionContext(self._client, self._collection_link, self._query, self._options, self._fetch_function)
[ "def", "_create_execution_context", "(", "self", ")", ":", "if", "hasattr", "(", "self", ",", "'_database_link'", ")", ":", "# client side partitioning query", "return", "base_execution_context", ".", "_MultiCollectionQueryExecutionContext", "(", "self", ".", "_client", ...
instantiates the internal query execution context based.
[ "instantiates", "the", "internal", "query", "execution", "context", "based", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/query_iterable.py#L87-L95
train
224,356
Azure/azure-cosmos-python
azure/cosmos/retry_utility.py
_Execute
def _Execute(client, global_endpoint_manager, function, *args, **kwargs): """Exectutes the function with passed parameters applying all retry policies :param object client: Document client instance :param object global_endpoint_manager: Instance of _GlobalEndpointManager class :param function function: Function to be called wrapped with retries :param (non-keyworded, variable number of arguments list) *args: :param (keyworded, variable number of arguments list) **kwargs: """ # instantiate all retry policies here to be applied for each request execution endpointDiscovery_retry_policy = endpoint_discovery_retry_policy._EndpointDiscoveryRetryPolicy(client.connection_policy, global_endpoint_manager, *args) resourceThrottle_retry_policy = resource_throttle_retry_policy._ResourceThrottleRetryPolicy(client.connection_policy.RetryOptions.MaxRetryAttemptCount, client.connection_policy.RetryOptions.FixedRetryIntervalInMilliseconds, client.connection_policy.RetryOptions.MaxWaitTimeInSeconds) defaultRetry_policy = default_retry_policy._DefaultRetryPolicy(*args) sessionRetry_policy = session_retry_policy._SessionRetryPolicy(client.connection_policy.EnableEndpointDiscovery, global_endpoint_manager, *args) while True: try: if args: result = _ExecuteFunction(function, global_endpoint_manager, *args, **kwargs) else: result = _ExecuteFunction(function, *args, **kwargs) if not client.last_response_headers: client.last_response_headers = {} # setting the throttle related response headers before returning the result client.last_response_headers[HttpHeaders.ThrottleRetryCount] = resourceThrottle_retry_policy.current_retry_attempt_count client.last_response_headers[HttpHeaders.ThrottleRetryWaitTimeInMs] = resourceThrottle_retry_policy.cummulative_wait_time_in_milliseconds return result except errors.HTTPFailure as e: retry_policy = None if (e.status_code == StatusCodes.FORBIDDEN and e.sub_status == SubStatusCodes.WRITE_FORBIDDEN): retry_policy = endpointDiscovery_retry_policy elif 
e.status_code == StatusCodes.TOO_MANY_REQUESTS: retry_policy = resourceThrottle_retry_policy elif e.status_code == StatusCodes.NOT_FOUND and e.sub_status and e.sub_status == SubStatusCodes.READ_SESSION_NOTAVAILABLE: retry_policy = sessionRetry_policy else: retry_policy = defaultRetry_policy # If none of the retry policies applies or there is no retry needed, set the throttle related response hedaers and # re-throw the exception back # arg[0] is the request. It needs to be modified for write forbidden exception if not (retry_policy.ShouldRetry(e)): if not client.last_response_headers: client.last_response_headers = {} client.last_response_headers[HttpHeaders.ThrottleRetryCount] = resourceThrottle_retry_policy.current_retry_attempt_count client.last_response_headers[HttpHeaders.ThrottleRetryWaitTimeInMs] = resourceThrottle_retry_policy.cummulative_wait_time_in_milliseconds if len(args) > 0 and args[0].should_clear_session_token_on_session_read_failure: client.session.clear_session_token(client.last_response_headers) raise else: # Wait for retry_after_in_milliseconds time before the next retry time.sleep(retry_policy.retry_after_in_milliseconds / 1000.0)
python
def _Execute(client, global_endpoint_manager, function, *args, **kwargs): """Exectutes the function with passed parameters applying all retry policies :param object client: Document client instance :param object global_endpoint_manager: Instance of _GlobalEndpointManager class :param function function: Function to be called wrapped with retries :param (non-keyworded, variable number of arguments list) *args: :param (keyworded, variable number of arguments list) **kwargs: """ # instantiate all retry policies here to be applied for each request execution endpointDiscovery_retry_policy = endpoint_discovery_retry_policy._EndpointDiscoveryRetryPolicy(client.connection_policy, global_endpoint_manager, *args) resourceThrottle_retry_policy = resource_throttle_retry_policy._ResourceThrottleRetryPolicy(client.connection_policy.RetryOptions.MaxRetryAttemptCount, client.connection_policy.RetryOptions.FixedRetryIntervalInMilliseconds, client.connection_policy.RetryOptions.MaxWaitTimeInSeconds) defaultRetry_policy = default_retry_policy._DefaultRetryPolicy(*args) sessionRetry_policy = session_retry_policy._SessionRetryPolicy(client.connection_policy.EnableEndpointDiscovery, global_endpoint_manager, *args) while True: try: if args: result = _ExecuteFunction(function, global_endpoint_manager, *args, **kwargs) else: result = _ExecuteFunction(function, *args, **kwargs) if not client.last_response_headers: client.last_response_headers = {} # setting the throttle related response headers before returning the result client.last_response_headers[HttpHeaders.ThrottleRetryCount] = resourceThrottle_retry_policy.current_retry_attempt_count client.last_response_headers[HttpHeaders.ThrottleRetryWaitTimeInMs] = resourceThrottle_retry_policy.cummulative_wait_time_in_milliseconds return result except errors.HTTPFailure as e: retry_policy = None if (e.status_code == StatusCodes.FORBIDDEN and e.sub_status == SubStatusCodes.WRITE_FORBIDDEN): retry_policy = endpointDiscovery_retry_policy elif 
e.status_code == StatusCodes.TOO_MANY_REQUESTS: retry_policy = resourceThrottle_retry_policy elif e.status_code == StatusCodes.NOT_FOUND and e.sub_status and e.sub_status == SubStatusCodes.READ_SESSION_NOTAVAILABLE: retry_policy = sessionRetry_policy else: retry_policy = defaultRetry_policy # If none of the retry policies applies or there is no retry needed, set the throttle related response hedaers and # re-throw the exception back # arg[0] is the request. It needs to be modified for write forbidden exception if not (retry_policy.ShouldRetry(e)): if not client.last_response_headers: client.last_response_headers = {} client.last_response_headers[HttpHeaders.ThrottleRetryCount] = resourceThrottle_retry_policy.current_retry_attempt_count client.last_response_headers[HttpHeaders.ThrottleRetryWaitTimeInMs] = resourceThrottle_retry_policy.cummulative_wait_time_in_milliseconds if len(args) > 0 and args[0].should_clear_session_token_on_session_read_failure: client.session.clear_session_token(client.last_response_headers) raise else: # Wait for retry_after_in_milliseconds time before the next retry time.sleep(retry_policy.retry_after_in_milliseconds / 1000.0)
[ "def", "_Execute", "(", "client", ",", "global_endpoint_manager", ",", "function", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# instantiate all retry policies here to be applied for each request execution", "endpointDiscovery_retry_policy", "=", "endpoint_discovery...
Exectutes the function with passed parameters applying all retry policies :param object client: Document client instance :param object global_endpoint_manager: Instance of _GlobalEndpointManager class :param function function: Function to be called wrapped with retries :param (non-keyworded, variable number of arguments list) *args: :param (keyworded, variable number of arguments list) **kwargs:
[ "Exectutes", "the", "function", "with", "passed", "parameters", "applying", "all", "retry", "policies" ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/retry_utility.py#L34-L95
train
224,357
Azure/azure-cosmos-python
azure/cosmos/global_endpoint_manager.py
_GlobalEndpointManager._GetDatabaseAccount
def _GetDatabaseAccount(self): """Gets the database account first by using the default endpoint, and if that doesn't returns use the endpoints for the preferred locations in the order they are specified to get the database account. """ try: database_account = self._GetDatabaseAccountStub(self.DefaultEndpoint) return database_account # If for any reason(non-globaldb related), we are not able to get the database account from the above call to GetDatabaseAccount, # we would try to get this information from any of the preferred locations that the user might have specified(by creating a locational endpoint) # and keeping eating the exception until we get the database account and return None at the end, if we are not able to get that info from any endpoints except errors.HTTPFailure: for location_name in self.PreferredLocations: locational_endpoint = _GlobalEndpointManager.GetLocationalEndpoint(self.DefaultEndpoint, location_name) try: database_account = self._GetDatabaseAccountStub(locational_endpoint) return database_account except errors.HTTPFailure: pass return None
python
def _GetDatabaseAccount(self): """Gets the database account first by using the default endpoint, and if that doesn't returns use the endpoints for the preferred locations in the order they are specified to get the database account. """ try: database_account = self._GetDatabaseAccountStub(self.DefaultEndpoint) return database_account # If for any reason(non-globaldb related), we are not able to get the database account from the above call to GetDatabaseAccount, # we would try to get this information from any of the preferred locations that the user might have specified(by creating a locational endpoint) # and keeping eating the exception until we get the database account and return None at the end, if we are not able to get that info from any endpoints except errors.HTTPFailure: for location_name in self.PreferredLocations: locational_endpoint = _GlobalEndpointManager.GetLocationalEndpoint(self.DefaultEndpoint, location_name) try: database_account = self._GetDatabaseAccountStub(locational_endpoint) return database_account except errors.HTTPFailure: pass return None
[ "def", "_GetDatabaseAccount", "(", "self", ")", ":", "try", ":", "database_account", "=", "self", ".", "_GetDatabaseAccountStub", "(", "self", ".", "DefaultEndpoint", ")", "return", "database_account", "# If for any reason(non-globaldb related), we are not able to get the dat...
Gets the database account first by using the default endpoint, and if that doesn't returns use the endpoints for the preferred locations in the order they are specified to get the database account.
[ "Gets", "the", "database", "account", "first", "by", "using", "the", "default", "endpoint", "and", "if", "that", "doesn", "t", "returns", "use", "the", "endpoints", "for", "the", "preferred", "locations", "in", "the", "order", "they", "are", "specified", "to...
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/global_endpoint_manager.py#L104-L124
train
224,358
Azure/azure-cosmos-python
azure/cosmos/partition.py
_Partition.CompareTo
def CompareTo(self, other_hash_value): """Compares the passed hash value with the hash value of this object """ if len(self.hash_value) != len(other_hash_value): raise ValueError("Length of hashes doesn't match.") # The hash byte array that is returned from ComputeHash method has the MSB at the end of the array # so comparing the bytes from the end for compare operations. for i in xrange(0, len(self.hash_value)): if(self.hash_value[len(self.hash_value) - i - 1] < other_hash_value[len(self.hash_value) - i - 1]): return -1 elif self.hash_value[len(self.hash_value) - i - 1] > other_hash_value[len(self.hash_value) - i - 1]: return 1 return 0
python
def CompareTo(self, other_hash_value): """Compares the passed hash value with the hash value of this object """ if len(self.hash_value) != len(other_hash_value): raise ValueError("Length of hashes doesn't match.") # The hash byte array that is returned from ComputeHash method has the MSB at the end of the array # so comparing the bytes from the end for compare operations. for i in xrange(0, len(self.hash_value)): if(self.hash_value[len(self.hash_value) - i - 1] < other_hash_value[len(self.hash_value) - i - 1]): return -1 elif self.hash_value[len(self.hash_value) - i - 1] > other_hash_value[len(self.hash_value) - i - 1]: return 1 return 0
[ "def", "CompareTo", "(", "self", ",", "other_hash_value", ")", ":", "if", "len", "(", "self", ".", "hash_value", ")", "!=", "len", "(", "other_hash_value", ")", ":", "raise", "ValueError", "(", "\"Length of hashes doesn't match.\"", ")", "# The hash byte array tha...
Compares the passed hash value with the hash value of this object
[ "Compares", "the", "passed", "hash", "value", "with", "the", "hash", "value", "of", "this", "object" ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/partition.py#L48-L61
train
224,359
Azure/azure-cosmos-python
azure/cosmos/murmur_hash.py
_MurmurHash.ComputeHash
def ComputeHash(self, key): """ Computes the hash of the value passed using MurmurHash3 algorithm. :param bytearray key: Byte array representing the key to be hashed. :return: 32 bit hash value. :rtype: int """ if key is None: raise ValueError("key is None.") hash_value = self._ComputeHash(key) return bytearray(pack('I', hash_value))
python
def ComputeHash(self, key): """ Computes the hash of the value passed using MurmurHash3 algorithm. :param bytearray key: Byte array representing the key to be hashed. :return: 32 bit hash value. :rtype: int """ if key is None: raise ValueError("key is None.") hash_value = self._ComputeHash(key) return bytearray(pack('I', hash_value))
[ "def", "ComputeHash", "(", "self", ",", "key", ")", ":", "if", "key", "is", "None", ":", "raise", "ValueError", "(", "\"key is None.\"", ")", "hash_value", "=", "self", ".", "_ComputeHash", "(", "key", ")", "return", "bytearray", "(", "pack", "(", "'I'",...
Computes the hash of the value passed using MurmurHash3 algorithm. :param bytearray key: Byte array representing the key to be hashed. :return: 32 bit hash value. :rtype: int
[ "Computes", "the", "hash", "of", "the", "value", "passed", "using", "MurmurHash3", "algorithm", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/murmur_hash.py#L49-L64
train
224,360
Azure/azure-cosmos-python
azure/cosmos/murmur_hash.py
_MurmurHash._ComputeHash
def _ComputeHash( key, seed = 0x0 ): """Computes the hash of the value passed using MurmurHash3 algorithm with the seed value. """ def fmix( h ): h ^= h >> 16 h = ( h * 0x85ebca6b ) & 0xFFFFFFFF h ^= h >> 13 h = ( h * 0xc2b2ae35 ) & 0xFFFFFFFF h ^= h >> 16 return h length = len( key ) nblocks = int( length / 4 ) h1 = seed c1 = 0xcc9e2d51 c2 = 0x1b873593 # body for block_start in xrange( 0, nblocks * 4, 4 ): k1 = key[ block_start + 3 ] << 24 | \ key[ block_start + 2 ] << 16 | \ key[ block_start + 1 ] << 8 | \ key[ block_start + 0 ] k1 = c1 * k1 & 0xFFFFFFFF k1 = ( k1 << 15 | k1 >> 17 ) & 0xFFFFFFFF # inlined ROTL32 k1 = ( c2 * k1 ) & 0xFFFFFFFF h1 ^= k1 h1 = ( h1 << 13 | h1 >> 19 ) & 0xFFFFFFFF # inlined _ROTL32 h1 = ( h1 * 5 + 0xe6546b64 ) & 0xFFFFFFFF # tail tail_index = nblocks * 4 k1 = 0 tail_size = length & 3 if tail_size >= 3: k1 ^= key[ tail_index + 2 ] << 16 if tail_size >= 2: k1 ^= key[ tail_index + 1 ] << 8 if tail_size >= 1: k1 ^= key[ tail_index + 0 ] if tail_size != 0: k1 = ( k1 * c1 ) & 0xFFFFFFFF k1 = ( k1 << 15 | k1 >> 17 ) & 0xFFFFFFFF # _ROTL32 k1 = ( k1 * c2 ) & 0xFFFFFFFF h1 ^= k1 return fmix( h1 ^ length )
python
def _ComputeHash( key, seed = 0x0 ): """Computes the hash of the value passed using MurmurHash3 algorithm with the seed value. """ def fmix( h ): h ^= h >> 16 h = ( h * 0x85ebca6b ) & 0xFFFFFFFF h ^= h >> 13 h = ( h * 0xc2b2ae35 ) & 0xFFFFFFFF h ^= h >> 16 return h length = len( key ) nblocks = int( length / 4 ) h1 = seed c1 = 0xcc9e2d51 c2 = 0x1b873593 # body for block_start in xrange( 0, nblocks * 4, 4 ): k1 = key[ block_start + 3 ] << 24 | \ key[ block_start + 2 ] << 16 | \ key[ block_start + 1 ] << 8 | \ key[ block_start + 0 ] k1 = c1 * k1 & 0xFFFFFFFF k1 = ( k1 << 15 | k1 >> 17 ) & 0xFFFFFFFF # inlined ROTL32 k1 = ( c2 * k1 ) & 0xFFFFFFFF h1 ^= k1 h1 = ( h1 << 13 | h1 >> 19 ) & 0xFFFFFFFF # inlined _ROTL32 h1 = ( h1 * 5 + 0xe6546b64 ) & 0xFFFFFFFF # tail tail_index = nblocks * 4 k1 = 0 tail_size = length & 3 if tail_size >= 3: k1 ^= key[ tail_index + 2 ] << 16 if tail_size >= 2: k1 ^= key[ tail_index + 1 ] << 8 if tail_size >= 1: k1 ^= key[ tail_index + 0 ] if tail_size != 0: k1 = ( k1 * c1 ) & 0xFFFFFFFF k1 = ( k1 << 15 | k1 >> 17 ) & 0xFFFFFFFF # _ROTL32 k1 = ( k1 * c2 ) & 0xFFFFFFFF h1 ^= k1 return fmix( h1 ^ length )
[ "def", "_ComputeHash", "(", "key", ",", "seed", "=", "0x0", ")", ":", "def", "fmix", "(", "h", ")", ":", "h", "^=", "h", ">>", "16", "h", "=", "(", "h", "*", "0x85ebca6b", ")", "&", "0xFFFFFFFF", "h", "^=", "h", ">>", "13", "h", "=", "(", "...
Computes the hash of the value passed using MurmurHash3 algorithm with the seed value.
[ "Computes", "the", "hash", "of", "the", "value", "passed", "using", "MurmurHash3", "algorithm", "with", "the", "seed", "value", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/murmur_hash.py#L67-L119
train
224,361
Azure/azure-cosmos-python
azure/cosmos/vector_session_token.py
VectorSessionToken.create
def create(cls, session_token): """ Parses session token and creates the vector session token :param str session_token: :return: A Vector session Token :rtype: VectorSessionToken """ version = None global_lsn = None local_lsn_by_region = {} if not session_token: return None segments = session_token.split(cls.segment_separator) if len(segments) < 2: return None try: version = int(segments[0]) except ValueError as _: return None try: global_lsn = int(segments[1]) except ValueError as _: return None for i in range(2, len(segments)): region_segment = segments[i] region_id_with_lsn = region_segment.split(cls.region_progress_separator) if len(region_id_with_lsn) != 2: return None try: region_id = int(region_id_with_lsn[0]) local_lsn = int(region_id_with_lsn[1]) except ValueError as _: return None local_lsn_by_region[region_id] = local_lsn return VectorSessionToken(version, global_lsn, local_lsn_by_region, session_token)
python
def create(cls, session_token): """ Parses session token and creates the vector session token :param str session_token: :return: A Vector session Token :rtype: VectorSessionToken """ version = None global_lsn = None local_lsn_by_region = {} if not session_token: return None segments = session_token.split(cls.segment_separator) if len(segments) < 2: return None try: version = int(segments[0]) except ValueError as _: return None try: global_lsn = int(segments[1]) except ValueError as _: return None for i in range(2, len(segments)): region_segment = segments[i] region_id_with_lsn = region_segment.split(cls.region_progress_separator) if len(region_id_with_lsn) != 2: return None try: region_id = int(region_id_with_lsn[0]) local_lsn = int(region_id_with_lsn[1]) except ValueError as _: return None local_lsn_by_region[region_id] = local_lsn return VectorSessionToken(version, global_lsn, local_lsn_by_region, session_token)
[ "def", "create", "(", "cls", ",", "session_token", ")", ":", "version", "=", "None", "global_lsn", "=", "None", "local_lsn_by_region", "=", "{", "}", "if", "not", "session_token", ":", "return", "None", "segments", "=", "session_token", ".", "split", "(", ...
Parses session token and creates the vector session token :param str session_token: :return: A Vector session Token :rtype: VectorSessionToken
[ "Parses", "session", "token", "and", "creates", "the", "vector", "session", "token" ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/vector_session_token.py#L53-L99
train
224,362
Azure/azure-cosmos-python
azure/cosmos/consistent_hash_ring.py
_ConsistentHashRing._ConstructPartitions
def _ConstructPartitions(self, collection_links, partitions_per_node): """Constructs the partitions in the consistent ring by assigning them to collection nodes using the hashing algorithm and then finally sorting the partitions based on the hash value. """ collections_node_count = len(collection_links) partitions = [partition._Partition() for _ in xrange(0, partitions_per_node * collections_node_count)] index = 0 for collection_node in collection_links: hash_value = self.hash_generator.ComputeHash(self._GetBytes(collection_node)) for _ in xrange(0, partitions_per_node): partitions[index] = partition._Partition(hash_value, collection_node) index += 1 hash_value = self.hash_generator.ComputeHash(hash_value) partitions.sort() return partitions
python
def _ConstructPartitions(self, collection_links, partitions_per_node): """Constructs the partitions in the consistent ring by assigning them to collection nodes using the hashing algorithm and then finally sorting the partitions based on the hash value. """ collections_node_count = len(collection_links) partitions = [partition._Partition() for _ in xrange(0, partitions_per_node * collections_node_count)] index = 0 for collection_node in collection_links: hash_value = self.hash_generator.ComputeHash(self._GetBytes(collection_node)) for _ in xrange(0, partitions_per_node): partitions[index] = partition._Partition(hash_value, collection_node) index += 1 hash_value = self.hash_generator.ComputeHash(hash_value) partitions.sort() return partitions
[ "def", "_ConstructPartitions", "(", "self", ",", "collection_links", ",", "partitions_per_node", ")", ":", "collections_node_count", "=", "len", "(", "collection_links", ")", "partitions", "=", "[", "partition", ".", "_Partition", "(", ")", "for", "_", "in", "xr...
Constructs the partitions in the consistent ring by assigning them to collection nodes using the hashing algorithm and then finally sorting the partitions based on the hash value.
[ "Constructs", "the", "partitions", "in", "the", "consistent", "ring", "by", "assigning", "them", "to", "collection", "nodes", "using", "the", "hashing", "algorithm", "and", "then", "finally", "sorting", "the", "partitions", "based", "on", "the", "hash", "value",...
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/consistent_hash_ring.py#L75-L91
train
224,363
Azure/azure-cosmos-python
azure/cosmos/consistent_hash_ring.py
_ConsistentHashRing._FindPartition
def _FindPartition(self, key): """Finds the partition from the byte array representation of the partition key. """ hash_value = self.hash_generator.ComputeHash(key) return self._LowerBoundSearch(self.partitions, hash_value)
python
def _FindPartition(self, key): """Finds the partition from the byte array representation of the partition key. """ hash_value = self.hash_generator.ComputeHash(key) return self._LowerBoundSearch(self.partitions, hash_value)
[ "def", "_FindPartition", "(", "self", ",", "key", ")", ":", "hash_value", "=", "self", ".", "hash_generator", ".", "ComputeHash", "(", "key", ")", "return", "self", ".", "_LowerBoundSearch", "(", "self", ".", "partitions", ",", "hash_value", ")" ]
Finds the partition from the byte array representation of the partition key.
[ "Finds", "the", "partition", "from", "the", "byte", "array", "representation", "of", "the", "partition", "key", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/consistent_hash_ring.py#L93-L97
train
224,364
Azure/azure-cosmos-python
azure/cosmos/consistent_hash_ring.py
_ConsistentHashRing._GetSerializedPartitionList
def _GetSerializedPartitionList(self): """Gets the serialized version of the ConsistentRing. Added this helper for the test code. """ partition_list = list() for part in self.partitions: partition_list.append((part.node, unpack("<L", part.hash_value)[0])) return partition_list
python
def _GetSerializedPartitionList(self): """Gets the serialized version of the ConsistentRing. Added this helper for the test code. """ partition_list = list() for part in self.partitions: partition_list.append((part.node, unpack("<L", part.hash_value)[0])) return partition_list
[ "def", "_GetSerializedPartitionList", "(", "self", ")", ":", "partition_list", "=", "list", "(", ")", "for", "part", "in", "self", ".", "partitions", ":", "partition_list", ".", "append", "(", "(", "part", ".", "node", ",", "unpack", "(", "\"<L\"", ",", ...
Gets the serialized version of the ConsistentRing. Added this helper for the test code.
[ "Gets", "the", "serialized", "version", "of", "the", "ConsistentRing", ".", "Added", "this", "helper", "for", "the", "test", "code", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/consistent_hash_ring.py#L99-L108
train
224,365
Azure/azure-cosmos-python
azure/cosmos/consistent_hash_ring.py
_ConsistentHashRing._GetBytes
def _GetBytes(partition_key): """Gets the bytes representing the value of the partition key. """ if isinstance(partition_key, six.string_types): return bytearray(partition_key, encoding='utf-8') else: raise ValueError("Unsupported " + str(type(partition_key)) + " for partitionKey.")
python
def _GetBytes(partition_key): """Gets the bytes representing the value of the partition key. """ if isinstance(partition_key, six.string_types): return bytearray(partition_key, encoding='utf-8') else: raise ValueError("Unsupported " + str(type(partition_key)) + " for partitionKey.")
[ "def", "_GetBytes", "(", "partition_key", ")", ":", "if", "isinstance", "(", "partition_key", ",", "six", ".", "string_types", ")", ":", "return", "bytearray", "(", "partition_key", ",", "encoding", "=", "'utf-8'", ")", "else", ":", "raise", "ValueError", "(...
Gets the bytes representing the value of the partition key.
[ "Gets", "the", "bytes", "representing", "the", "value", "of", "the", "partition", "key", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/consistent_hash_ring.py#L111-L117
train
224,366
Azure/azure-cosmos-python
azure/cosmos/consistent_hash_ring.py
_ConsistentHashRing._LowerBoundSearch
def _LowerBoundSearch(partitions, hash_value): """Searches the partition in the partition array using hashValue. """ for i in xrange(0, len(partitions) - 1): if partitions[i].CompareTo(hash_value) <= 0 and partitions[i+1].CompareTo(hash_value) > 0: return i return len(partitions) - 1
python
def _LowerBoundSearch(partitions, hash_value): """Searches the partition in the partition array using hashValue. """ for i in xrange(0, len(partitions) - 1): if partitions[i].CompareTo(hash_value) <= 0 and partitions[i+1].CompareTo(hash_value) > 0: return i return len(partitions) - 1
[ "def", "_LowerBoundSearch", "(", "partitions", ",", "hash_value", ")", ":", "for", "i", "in", "xrange", "(", "0", ",", "len", "(", "partitions", ")", "-", "1", ")", ":", "if", "partitions", "[", "i", "]", ".", "CompareTo", "(", "hash_value", ")", "<=...
Searches the partition in the partition array using hashValue.
[ "Searches", "the", "partition", "in", "the", "partition", "array", "using", "hashValue", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/consistent_hash_ring.py#L120-L127
train
224,367
Azure/azure-cosmos-python
azure/cosmos/execution_context/base_execution_context.py
_QueryExecutionContextBase._fetch_items_helper_no_retries
def _fetch_items_helper_no_retries(self, fetch_function): """Fetches more items and doesn't retry on failure :return: List of fetched items. :rtype: list """ fetched_items = [] # Continues pages till finds a non empty page or all results are exhausted while self._continuation or not self._has_started: if not self._has_started: self._has_started = True self._options['continuation'] = self._continuation (fetched_items, response_headers) = fetch_function(self._options) fetched_items continuation_key = http_constants.HttpHeaders.Continuation # Use Etag as continuation token for change feed queries. if self._is_change_feed: continuation_key = http_constants.HttpHeaders.ETag # In change feed queries, the continuation token is always populated. The hasNext() test is whether # there is any items in the response or not. if not self._is_change_feed or len(fetched_items) > 0: self._continuation = response_headers.get(continuation_key) else: self._continuation = None if fetched_items: break return fetched_items
python
def _fetch_items_helper_no_retries(self, fetch_function): """Fetches more items and doesn't retry on failure :return: List of fetched items. :rtype: list """ fetched_items = [] # Continues pages till finds a non empty page or all results are exhausted while self._continuation or not self._has_started: if not self._has_started: self._has_started = True self._options['continuation'] = self._continuation (fetched_items, response_headers) = fetch_function(self._options) fetched_items continuation_key = http_constants.HttpHeaders.Continuation # Use Etag as continuation token for change feed queries. if self._is_change_feed: continuation_key = http_constants.HttpHeaders.ETag # In change feed queries, the continuation token is always populated. The hasNext() test is whether # there is any items in the response or not. if not self._is_change_feed or len(fetched_items) > 0: self._continuation = response_headers.get(continuation_key) else: self._continuation = None if fetched_items: break return fetched_items
[ "def", "_fetch_items_helper_no_retries", "(", "self", ",", "fetch_function", ")", ":", "fetched_items", "=", "[", "]", "# Continues pages till finds a non empty page or all results are exhausted", "while", "self", ".", "_continuation", "or", "not", "self", ".", "_has_starte...
Fetches more items and doesn't retry on failure :return: List of fetched items. :rtype: list
[ "Fetches", "more", "items", "and", "doesn", "t", "retry", "on", "failure" ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/execution_context/base_execution_context.py#L110-L137
train
224,368
Azure/azure-cosmos-python
azure/cosmos/execution_context/base_execution_context.py
_MultiCollectionQueryExecutionContext._fetch_next_block
def _fetch_next_block(self): """Fetches the next block of query results. This iterates fetches the next block of results from the current collection link. Once the current collection results were exhausted. It moves to the next collection link. :return: List of fetched items. :rtype: list """ # Fetch next block of results by executing the query against the current document collection fetched_items = self._fetch_items_helper_with_retries(self._fetch_function) # If there are multiple document collections to query for(in case of partitioning), keep looping through each one of them, # creating separate feed queries for each collection and fetching the items while not fetched_items: if self._collection_links and self._current_collection_index < self._collection_links_length: path = base.GetPathFromLink(self._collection_links[self._current_collection_index], 'docs') collection_id = base.GetResourceIdOrFullNameFromLink(self._collection_links[self._current_collection_index]) self._continuation = None self._has_started = False def fetch_fn(options): return self._client.QueryFeed(path, collection_id, self._query, options) self._fetch_function = fetch_fn fetched_items = self._fetch_items_helper_with_retries(self._fetch_function) self._current_collection_index += 1 else: break return fetched_items
python
def _fetch_next_block(self): """Fetches the next block of query results. This iterates fetches the next block of results from the current collection link. Once the current collection results were exhausted. It moves to the next collection link. :return: List of fetched items. :rtype: list """ # Fetch next block of results by executing the query against the current document collection fetched_items = self._fetch_items_helper_with_retries(self._fetch_function) # If there are multiple document collections to query for(in case of partitioning), keep looping through each one of them, # creating separate feed queries for each collection and fetching the items while not fetched_items: if self._collection_links and self._current_collection_index < self._collection_links_length: path = base.GetPathFromLink(self._collection_links[self._current_collection_index], 'docs') collection_id = base.GetResourceIdOrFullNameFromLink(self._collection_links[self._current_collection_index]) self._continuation = None self._has_started = False def fetch_fn(options): return self._client.QueryFeed(path, collection_id, self._query, options) self._fetch_function = fetch_fn fetched_items = self._fetch_items_helper_with_retries(self._fetch_function) self._current_collection_index += 1 else: break return fetched_items
[ "def", "_fetch_next_block", "(", "self", ")", ":", "# Fetch next block of results by executing the query against the current document collection", "fetched_items", "=", "self", ".", "_fetch_items_helper_with_retries", "(", "self", ".", "_fetch_function", ")", "# If there are multip...
Fetches the next block of query results. This iterates fetches the next block of results from the current collection link. Once the current collection results were exhausted. It moves to the next collection link. :return: List of fetched items. :rtype: list
[ "Fetches", "the", "next", "block", "of", "query", "results", ".", "This", "iterates", "fetches", "the", "next", "block", "of", "results", "from", "the", "current", "collection", "link", ".", "Once", "the", "current", "collection", "results", "were", "exhausted...
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/execution_context/base_execution_context.py#L227-L263
train
224,369
Azure/azure-cosmos-python
azure/cosmos/range.py
Range.Contains
def Contains(self, other): """Checks if the passed parameter is in the range of this object. """ if other is None: raise ValueError("other is None.") if isinstance(other, Range): if other.low >= self.low and other.high <= self.high: return True return False else: return self.Contains(Range(other, other))
python
def Contains(self, other): """Checks if the passed parameter is in the range of this object. """ if other is None: raise ValueError("other is None.") if isinstance(other, Range): if other.low >= self.low and other.high <= self.high: return True return False else: return self.Contains(Range(other, other))
[ "def", "Contains", "(", "self", ",", "other", ")", ":", "if", "other", "is", "None", ":", "raise", "ValueError", "(", "\"other is None.\"", ")", "if", "isinstance", "(", "other", ",", "Range", ")", ":", "if", "other", ".", "low", ">=", "self", ".", "...
Checks if the passed parameter is in the range of this object.
[ "Checks", "if", "the", "passed", "parameter", "is", "in", "the", "range", "of", "this", "object", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/range.py#L57-L68
train
224,370
Azure/azure-cosmos-python
azure/cosmos/range.py
Range.Intersect
def Intersect(self, other): """Checks if the passed parameter intersects the range of this object. """ if isinstance(other, Range): max_low = self.low if (self.low >= other.low) else other.low min_high = self.high if (self.high <= other.high) else other.high if max_low <= min_high: return True return False
python
def Intersect(self, other): """Checks if the passed parameter intersects the range of this object. """ if isinstance(other, Range): max_low = self.low if (self.low >= other.low) else other.low min_high = self.high if (self.high <= other.high) else other.high if max_low <= min_high: return True return False
[ "def", "Intersect", "(", "self", ",", "other", ")", ":", "if", "isinstance", "(", "other", ",", "Range", ")", ":", "max_low", "=", "self", ".", "low", "if", "(", "self", ".", "low", ">=", "other", ".", "low", ")", "else", "other", ".", "low", "mi...
Checks if the passed parameter intersects the range of this object.
[ "Checks", "if", "the", "passed", "parameter", "intersects", "the", "range", "of", "this", "object", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/range.py#L70-L80
train
224,371
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.RegisterPartitionResolver
def RegisterPartitionResolver(self, database_link, partition_resolver): """Registers the partition resolver associated with the database link :param str database_link: Database Self Link or ID based link. :param object partition_resolver: An instance of PartitionResolver. """ if not database_link: raise ValueError("database_link is None or empty.") if partition_resolver is None: raise ValueError("partition_resolver is None.") self.partition_resolvers = {base.TrimBeginningAndEndingSlashes(database_link): partition_resolver}
python
def RegisterPartitionResolver(self, database_link, partition_resolver): """Registers the partition resolver associated with the database link :param str database_link: Database Self Link or ID based link. :param object partition_resolver: An instance of PartitionResolver. """ if not database_link: raise ValueError("database_link is None or empty.") if partition_resolver is None: raise ValueError("partition_resolver is None.") self.partition_resolvers = {base.TrimBeginningAndEndingSlashes(database_link): partition_resolver}
[ "def", "RegisterPartitionResolver", "(", "self", ",", "database_link", ",", "partition_resolver", ")", ":", "if", "not", "database_link", ":", "raise", "ValueError", "(", "\"database_link is None or empty.\"", ")", "if", "partition_resolver", "is", "None", ":", "raise...
Registers the partition resolver associated with the database link :param str database_link: Database Self Link or ID based link. :param object partition_resolver: An instance of PartitionResolver.
[ "Registers", "the", "partition", "resolver", "associated", "with", "the", "database", "link" ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L180-L195
train
224,372
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.GetPartitionResolver
def GetPartitionResolver(self, database_link): """Gets the partition resolver associated with the database link :param str database_link: Database self link or ID based link. :return: An instance of PartitionResolver. :rtype: object """ if not database_link: raise ValueError("database_link is None or empty.") return self.partition_resolvers.get(base.TrimBeginningAndEndingSlashes(database_link))
python
def GetPartitionResolver(self, database_link): """Gets the partition resolver associated with the database link :param str database_link: Database self link or ID based link. :return: An instance of PartitionResolver. :rtype: object """ if not database_link: raise ValueError("database_link is None or empty.") return self.partition_resolvers.get(base.TrimBeginningAndEndingSlashes(database_link))
[ "def", "GetPartitionResolver", "(", "self", ",", "database_link", ")", ":", "if", "not", "database_link", ":", "raise", "ValueError", "(", "\"database_link is None or empty.\"", ")", "return", "self", ".", "partition_resolvers", ".", "get", "(", "base", ".", "Trim...
Gets the partition resolver associated with the database link :param str database_link: Database self link or ID based link. :return: An instance of PartitionResolver. :rtype: object
[ "Gets", "the", "partition", "resolver", "associated", "with", "the", "database", "link" ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L198-L212
train
224,373
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.CreateDatabase
def CreateDatabase(self, database, options=None): """Creates a database. :param dict database: The Azure Cosmos database to create. :param dict options: The request options for the request. :return: The Database that was created. :rtype: dict """ if options is None: options = {} CosmosClient.__ValidateResource(database) path = '/dbs' return self.Create(database, path, 'dbs', None, None, options)
python
def CreateDatabase(self, database, options=None): """Creates a database. :param dict database: The Azure Cosmos database to create. :param dict options: The request options for the request. :return: The Database that was created. :rtype: dict """ if options is None: options = {} CosmosClient.__ValidateResource(database) path = '/dbs' return self.Create(database, path, 'dbs', None, None, options)
[ "def", "CreateDatabase", "(", "self", ",", "database", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "{", "}", "CosmosClient", ".", "__ValidateResource", "(", "database", ")", "path", "=", "'/dbs'", "return", ...
Creates a database. :param dict database: The Azure Cosmos database to create. :param dict options: The request options for the request. :return: The Database that was created. :rtype: dict
[ "Creates", "a", "database", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L215-L233
train
224,374
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.ReadDatabase
def ReadDatabase(self, database_link, options=None): """Reads a database. :param str database_link: The link to the database. :param dict options: The request options for the request. :return: The Database that was read. :rtype: dict """ if options is None: options = {} path = base.GetPathFromLink(database_link) database_id = base.GetResourceIdOrFullNameFromLink(database_link) return self.Read(path, 'dbs', database_id, None, options)
python
def ReadDatabase(self, database_link, options=None): """Reads a database. :param str database_link: The link to the database. :param dict options: The request options for the request. :return: The Database that was read. :rtype: dict """ if options is None: options = {} path = base.GetPathFromLink(database_link) database_id = base.GetResourceIdOrFullNameFromLink(database_link) return self.Read(path, 'dbs', database_id, None, options)
[ "def", "ReadDatabase", "(", "self", ",", "database_link", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "{", "}", "path", "=", "base", ".", "GetPathFromLink", "(", "database_link", ")", "database_id", "=", "ba...
Reads a database. :param str database_link: The link to the database. :param dict options: The request options for the request. :return: The Database that was read. :rtype: dict
[ "Reads", "a", "database", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L235-L253
train
224,375
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.QueryDatabases
def QueryDatabases(self, query, options=None): """Queries databases. :param (str or dict) query: :param dict options: The request options for the request. :return: Query Iterable of Databases. :rtype: query_iterable.QueryIterable """ if options is None: options = {} def fetch_fn(options): return self.__QueryFeed('/dbs', 'dbs', '', lambda r: r['Databases'], lambda _, b: b, query, options), self.last_response_headers return query_iterable.QueryIterable(self, query, options, fetch_fn)
python
def QueryDatabases(self, query, options=None): """Queries databases. :param (str or dict) query: :param dict options: The request options for the request. :return: Query Iterable of Databases. :rtype: query_iterable.QueryIterable """ if options is None: options = {} def fetch_fn(options): return self.__QueryFeed('/dbs', 'dbs', '', lambda r: r['Databases'], lambda _, b: b, query, options), self.last_response_headers return query_iterable.QueryIterable(self, query, options, fetch_fn)
[ "def", "QueryDatabases", "(", "self", ",", "query", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "{", "}", "def", "fetch_fn", "(", "options", ")", ":", "return", "self", ".", "__QueryFeed", "(", "'/dbs'", ...
Queries databases. :param (str or dict) query: :param dict options: The request options for the request. :return: Query Iterable of Databases. :rtype: query_iterable.QueryIterable
[ "Queries", "databases", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L272-L295
train
224,376
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.ReadContainers
def ReadContainers(self, database_link, options=None): """Reads all collections in a database. :param str database_link: The link to the database. :param dict options: The request options for the request. :return: Query Iterable of Collections. :rtype: query_iterable.QueryIterable """ if options is None: options = {} return self.QueryContainers(database_link, None, options)
python
def ReadContainers(self, database_link, options=None): """Reads all collections in a database. :param str database_link: The link to the database. :param dict options: The request options for the request. :return: Query Iterable of Collections. :rtype: query_iterable.QueryIterable """ if options is None: options = {} return self.QueryContainers(database_link, None, options)
[ "def", "ReadContainers", "(", "self", ",", "database_link", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "{", "}", "return", "self", ".", "QueryContainers", "(", "database_link", ",", "None", ",", "options", ...
Reads all collections in a database. :param str database_link: The link to the database. :param dict options: The request options for the request. :return: Query Iterable of Collections. :rtype: query_iterable.QueryIterable
[ "Reads", "all", "collections", "in", "a", "database", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L297-L313
train
224,377
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.CreateContainer
def CreateContainer(self, database_link, collection, options=None): """Creates a collection in a database. :param str database_link: The link to the database. :param dict collection: The Azure Cosmos collection to create. :param dict options: The request options for the request. :return: The Collection that was created. :rtype: dict """ if options is None: options = {} CosmosClient.__ValidateResource(collection) path = base.GetPathFromLink(database_link, 'colls') database_id = base.GetResourceIdOrFullNameFromLink(database_link) return self.Create(collection, path, 'colls', database_id, None, options)
python
def CreateContainer(self, database_link, collection, options=None): """Creates a collection in a database. :param str database_link: The link to the database. :param dict collection: The Azure Cosmos collection to create. :param dict options: The request options for the request. :return: The Collection that was created. :rtype: dict """ if options is None: options = {} CosmosClient.__ValidateResource(collection) path = base.GetPathFromLink(database_link, 'colls') database_id = base.GetResourceIdOrFullNameFromLink(database_link) return self.Create(collection, path, 'colls', database_id, None, options)
[ "def", "CreateContainer", "(", "self", ",", "database_link", ",", "collection", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "{", "}", "CosmosClient", ".", "__ValidateResource", "(", "collection", ")", "path", ...
Creates a collection in a database. :param str database_link: The link to the database. :param dict collection: The Azure Cosmos collection to create. :param dict options: The request options for the request. :return: The Collection that was created. :rtype: dict
[ "Creates", "a", "collection", "in", "a", "database", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L344-L369
train
224,378
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.ReplaceContainer
def ReplaceContainer(self, collection_link, collection, options=None): """Replaces a collection and return it. :param str collection_link: The link to the collection entity. :param dict collection: The collection to be used. :param dict options: The request options for the request. :return: The new Collection. :rtype: dict """ if options is None: options = {} CosmosClient.__ValidateResource(collection) path = base.GetPathFromLink(collection_link) collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) return self.Replace(collection, path, 'colls', collection_id, None, options)
python
def ReplaceContainer(self, collection_link, collection, options=None): """Replaces a collection and return it. :param str collection_link: The link to the collection entity. :param dict collection: The collection to be used. :param dict options: The request options for the request. :return: The new Collection. :rtype: dict """ if options is None: options = {} CosmosClient.__ValidateResource(collection) path = base.GetPathFromLink(collection_link) collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) return self.Replace(collection, path, 'colls', collection_id, None, options)
[ "def", "ReplaceContainer", "(", "self", ",", "collection_link", ",", "collection", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "{", "}", "CosmosClient", ".", "__ValidateResource", "(", "collection", ")", "path",...
Replaces a collection and return it. :param str collection_link: The link to the collection entity. :param dict collection: The collection to be used. :param dict options: The request options for the request. :return: The new Collection. :rtype: dict
[ "Replaces", "a", "collection", "and", "return", "it", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L371-L398
train
224,379
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.ReadContainer
def ReadContainer(self, collection_link, options=None): """Reads a collection. :param str collection_link: The link to the document collection. :param dict options: The request options for the request. :return: The read Collection. :rtype: dict """ if options is None: options = {} path = base.GetPathFromLink(collection_link) collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) return self.Read(path, 'colls', collection_id, None, options)
python
def ReadContainer(self, collection_link, options=None): """Reads a collection. :param str collection_link: The link to the document collection. :param dict options: The request options for the request. :return: The read Collection. :rtype: dict """ if options is None: options = {} path = base.GetPathFromLink(collection_link) collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) return self.Read(path, 'colls', collection_id, None, options)
[ "def", "ReadContainer", "(", "self", ",", "collection_link", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "{", "}", "path", "=", "base", ".", "GetPathFromLink", "(", "collection_link", ")", "collection_id", "="...
Reads a collection. :param str collection_link: The link to the document collection. :param dict options: The request options for the request. :return: The read Collection. :rtype: dict
[ "Reads", "a", "collection", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L400-L423
train
224,380
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.UpsertUser
def UpsertUser(self, database_link, user, options=None): """Upserts a user. :param str database_link: The link to the database. :param dict user: The Azure Cosmos user to upsert. :param dict options: The request options for the request. :return: The upserted User. :rtype: dict """ if options is None: options = {} database_id, path = self._GetDatabaseIdWithPathForUser(database_link, user) return self.Upsert(user, path, 'users', database_id, None, options)
python
def UpsertUser(self, database_link, user, options=None): """Upserts a user. :param str database_link: The link to the database. :param dict user: The Azure Cosmos user to upsert. :param dict options: The request options for the request. :return: The upserted User. :rtype: dict """ if options is None: options = {} database_id, path = self._GetDatabaseIdWithPathForUser(database_link, user) return self.Upsert(user, path, 'users', database_id, None, options)
[ "def", "UpsertUser", "(", "self", ",", "database_link", ",", "user", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "{", "}", "database_id", ",", "path", "=", "self", ".", "_GetDatabaseIdWithPathForUser", "(", ...
Upserts a user. :param str database_link: The link to the database. :param dict user: The Azure Cosmos user to upsert. :param dict options: The request options for the request. :return: The upserted User. :rtype: dict
[ "Upserts", "a", "user", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L452-L475
train
224,381
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.ReadUser
def ReadUser(self, user_link, options=None): """Reads a user. :param str user_link: The link to the user entity. :param dict options: The request options for the request. :return: The read User. :rtype: dict """ if options is None: options = {} path = base.GetPathFromLink(user_link) user_id = base.GetResourceIdOrFullNameFromLink(user_link) return self.Read(path, 'users', user_id, None, options)
python
def ReadUser(self, user_link, options=None): """Reads a user. :param str user_link: The link to the user entity. :param dict options: The request options for the request. :return: The read User. :rtype: dict """ if options is None: options = {} path = base.GetPathFromLink(user_link) user_id = base.GetResourceIdOrFullNameFromLink(user_link) return self.Read(path, 'users', user_id, None, options)
[ "def", "ReadUser", "(", "self", ",", "user_link", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "{", "}", "path", "=", "base", ".", "GetPathFromLink", "(", "user_link", ")", "user_id", "=", "base", ".", "G...
Reads a user. :param str user_link: The link to the user entity. :param dict options: The request options for the request. :return: The read User. :rtype: dict
[ "Reads", "a", "user", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L484-L503
train
224,382
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.ReadUsers
def ReadUsers(self, database_link, options=None): """Reads all users in a database. :params str database_link: The link to the database. :params dict options: The request options for the request. :return: Query iterable of Users. :rtype: query_iterable.QueryIterable """ if options is None: options = {} return self.QueryUsers(database_link, None, options)
python
def ReadUsers(self, database_link, options=None): """Reads all users in a database. :params str database_link: The link to the database. :params dict options: The request options for the request. :return: Query iterable of Users. :rtype: query_iterable.QueryIterable """ if options is None: options = {} return self.QueryUsers(database_link, None, options)
[ "def", "ReadUsers", "(", "self", ",", "database_link", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "{", "}", "return", "self", ".", "QueryUsers", "(", "database_link", ",", "None", ",", "options", ")" ]
Reads all users in a database. :params str database_link: The link to the database. :params dict options: The request options for the request. :return: Query iterable of Users. :rtype: query_iterable.QueryIterable
[ "Reads", "all", "users", "in", "a", "database", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L505-L521
train
224,383
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.QueryUsers
def QueryUsers(self, database_link, query, options=None): """Queries users in a database. :param str database_link: The link to the database. :param (str or dict) query: :param dict options: The request options for the request. :return: Query Iterable of Users. :rtype: query_iterable.QueryIterable """ if options is None: options = {} path = base.GetPathFromLink(database_link, 'users') database_id = base.GetResourceIdOrFullNameFromLink(database_link) def fetch_fn(options): return self.__QueryFeed(path, 'users', database_id, lambda r: r['Users'], lambda _, b: b, query, options), self.last_response_headers return query_iterable.QueryIterable(self, query, options, fetch_fn)
python
def QueryUsers(self, database_link, query, options=None): """Queries users in a database. :param str database_link: The link to the database. :param (str or dict) query: :param dict options: The request options for the request. :return: Query Iterable of Users. :rtype: query_iterable.QueryIterable """ if options is None: options = {} path = base.GetPathFromLink(database_link, 'users') database_id = base.GetResourceIdOrFullNameFromLink(database_link) def fetch_fn(options): return self.__QueryFeed(path, 'users', database_id, lambda r: r['Users'], lambda _, b: b, query, options), self.last_response_headers return query_iterable.QueryIterable(self, query, options, fetch_fn)
[ "def", "QueryUsers", "(", "self", ",", "database_link", ",", "query", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "{", "}", "path", "=", "base", ".", "GetPathFromLink", "(", "database_link", ",", "'users'", ...
Queries users in a database. :param str database_link: The link to the database. :param (str or dict) query: :param dict options: The request options for the request. :return: Query Iterable of Users. :rtype: query_iterable.QueryIterable
[ "Queries", "users", "in", "a", "database", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L523-L551
train
224,384
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.DeleteDatabase
def DeleteDatabase(self, database_link, options=None): """Deletes a database. :param str database_link: The link to the database. :param dict options: The request options for the request. :return: The deleted Database. :rtype: dict """ if options is None: options = {} path = base.GetPathFromLink(database_link) database_id = base.GetResourceIdOrFullNameFromLink(database_link) return self.DeleteResource(path, 'dbs', database_id, None, options)
python
def DeleteDatabase(self, database_link, options=None): """Deletes a database. :param str database_link: The link to the database. :param dict options: The request options for the request. :return: The deleted Database. :rtype: dict """ if options is None: options = {} path = base.GetPathFromLink(database_link) database_id = base.GetResourceIdOrFullNameFromLink(database_link) return self.DeleteResource(path, 'dbs', database_id, None, options)
[ "def", "DeleteDatabase", "(", "self", ",", "database_link", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "{", "}", "path", "=", "base", ".", "GetPathFromLink", "(", "database_link", ")", "database_id", "=", "...
Deletes a database. :param str database_link: The link to the database. :param dict options: The request options for the request. :return: The deleted Database. :rtype: dict
[ "Deletes", "a", "database", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L553-L576
train
224,385
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.CreatePermission
def CreatePermission(self, user_link, permission, options=None): """Creates a permission for a user. :param str user_link: The link to the user entity. :param dict permission: The Azure Cosmos user permission to create. :param dict options: The request options for the request. :return: The created Permission. :rtype: dict """ if options is None: options = {} path, user_id = self._GetUserIdWithPathForPermission(permission, user_link) return self.Create(permission, path, 'permissions', user_id, None, options)
python
def CreatePermission(self, user_link, permission, options=None): """Creates a permission for a user. :param str user_link: The link to the user entity. :param dict permission: The Azure Cosmos user permission to create. :param dict options: The request options for the request. :return: The created Permission. :rtype: dict """ if options is None: options = {} path, user_id = self._GetUserIdWithPathForPermission(permission, user_link) return self.Create(permission, path, 'permissions', user_id, None, options)
[ "def", "CreatePermission", "(", "self", ",", "user_link", ",", "permission", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "{", "}", "path", ",", "user_id", "=", "self", ".", "_GetUserIdWithPathForPermission", "...
Creates a permission for a user. :param str user_link: The link to the user entity. :param dict permission: The Azure Cosmos user permission to create. :param dict options: The request options for the request. :return: The created Permission. :rtype: dict
[ "Creates", "a", "permission", "for", "a", "user", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L578-L603
train
224,386
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.UpsertPermission
def UpsertPermission(self, user_link, permission, options=None): """Upserts a permission for a user. :param str user_link: The link to the user entity. :param dict permission: The Azure Cosmos user permission to upsert. :param dict options: The request options for the request. :return: The upserted permission. :rtype: dict """ if options is None: options = {} path, user_id = self._GetUserIdWithPathForPermission(permission, user_link) return self.Upsert(permission, path, 'permissions', user_id, None, options)
python
def UpsertPermission(self, user_link, permission, options=None): """Upserts a permission for a user. :param str user_link: The link to the user entity. :param dict permission: The Azure Cosmos user permission to upsert. :param dict options: The request options for the request. :return: The upserted permission. :rtype: dict """ if options is None: options = {} path, user_id = self._GetUserIdWithPathForPermission(permission, user_link) return self.Upsert(permission, path, 'permissions', user_id, None, options)
[ "def", "UpsertPermission", "(", "self", ",", "user_link", ",", "permission", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "{", "}", "path", ",", "user_id", "=", "self", ".", "_GetUserIdWithPathForPermission", "...
Upserts a permission for a user. :param str user_link: The link to the user entity. :param dict permission: The Azure Cosmos user permission to upsert. :param dict options: The request options for the request. :return: The upserted permission. :rtype: dict
[ "Upserts", "a", "permission", "for", "a", "user", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L605-L630
train
224,387
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.ReadPermission
def ReadPermission(self, permission_link, options=None): """Reads a permission. :param str permission_link: The link to the permission. :param dict options: The request options for the request. :return: The read permission. :rtype: dict """ if options is None: options = {} path = base.GetPathFromLink(permission_link) permission_id = base.GetResourceIdOrFullNameFromLink(permission_link) return self.Read(path, 'permissions', permission_id, None, options)
python
def ReadPermission(self, permission_link, options=None): """Reads a permission. :param str permission_link: The link to the permission. :param dict options: The request options for the request. :return: The read permission. :rtype: dict """ if options is None: options = {} path = base.GetPathFromLink(permission_link) permission_id = base.GetResourceIdOrFullNameFromLink(permission_link) return self.Read(path, 'permissions', permission_id, None, options)
[ "def", "ReadPermission", "(", "self", ",", "permission_link", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "{", "}", "path", "=", "base", ".", "GetPathFromLink", "(", "permission_link", ")", "permission_id", "=...
Reads a permission. :param str permission_link: The link to the permission. :param dict options: The request options for the request. :return: The read permission. :rtype: dict
[ "Reads", "a", "permission", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L639-L662
train
224,388
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.ReadPermissions
def ReadPermissions(self, user_link, options=None): """Reads all permissions for a user. :param str user_link: The link to the user entity. :param dict options: The request options for the request. :return: Query Iterable of Permissions. :rtype: query_iterable.QueryIterable """ if options is None: options = {} return self.QueryPermissions(user_link, None, options)
python
def ReadPermissions(self, user_link, options=None): """Reads all permissions for a user. :param str user_link: The link to the user entity. :param dict options: The request options for the request. :return: Query Iterable of Permissions. :rtype: query_iterable.QueryIterable """ if options is None: options = {} return self.QueryPermissions(user_link, None, options)
[ "def", "ReadPermissions", "(", "self", ",", "user_link", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "{", "}", "return", "self", ".", "QueryPermissions", "(", "user_link", ",", "None", ",", "options", ")" ]
Reads all permissions for a user. :param str user_link: The link to the user entity. :param dict options: The request options for the request. :return: Query Iterable of Permissions. :rtype: query_iterable.QueryIterable
[ "Reads", "all", "permissions", "for", "a", "user", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L664-L681
train
224,389
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.QueryPermissions
def QueryPermissions(self, user_link, query, options=None): """Queries permissions for a user. :param str user_link: The link to the user entity. :param (str or dict) query: :param dict options: The request options for the request. :return: Query Iterable of Permissions. :rtype: query_iterable.QueryIterable """ if options is None: options = {} path = base.GetPathFromLink(user_link, 'permissions') user_id = base.GetResourceIdOrFullNameFromLink(user_link) def fetch_fn(options): return self.__QueryFeed(path, 'permissions', user_id, lambda r: r['Permissions'], lambda _, b: b, query, options), self.last_response_headers return query_iterable.QueryIterable(self, query, options, fetch_fn)
python
def QueryPermissions(self, user_link, query, options=None): """Queries permissions for a user. :param str user_link: The link to the user entity. :param (str or dict) query: :param dict options: The request options for the request. :return: Query Iterable of Permissions. :rtype: query_iterable.QueryIterable """ if options is None: options = {} path = base.GetPathFromLink(user_link, 'permissions') user_id = base.GetResourceIdOrFullNameFromLink(user_link) def fetch_fn(options): return self.__QueryFeed(path, 'permissions', user_id, lambda r: r['Permissions'], lambda _, b: b, query, options), self.last_response_headers return query_iterable.QueryIterable(self, query, options, fetch_fn)
[ "def", "QueryPermissions", "(", "self", ",", "user_link", ",", "query", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "{", "}", "path", "=", "base", ".", "GetPathFromLink", "(", "user_link", ",", "'permissions...
Queries permissions for a user. :param str user_link: The link to the user entity. :param (str or dict) query: :param dict options: The request options for the request. :return: Query Iterable of Permissions. :rtype: query_iterable.QueryIterable
[ "Queries", "permissions", "for", "a", "user", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L683-L711
train
224,390
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.ReplaceUser
def ReplaceUser(self, user_link, user, options=None): """Replaces a user and return it. :param str user_link: The link to the user entity. :param dict user: :param dict options: The request options for the request. :return: The new User. :rtype: dict """ if options is None: options = {} CosmosClient.__ValidateResource(user) path = base.GetPathFromLink(user_link) user_id = base.GetResourceIdOrFullNameFromLink(user_link) return self.Replace(user, path, 'users', user_id, None, options)
python
def ReplaceUser(self, user_link, user, options=None): """Replaces a user and return it. :param str user_link: The link to the user entity. :param dict user: :param dict options: The request options for the request. :return: The new User. :rtype: dict """ if options is None: options = {} CosmosClient.__ValidateResource(user) path = base.GetPathFromLink(user_link) user_id = base.GetResourceIdOrFullNameFromLink(user_link) return self.Replace(user, path, 'users', user_id, None, options)
[ "def", "ReplaceUser", "(", "self", ",", "user_link", ",", "user", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "{", "}", "CosmosClient", ".", "__ValidateResource", "(", "user", ")", "path", "=", "base", "."...
Replaces a user and return it. :param str user_link: The link to the user entity. :param dict user: :param dict options: The request options for the request. :return: The new User. :rtype: dict
[ "Replaces", "a", "user", "and", "return", "it", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L713-L739
train
224,391
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.DeleteUser
def DeleteUser(self, user_link, options=None): """Deletes a user. :param str user_link: The link to the user entity. :param dict options: The request options for the request. :return: The deleted user. :rtype: dict """ if options is None: options = {} path = base.GetPathFromLink(user_link) user_id = base.GetResourceIdOrFullNameFromLink(user_link) return self.DeleteResource(path, 'users', user_id, None, options)
python
def DeleteUser(self, user_link, options=None): """Deletes a user. :param str user_link: The link to the user entity. :param dict options: The request options for the request. :return: The deleted user. :rtype: dict """ if options is None: options = {} path = base.GetPathFromLink(user_link) user_id = base.GetResourceIdOrFullNameFromLink(user_link) return self.DeleteResource(path, 'users', user_id, None, options)
[ "def", "DeleteUser", "(", "self", ",", "user_link", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "{", "}", "path", "=", "base", ".", "GetPathFromLink", "(", "user_link", ")", "user_id", "=", "base", ".", ...
Deletes a user. :param str user_link: The link to the user entity. :param dict options: The request options for the request. :return: The deleted user. :rtype: dict
[ "Deletes", "a", "user", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L741-L764
train
224,392
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.ReplacePermission
def ReplacePermission(self, permission_link, permission, options=None): """Replaces a permission and return it. :param str permission_link: The link to the permission. :param dict permission: :param dict options: The request options for the request. :return: The new Permission. :rtype: dict """ if options is None: options = {} CosmosClient.__ValidateResource(permission) path = base.GetPathFromLink(permission_link) permission_id = base.GetResourceIdOrFullNameFromLink(permission_link) return self.Replace(permission, path, 'permissions', permission_id, None, options)
python
def ReplacePermission(self, permission_link, permission, options=None): """Replaces a permission and return it. :param str permission_link: The link to the permission. :param dict permission: :param dict options: The request options for the request. :return: The new Permission. :rtype: dict """ if options is None: options = {} CosmosClient.__ValidateResource(permission) path = base.GetPathFromLink(permission_link) permission_id = base.GetResourceIdOrFullNameFromLink(permission_link) return self.Replace(permission, path, 'permissions', permission_id, None, options)
[ "def", "ReplacePermission", "(", "self", ",", "permission_link", ",", "permission", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "{", "}", "CosmosClient", ".", "__ValidateResource", "(", "permission", ")", "path"...
Replaces a permission and return it. :param str permission_link: The link to the permission. :param dict permission: :param dict options: The request options for the request. :return: The new Permission. :rtype: dict
[ "Replaces", "a", "permission", "and", "return", "it", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L766-L792
train
224,393
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.DeletePermission
def DeletePermission(self, permission_link, options=None): """Deletes a permission. :param str permission_link: The link to the permission. :param dict options: The request options for the request. :return: The deleted Permission. :rtype: dict """ if options is None: options = {} path = base.GetPathFromLink(permission_link) permission_id = base.GetResourceIdOrFullNameFromLink(permission_link) return self.DeleteResource(path, 'permissions', permission_id, None, options)
python
def DeletePermission(self, permission_link, options=None): """Deletes a permission. :param str permission_link: The link to the permission. :param dict options: The request options for the request. :return: The deleted Permission. :rtype: dict """ if options is None: options = {} path = base.GetPathFromLink(permission_link) permission_id = base.GetResourceIdOrFullNameFromLink(permission_link) return self.DeleteResource(path, 'permissions', permission_id, None, options)
[ "def", "DeletePermission", "(", "self", ",", "permission_link", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "{", "}", "path", "=", "base", ".", "GetPathFromLink", "(", "permission_link", ")", "permission_id", ...
Deletes a permission. :param str permission_link: The link to the permission. :param dict options: The request options for the request. :return: The deleted Permission. :rtype: dict
[ "Deletes", "a", "permission", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L794-L817
train
224,394
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.ReadItems
def ReadItems(self, collection_link, feed_options=None): """Reads all documents in a collection. :param str collection_link: The link to the document collection. :param dict feed_options: :return: Query Iterable of Documents. :rtype: query_iterable.QueryIterable """ if feed_options is None: feed_options = {} return self.QueryItems(collection_link, None, feed_options)
python
def ReadItems(self, collection_link, feed_options=None): """Reads all documents in a collection. :param str collection_link: The link to the document collection. :param dict feed_options: :return: Query Iterable of Documents. :rtype: query_iterable.QueryIterable """ if feed_options is None: feed_options = {} return self.QueryItems(collection_link, None, feed_options)
[ "def", "ReadItems", "(", "self", ",", "collection_link", ",", "feed_options", "=", "None", ")", ":", "if", "feed_options", "is", "None", ":", "feed_options", "=", "{", "}", "return", "self", ".", "QueryItems", "(", "collection_link", ",", "None", ",", "fee...
Reads all documents in a collection. :param str collection_link: The link to the document collection. :param dict feed_options: :return: Query Iterable of Documents. :rtype: query_iterable.QueryIterable
[ "Reads", "all", "documents", "in", "a", "collection", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L819-L835
train
224,395
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.QueryItems
def QueryItems(self, database_or_Container_link, query, options=None, partition_key=None): """Queries documents in a collection. :param str database_or_Container_link: The link to the database when using partitioning, otherwise link to the document collection. :param (str or dict) query: :param dict options: The request options for the request. :param str partition_key: Partition key for the query(default value None) :return: Query Iterable of Documents. :rtype: query_iterable.QueryIterable """ database_or_Container_link = base.TrimBeginningAndEndingSlashes(database_or_Container_link) if options is None: options = {} if(base.IsDatabaseLink(database_or_Container_link)): # Python doesn't have a good way of specifying an overloaded constructor, and this is how it's generally overloaded constructors are specified(by calling a @classmethod) and returning the 'self' instance return query_iterable.QueryIterable.PartitioningQueryIterable(self, query, options, database_or_Container_link, partition_key) else: path = base.GetPathFromLink(database_or_Container_link, 'docs') collection_id = base.GetResourceIdOrFullNameFromLink(database_or_Container_link) def fetch_fn(options): return self.__QueryFeed(path, 'docs', collection_id, lambda r: r['Documents'], lambda _, b: b, query, options), self.last_response_headers return query_iterable.QueryIterable(self, query, options, fetch_fn, database_or_Container_link)
python
def QueryItems(self, database_or_Container_link, query, options=None, partition_key=None): """Queries documents in a collection. :param str database_or_Container_link: The link to the database when using partitioning, otherwise link to the document collection. :param (str or dict) query: :param dict options: The request options for the request. :param str partition_key: Partition key for the query(default value None) :return: Query Iterable of Documents. :rtype: query_iterable.QueryIterable """ database_or_Container_link = base.TrimBeginningAndEndingSlashes(database_or_Container_link) if options is None: options = {} if(base.IsDatabaseLink(database_or_Container_link)): # Python doesn't have a good way of specifying an overloaded constructor, and this is how it's generally overloaded constructors are specified(by calling a @classmethod) and returning the 'self' instance return query_iterable.QueryIterable.PartitioningQueryIterable(self, query, options, database_or_Container_link, partition_key) else: path = base.GetPathFromLink(database_or_Container_link, 'docs') collection_id = base.GetResourceIdOrFullNameFromLink(database_or_Container_link) def fetch_fn(options): return self.__QueryFeed(path, 'docs', collection_id, lambda r: r['Documents'], lambda _, b: b, query, options), self.last_response_headers return query_iterable.QueryIterable(self, query, options, fetch_fn, database_or_Container_link)
[ "def", "QueryItems", "(", "self", ",", "database_or_Container_link", ",", "query", ",", "options", "=", "None", ",", "partition_key", "=", "None", ")", ":", "database_or_Container_link", "=", "base", ".", "TrimBeginningAndEndingSlashes", "(", "database_or_Container_li...
Queries documents in a collection. :param str database_or_Container_link: The link to the database when using partitioning, otherwise link to the document collection. :param (str or dict) query: :param dict options: The request options for the request. :param str partition_key: Partition key for the query(default value None) :return: Query Iterable of Documents. :rtype: query_iterable.QueryIterable
[ "Queries", "documents", "in", "a", "collection", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L837-L873
train
224,396
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient.QueryItemsChangeFeed
def QueryItemsChangeFeed(self, collection_link, options=None): """Queries documents change feed in a collection. :param str collection_link: The link to the document collection. :param dict options: The request options for the request. options may also specify partition key range id. :return: Query Iterable of Documents. :rtype: query_iterable.QueryIterable """ partition_key_range_id = None if options is not None and 'partitionKeyRangeId' in options: partition_key_range_id = options['partitionKeyRangeId'] return self._QueryChangeFeed(collection_link, "Documents" , options, partition_key_range_id)
python
def QueryItemsChangeFeed(self, collection_link, options=None): """Queries documents change feed in a collection. :param str collection_link: The link to the document collection. :param dict options: The request options for the request. options may also specify partition key range id. :return: Query Iterable of Documents. :rtype: query_iterable.QueryIterable """ partition_key_range_id = None if options is not None and 'partitionKeyRangeId' in options: partition_key_range_id = options['partitionKeyRangeId'] return self._QueryChangeFeed(collection_link, "Documents" , options, partition_key_range_id)
[ "def", "QueryItemsChangeFeed", "(", "self", ",", "collection_link", ",", "options", "=", "None", ")", ":", "partition_key_range_id", "=", "None", "if", "options", "is", "not", "None", "and", "'partitionKeyRangeId'", "in", "options", ":", "partition_key_range_id", ...
Queries documents change feed in a collection. :param str collection_link: The link to the document collection. :param dict options: The request options for the request. options may also specify partition key range id. :return: Query Iterable of Documents. :rtype: query_iterable.QueryIterable
[ "Queries", "documents", "change", "feed", "in", "a", "collection", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L875-L895
train
224,397
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient._QueryChangeFeed
def _QueryChangeFeed(self, collection_link, resource_type, options=None, partition_key_range_id=None): """Queries change feed of a resource in a collection. :param str collection_link: The link to the document collection. :param str resource_type: The type of the resource. :param dict options: The request options for the request. :param str partition_key_range_id: Specifies partition key range id. :return: Query Iterable of Documents. :rtype: query_iterable.QueryIterable """ if options is None: options = {} options['changeFeed'] = True resource_key_map = {'Documents' : 'docs'} # For now, change feed only supports Documents and Partition Key Range resouce type if resource_type not in resource_key_map: raise NotImplementedError(resource_type + " change feed query is not supported.") resource_key = resource_key_map[resource_type] path = base.GetPathFromLink(collection_link, resource_key) collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) def fetch_fn(options): return self.__QueryFeed(path, resource_key, collection_id, lambda r: r[resource_type], lambda _, b: b, None, options, partition_key_range_id), self.last_response_headers return query_iterable.QueryIterable(self, None, options, fetch_fn, collection_link)
python
def _QueryChangeFeed(self, collection_link, resource_type, options=None, partition_key_range_id=None): """Queries change feed of a resource in a collection. :param str collection_link: The link to the document collection. :param str resource_type: The type of the resource. :param dict options: The request options for the request. :param str partition_key_range_id: Specifies partition key range id. :return: Query Iterable of Documents. :rtype: query_iterable.QueryIterable """ if options is None: options = {} options['changeFeed'] = True resource_key_map = {'Documents' : 'docs'} # For now, change feed only supports Documents and Partition Key Range resouce type if resource_type not in resource_key_map: raise NotImplementedError(resource_type + " change feed query is not supported.") resource_key = resource_key_map[resource_type] path = base.GetPathFromLink(collection_link, resource_key) collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) def fetch_fn(options): return self.__QueryFeed(path, resource_key, collection_id, lambda r: r[resource_type], lambda _, b: b, None, options, partition_key_range_id), self.last_response_headers return query_iterable.QueryIterable(self, None, options, fetch_fn, collection_link)
[ "def", "_QueryChangeFeed", "(", "self", ",", "collection_link", ",", "resource_type", ",", "options", "=", "None", ",", "partition_key_range_id", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "{", "}", "options", "[", "'changeFee...
Queries change feed of a resource in a collection. :param str collection_link: The link to the document collection. :param str resource_type: The type of the resource. :param dict options: The request options for the request. :param str partition_key_range_id: Specifies partition key range id. :return: Query Iterable of Documents. :rtype: query_iterable.QueryIterable
[ "Queries", "change", "feed", "of", "a", "resource", "in", "a", "collection", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L897-L937
train
224,398
Azure/azure-cosmos-python
azure/cosmos/cosmos_client.py
CosmosClient._ReadPartitionKeyRanges
def _ReadPartitionKeyRanges(self, collection_link, feed_options=None): """Reads Partition Key Ranges. :param str collection_link: The link to the document collection. :param dict feed_options: :return: Query Iterable of PartitionKeyRanges. :rtype: query_iterable.QueryIterable """ if feed_options is None: feed_options = {} return self._QueryPartitionKeyRanges(collection_link, None, feed_options)
python
def _ReadPartitionKeyRanges(self, collection_link, feed_options=None): """Reads Partition Key Ranges. :param str collection_link: The link to the document collection. :param dict feed_options: :return: Query Iterable of PartitionKeyRanges. :rtype: query_iterable.QueryIterable """ if feed_options is None: feed_options = {} return self._QueryPartitionKeyRanges(collection_link, None, feed_options)
[ "def", "_ReadPartitionKeyRanges", "(", "self", ",", "collection_link", ",", "feed_options", "=", "None", ")", ":", "if", "feed_options", "is", "None", ":", "feed_options", "=", "{", "}", "return", "self", ".", "_QueryPartitionKeyRanges", "(", "collection_link", ...
Reads Partition Key Ranges. :param str collection_link: The link to the document collection. :param dict feed_options: :return: Query Iterable of PartitionKeyRanges. :rtype: query_iterable.QueryIterable
[ "Reads", "Partition", "Key", "Ranges", "." ]
dd01b3c5d308c6da83cfcaa0ab7083351a476353
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/cosmos_client.py#L939-L955
train
224,399