id
int32
0
252k
repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
51
19.8k
code_tokens
list
docstring
stringlengths
3
17.3k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
87
242
226,300
droope/droopescan
dscan/common/update_api.py
GitRepo.clone
def clone(self): """ Clones a directory based on the clone_url and plugin_name given to the constructor. The clone will be located at self.path. """ base_dir = '/'.join(self.path.split('/')[:-2]) try: os.makedirs(base_dir, 0o700) except OSError: # Raises an error exception if the leaf directory already exists. pass self._cmd(['git', 'clone', self._clone_url, self.path], cwd=os.getcwd())
python
def clone(self):
    """Clone the git repository at ``self._clone_url`` into ``self.path``.

    Creates the parent directory first when it does not exist yet.

    @raise OSError: the parent directory could not be created for a
        reason other than already existing (e.g. permission denied).
    """
    import errno
    base_dir = '/'.join(self.path.split('/')[:-2])
    try:
        os.makedirs(base_dir, 0o700)
    except OSError as e:
        # Only ignore "directory already exists"; re-raise real failures
        # (permission denied, read-only filesystem, ...) instead of
        # silently swallowing them as the previous bare handler did.
        if e.errno != errno.EEXIST:
            raise
    self._cmd(['git', 'clone', self._clone_url, self.path], cwd=os.getcwd())
[ "def", "clone", "(", "self", ")", ":", "base_dir", "=", "'/'", ".", "join", "(", "self", ".", "path", ".", "split", "(", "'/'", ")", "[", ":", "-", "2", "]", ")", "try", ":", "os", ".", "makedirs", "(", "base_dir", ",", "0o700", ")", "except", ...
Clones a directory based on the clone_url and plugin_name given to the constructor. The clone will be located at self.path.
[ "Clones", "a", "directory", "based", "on", "the", "clone_url", "and", "plugin_name", "given", "to", "the", "constructor", ".", "The", "clone", "will", "be", "located", "at", "self", ".", "path", "." ]
424c48a0f9d12b4536dbef5a786f0fbd4ce9519a
https://github.com/droope/droopescan/blob/424c48a0f9d12b4536dbef5a786f0fbd4ce9519a/dscan/common/update_api.py#L317-L329
226,301
droope/droopescan
dscan/common/update_api.py
GitRepo.tags_newer
def tags_newer(self, versions_file, majors): """ Checks this git repo tags for newer versions. @param versions_file: a common.VersionsFile instance to check against. @param majors: a list of major branches to check. E.g. ['6', '7'] @raise RuntimeError: no newer tags were found. @raise MissingMajorException: A new version from a newer major branch is exists, but hasn't been downloaded due to it not being in majors. """ highest = versions_file.highest_version_major(majors) all = self.tags_get() newer = _newer_tags_get(highest, all) if len(newer) == 0: raise RuntimeError("No new tags found.") return newer
python
def tags_newer(self, versions_file, majors):
    """Check this git repo's tags for versions newer than the known ones.

    @param versions_file: a common.VersionsFile instance to check against.
    @param majors: a list of major branches to check. E.g. ['6', '7']
    @return: the list of newer tags.
    @raise RuntimeError: no newer tags were found.
    @raise MissingMajorException: a new version from a newer major branch
        exists, but hasn't been downloaded due to it not being in majors.
    """
    highest = versions_file.highest_version_major(majors)
    # Renamed from `all`, which shadowed the builtin of the same name.
    all_tags = self.tags_get()
    newer = _newer_tags_get(highest, all_tags)
    if len(newer) == 0:
        raise RuntimeError("No new tags found.")
    return newer
[ "def", "tags_newer", "(", "self", ",", "versions_file", ",", "majors", ")", ":", "highest", "=", "versions_file", ".", "highest_version_major", "(", "majors", ")", "all", "=", "self", ".", "tags_get", "(", ")", "newer", "=", "_newer_tags_get", "(", "highest"...
Checks this git repo's tags for newer versions. @param versions_file: a common.VersionsFile instance to check against. @param majors: a list of major branches to check. E.g. ['6', '7'] @raise RuntimeError: no newer tags were found. @raise MissingMajorException: A new version from a newer major branch exists, but hasn't been downloaded due to it not being in majors.
[ "Checks", "this", "git", "repo", "tags", "for", "newer", "versions", "." ]
424c48a0f9d12b4536dbef5a786f0fbd4ce9519a
https://github.com/droope/droopescan/blob/424c48a0f9d12b4536dbef5a786f0fbd4ce9519a/dscan/common/update_api.py#L337-L355
226,302
droope/droopescan
dscan/common/plugins_util.py
get_rfu
def get_rfu(): """ Returns a list of al "regular file urls" for all plugins. """ global _rfu if _rfu: return _rfu plugins = plugins_base_get() rfu = [] for plugin in plugins: if isinstance(plugin.regular_file_url, str): rfu.append(plugin.regular_file_url) else: rfu += plugin.regular_file_url _rfu = rfu return rfu
python
def get_rfu():
    """Return the list of "regular file urls" across all plugins.

    The result is memoised in the module-level ``_rfu`` cache, so the
    plugin list is only walked once per process.
    """
    global _rfu
    if _rfu:
        return _rfu
    urls = []
    for plugin in plugins_base_get():
        # A plugin may declare either a single url (str) or a list of them.
        if isinstance(plugin.regular_file_url, str):
            urls.append(plugin.regular_file_url)
        else:
            urls.extend(plugin.regular_file_url)
    _rfu = urls
    return urls
[ "def", "get_rfu", "(", ")", ":", "global", "_rfu", "if", "_rfu", ":", "return", "_rfu", "plugins", "=", "plugins_base_get", "(", ")", "rfu", "=", "[", "]", "for", "plugin", "in", "plugins", ":", "if", "isinstance", "(", "plugin", ".", "regular_file_url",...
Returns a list of all "regular file urls" for all plugins.
[ "Returns", "a", "list", "of", "al", "regular", "file", "urls", "for", "all", "plugins", "." ]
424c48a0f9d12b4536dbef5a786f0fbd4ce9519a
https://github.com/droope/droopescan/blob/424c48a0f9d12b4536dbef5a786f0fbd4ce9519a/dscan/common/plugins_util.py#L46-L64
226,303
droope/droopescan
dscan/common/plugins_util.py
plugin_get_rfu
def plugin_get_rfu(plugin): """ Returns "regular file urls" for a particular plugin. @param plugin: plugin class. """ if isinstance(plugin.regular_file_url, str): rfu = [plugin.regular_file_url] else: rfu = plugin.regular_file_url return rfu
python
def plugin_get_rfu(plugin):
    """Return the "regular file urls" for a single plugin.

    @param plugin: plugin class.
    @return: a list of regular file urls (a single str url is wrapped
        in a one-element list; a list is returned unchanged).
    """
    url_or_urls = plugin.regular_file_url
    if isinstance(url_or_urls, str):
        return [url_or_urls]
    return url_or_urls
[ "def", "plugin_get_rfu", "(", "plugin", ")", ":", "if", "isinstance", "(", "plugin", ".", "regular_file_url", ",", "str", ")", ":", "rfu", "=", "[", "plugin", ".", "regular_file_url", "]", "else", ":", "rfu", "=", "plugin", ".", "regular_file_url", "return...
Returns "regular file urls" for a particular plugin. @param plugin: plugin class.
[ "Returns", "regular", "file", "urls", "for", "a", "particular", "plugin", "." ]
424c48a0f9d12b4536dbef5a786f0fbd4ce9519a
https://github.com/droope/droopescan/blob/424c48a0f9d12b4536dbef5a786f0fbd4ce9519a/dscan/common/plugins_util.py#L66-L76
226,304
droope/droopescan
dscan/common/plugins_util.py
plugin_get
def plugin_get(name): """ Return plugin class. @param name: the cms label. """ plugins = plugins_base_get() for plugin in plugins: if plugin.Meta.label == name: return plugin raise RuntimeError('CMS "%s" not known.' % name)
python
def plugin_get(name):
    """Return the plugin class whose cms label matches *name*.

    @param name: the cms label.
    @raise RuntimeError: when no plugin carries that label.
    """
    for candidate in plugins_base_get():
        if candidate.Meta.label == name:
            return candidate
    raise RuntimeError('CMS "%s" not known.' % name)
[ "def", "plugin_get", "(", "name", ")", ":", "plugins", "=", "plugins_base_get", "(", ")", "for", "plugin", "in", "plugins", ":", "if", "plugin", ".", "Meta", ".", "label", "==", "name", ":", "return", "plugin", "raise", "RuntimeError", "(", "'CMS \"%s\" no...
Return plugin class. @param name: the cms label.
[ "Return", "plugin", "class", "." ]
424c48a0f9d12b4536dbef5a786f0fbd4ce9519a
https://github.com/droope/droopescan/blob/424c48a0f9d12b4536dbef5a786f0fbd4ce9519a/dscan/common/plugins_util.py#L100-L110
226,305
stefanfoulis/django-phonenumber-field
phonenumber_field/modelfields.py
PhoneNumberField.get_prep_value
def get_prep_value(self, value): """ Perform preliminary non-db specific value checks and conversions. """ if value: if not isinstance(value, PhoneNumber): value = to_python(value) if value.is_valid(): format_string = getattr(settings, "PHONENUMBER_DB_FORMAT", "E164") fmt = PhoneNumber.format_map[format_string] value = value.format_as(fmt) else: value = self.get_default() return super(PhoneNumberField, self).get_prep_value(value)
python
def get_prep_value(self, value):
    """Perform preliminary non-db specific value checks and conversions."""
    # Falsy values (None, empty string, ...) fall back to the field default.
    if not value:
        return super(PhoneNumberField, self).get_prep_value(self.get_default())
    if not isinstance(value, PhoneNumber):
        value = to_python(value)
    if value.is_valid():
        # Valid numbers are stored in the format configured via settings.
        db_format = getattr(settings, "PHONENUMBER_DB_FORMAT", "E164")
        value = value.format_as(PhoneNumber.format_map[db_format])
    return super(PhoneNumberField, self).get_prep_value(value)
[ "def", "get_prep_value", "(", "self", ",", "value", ")", ":", "if", "value", ":", "if", "not", "isinstance", "(", "value", ",", "PhoneNumber", ")", ":", "value", "=", "to_python", "(", "value", ")", "if", "value", ".", "is_valid", "(", ")", ":", "for...
Perform preliminary non-db specific value checks and conversions.
[ "Perform", "preliminary", "non", "-", "db", "specific", "value", "checks", "and", "conversions", "." ]
b0e5dd0d4cc74523751aec6ae181f74b8b93b5d4
https://github.com/stefanfoulis/django-phonenumber-field/blob/b0e5dd0d4cc74523751aec6ae181f74b8b93b5d4/phonenumber_field/modelfields.py#L73-L86
226,306
erikrose/more-itertools
more_itertools/more.py
unique_to_each
def unique_to_each(*iterables): """Return the elements from each of the input iterables that aren't in the other input iterables. For example, suppose you have a set of packages, each with a set of dependencies:: {'pkg_1': {'A', 'B'}, 'pkg_2': {'B', 'C'}, 'pkg_3': {'B', 'D'}} If you remove one package, which dependencies can also be removed? If ``pkg_1`` is removed, then ``A`` is no longer necessary - it is not associated with ``pkg_2`` or ``pkg_3``. Similarly, ``C`` is only needed for ``pkg_2``, and ``D`` is only needed for ``pkg_3``:: >>> unique_to_each({'A', 'B'}, {'B', 'C'}, {'B', 'D'}) [['A'], ['C'], ['D']] If there are duplicates in one input iterable that aren't in the others they will be duplicated in the output. Input order is preserved:: >>> unique_to_each("mississippi", "missouri") [['p', 'p'], ['o', 'u', 'r']] It is assumed that the elements of each iterable are hashable. """ pool = [list(it) for it in iterables] counts = Counter(chain.from_iterable(map(set, pool))) uniques = {element for element in counts if counts[element] == 1} return [list(filter(uniques.__contains__, it)) for it in pool]
python
def unique_to_each(*iterables):
    """Return the elements from each of the input iterables that aren't in
    the other input iterables.

    >>> unique_to_each({'A', 'B'}, {'B', 'C'}, {'B', 'D'})
    [['A'], ['C'], ['D']]

    Duplicates that are unique to one iterable are kept, and input order
    is preserved:

    >>> unique_to_each("mississippi", "missouri")
    [['p', 'p'], ['o', 'u', 'r']]

    It is assumed that the elements of each iterable are hashable.
    """
    materialized = [list(it) for it in iterables]
    # Count in how many distinct inputs each element appears.
    tally = Counter()
    for seq in materialized:
        tally.update(set(seq))
    singletons = {item for item, count in tally.items() if count == 1}
    return [[item for item in seq if item in singletons]
            for seq in materialized]
[ "def", "unique_to_each", "(", "*", "iterables", ")", ":", "pool", "=", "[", "list", "(", "it", ")", "for", "it", "in", "iterables", "]", "counts", "=", "Counter", "(", "chain", ".", "from_iterable", "(", "map", "(", "set", ",", "pool", ")", ")", ")...
Return the elements from each of the input iterables that aren't in the other input iterables. For example, suppose you have a set of packages, each with a set of dependencies:: {'pkg_1': {'A', 'B'}, 'pkg_2': {'B', 'C'}, 'pkg_3': {'B', 'D'}} If you remove one package, which dependencies can also be removed? If ``pkg_1`` is removed, then ``A`` is no longer necessary - it is not associated with ``pkg_2`` or ``pkg_3``. Similarly, ``C`` is only needed for ``pkg_2``, and ``D`` is only needed for ``pkg_3``:: >>> unique_to_each({'A', 'B'}, {'B', 'C'}, {'B', 'D'}) [['A'], ['C'], ['D']] If there are duplicates in one input iterable that aren't in the others they will be duplicated in the output. Input order is preserved:: >>> unique_to_each("mississippi", "missouri") [['p', 'p'], ['o', 'u', 'r']] It is assumed that the elements of each iterable are hashable.
[ "Return", "the", "elements", "from", "each", "of", "the", "input", "iterables", "that", "aren", "t", "in", "the", "other", "input", "iterables", "." ]
6a91b4e25c8e12fcf9fc2b53cf8ee0fba293e6f9
https://github.com/erikrose/more-itertools/blob/6a91b4e25c8e12fcf9fc2b53cf8ee0fba293e6f9/more_itertools/more.py#L595-L625
226,307
erikrose/more-itertools
more_itertools/more.py
interleave_longest
def interleave_longest(*iterables): """Return a new iterable yielding from each iterable in turn, skipping any that are exhausted. >>> list(interleave_longest([1, 2, 3], [4, 5], [6, 7, 8])) [1, 4, 6, 2, 5, 7, 3, 8] This function produces the same output as :func:`roundrobin`, but may perform better for some inputs (in particular when the number of iterables is large). """ i = chain.from_iterable(zip_longest(*iterables, fillvalue=_marker)) return (x for x in i if x is not _marker)
python
def interleave_longest(*iterables):
    """Return a new iterable yielding from each iterable in turn,
    skipping any that are exhausted.

    >>> list(interleave_longest([1, 2, 3], [4, 5], [6, 7, 8]))
    [1, 4, 6, 2, 5, 7, 3, 8]

    This function produces the same output as :func:`roundrobin`, but may
    perform better for some inputs (in particular when the number of
    iterables is large).
    """
    # Pad the shorter iterables with the module sentinel, then drop it.
    padded = zip_longest(*iterables, fillvalue=_marker)
    return (item for item in chain.from_iterable(padded)
            if item is not _marker)
[ "def", "interleave_longest", "(", "*", "iterables", ")", ":", "i", "=", "chain", ".", "from_iterable", "(", "zip_longest", "(", "*", "iterables", ",", "fillvalue", "=", "_marker", ")", ")", "return", "(", "x", "for", "x", "in", "i", "if", "x", "is", ...
Return a new iterable yielding from each iterable in turn, skipping any that are exhausted. >>> list(interleave_longest([1, 2, 3], [4, 5], [6, 7, 8])) [1, 4, 6, 2, 5, 7, 3, 8] This function produces the same output as :func:`roundrobin`, but may perform better for some inputs (in particular when the number of iterables is large).
[ "Return", "a", "new", "iterable", "yielding", "from", "each", "iterable", "in", "turn", "skipping", "any", "that", "are", "exhausted", "." ]
6a91b4e25c8e12fcf9fc2b53cf8ee0fba293e6f9
https://github.com/erikrose/more-itertools/blob/6a91b4e25c8e12fcf9fc2b53cf8ee0fba293e6f9/more_itertools/more.py#L891-L904
226,308
erikrose/more-itertools
more_itertools/recipes.py
unique_everseen
def unique_everseen(iterable, key=None): """ Yield unique elements, preserving order. >>> list(unique_everseen('AAAABBBCCDAABBB')) ['A', 'B', 'C', 'D'] >>> list(unique_everseen('ABBCcAD', str.lower)) ['A', 'B', 'C', 'D'] Sequences with a mix of hashable and unhashable items can be used. The function will be slower (i.e., `O(n^2)`) for unhashable items. Remember that ``list`` objects are unhashable - you can use the *key* parameter to transform the list to a tuple (which is hashable) to avoid a slowdown. >>> iterable = ([1, 2], [2, 3], [1, 2]) >>> list(unique_everseen(iterable)) # Slow [[1, 2], [2, 3]] >>> list(unique_everseen(iterable, key=tuple)) # Faster [[1, 2], [2, 3]] Similary, you may want to convert unhashable ``set`` objects with ``key=frozenset``. For ``dict`` objects, ``key=lambda x: frozenset(x.items())`` can be used. """ seenset = set() seenset_add = seenset.add seenlist = [] seenlist_add = seenlist.append if key is None: for element in iterable: try: if element not in seenset: seenset_add(element) yield element except TypeError: if element not in seenlist: seenlist_add(element) yield element else: for element in iterable: k = key(element) try: if k not in seenset: seenset_add(k) yield element except TypeError: if k not in seenlist: seenlist_add(k) yield element
python
def unique_everseen(iterable, key=None):
    """Yield unique elements, preserving order.

    >>> list(unique_everseen('AAAABBBCCDAABBB'))
    ['A', 'B', 'C', 'D']
    >>> list(unique_everseen('ABBCcAD', str.lower))
    ['A', 'B', 'C', 'D']

    Sequences with a mix of hashable and unhashable items can be used;
    unhashable items fall back to a slower (O(n^2)) list-based check.
    Remember that ``list`` objects are unhashable - you can use the *key*
    parameter to transform the list to a tuple (which is hashable) to
    avoid a slowdown:

    >>> iterable = ([1, 2], [2, 3], [1, 2])
    >>> list(unique_everseen(iterable))  # Slow
    [[1, 2], [2, 3]]
    >>> list(unique_everseen(iterable, key=tuple))  # Faster
    [[1, 2], [2, 3]]

    Similarly, you may want to convert unhashable ``set`` objects with
    ``key=frozenset``. For ``dict`` objects,
    ``key=lambda x: frozenset(x.items())`` can be used.
    """
    hashable_seen = set()
    unhashable_seen = []
    for element in iterable:
        k = element if key is None else key(element)
        try:
            is_new = k not in hashable_seen
            if is_new:
                hashable_seen.add(k)
        except TypeError:
            # Unhashable key: linear scan of the fallback list.
            is_new = k not in unhashable_seen
            if is_new:
                unhashable_seen.append(k)
        if is_new:
            yield element
[ "def", "unique_everseen", "(", "iterable", ",", "key", "=", "None", ")", ":", "seenset", "=", "set", "(", ")", "seenset_add", "=", "seenset", ".", "add", "seenlist", "=", "[", "]", "seenlist_add", "=", "seenlist", ".", "append", "if", "key", "is", "Non...
Yield unique elements, preserving order. >>> list(unique_everseen('AAAABBBCCDAABBB')) ['A', 'B', 'C', 'D'] >>> list(unique_everseen('ABBCcAD', str.lower)) ['A', 'B', 'C', 'D'] Sequences with a mix of hashable and unhashable items can be used. The function will be slower (i.e., `O(n^2)`) for unhashable items. Remember that ``list`` objects are unhashable - you can use the *key* parameter to transform the list to a tuple (which is hashable) to avoid a slowdown. >>> iterable = ([1, 2], [2, 3], [1, 2]) >>> list(unique_everseen(iterable)) # Slow [[1, 2], [2, 3]] >>> list(unique_everseen(iterable, key=tuple)) # Faster [[1, 2], [2, 3]] Similarly, you may want to convert unhashable ``set`` objects with ``key=frozenset``. For ``dict`` objects, ``key=lambda x: frozenset(x.items())`` can be used.
[ "Yield", "unique", "elements", "preserving", "order", "." ]
6a91b4e25c8e12fcf9fc2b53cf8ee0fba293e6f9
https://github.com/erikrose/more-itertools/blob/6a91b4e25c8e12fcf9fc2b53cf8ee0fba293e6f9/more_itertools/recipes.py#L346-L397
226,309
erikrose/more-itertools
more_itertools/recipes.py
unique_justseen
def unique_justseen(iterable, key=None): """Yields elements in order, ignoring serial duplicates >>> list(unique_justseen('AAAABBBCCDAABBB')) ['A', 'B', 'C', 'D', 'A', 'B'] >>> list(unique_justseen('ABBCcAD', str.lower)) ['A', 'B', 'C', 'A', 'D'] """ return map(next, map(operator.itemgetter(1), groupby(iterable, key)))
python
def unique_justseen(iterable, key=None):
    """Yields elements in order, ignoring serial duplicates

    >>> list(unique_justseen('AAAABBBCCDAABBB'))
    ['A', 'B', 'C', 'D', 'A', 'B']
    >>> list(unique_justseen('ABBCcAD', str.lower))
    ['A', 'B', 'C', 'A', 'D']
    """
    # groupby collapses consecutive equal-keyed runs; keep each run's head.
    return (next(group) for _, group in groupby(iterable, key))
[ "def", "unique_justseen", "(", "iterable", ",", "key", "=", "None", ")", ":", "return", "map", "(", "next", ",", "map", "(", "operator", ".", "itemgetter", "(", "1", ")", ",", "groupby", "(", "iterable", ",", "key", ")", ")", ")" ]
Yields elements in order, ignoring serial duplicates >>> list(unique_justseen('AAAABBBCCDAABBB')) ['A', 'B', 'C', 'D', 'A', 'B'] >>> list(unique_justseen('ABBCcAD', str.lower)) ['A', 'B', 'C', 'A', 'D']
[ "Yields", "elements", "in", "order", "ignoring", "serial", "duplicates" ]
6a91b4e25c8e12fcf9fc2b53cf8ee0fba293e6f9
https://github.com/erikrose/more-itertools/blob/6a91b4e25c8e12fcf9fc2b53cf8ee0fba293e6f9/more_itertools/recipes.py#L400-L409
226,310
erikrose/more-itertools
more_itertools/recipes.py
random_product
def random_product(*args, **kwds): """Draw an item at random from each of the input iterables. >>> random_product('abc', range(4), 'XYZ') # doctest:+SKIP ('c', 3, 'Z') If *repeat* is provided as a keyword argument, that many items will be drawn from each iterable. >>> random_product('abcd', range(4), repeat=2) # doctest:+SKIP ('a', 2, 'd', 3) This equivalent to taking a random selection from ``itertools.product(*args, **kwarg)``. """ pools = [tuple(pool) for pool in args] * kwds.get('repeat', 1) return tuple(choice(pool) for pool in pools)
python
def random_product(*args, **kwds):
    """Draw an item at random from each of the input iterables.

    >>> random_product('abc', range(4), 'XYZ')  # doctest:+SKIP
    ('c', 3, 'Z')

    If *repeat* is provided as a keyword argument, that many items will
    be drawn from each iterable.

    >>> random_product('abcd', range(4), repeat=2)  # doctest:+SKIP
    ('a', 2, 'd', 3)

    This is equivalent to taking a random selection from
    ``itertools.product(*args, **kwargs)``.
    """
    repeat = kwds.get('repeat', 1)
    # Materialize every iterable once, then repeat the pool list itself.
    pools = [tuple(iterable) for iterable in args] * repeat
    return tuple(choice(pool) for pool in pools)
[ "def", "random_product", "(", "*", "args", ",", "*", "*", "kwds", ")", ":", "pools", "=", "[", "tuple", "(", "pool", ")", "for", "pool", "in", "args", "]", "*", "kwds", ".", "get", "(", "'repeat'", ",", "1", ")", "return", "tuple", "(", "choice",...
Draw an item at random from each of the input iterables. >>> random_product('abc', range(4), 'XYZ') # doctest:+SKIP ('c', 3, 'Z') If *repeat* is provided as a keyword argument, that many items will be drawn from each iterable. >>> random_product('abcd', range(4), repeat=2) # doctest:+SKIP ('a', 2, 'd', 3) This is equivalent to taking a random selection from ``itertools.product(*args, **kwargs)``.
[ "Draw", "an", "item", "at", "random", "from", "each", "of", "the", "input", "iterables", "." ]
6a91b4e25c8e12fcf9fc2b53cf8ee0fba293e6f9
https://github.com/erikrose/more-itertools/blob/6a91b4e25c8e12fcf9fc2b53cf8ee0fba293e6f9/more_itertools/recipes.py#L453-L470
226,311
adrienverge/yamllint
yamllint/linter.py
run
def run(input, conf, filepath=None): """Lints a YAML source. Returns a generator of LintProblem objects. :param input: buffer, string or stream to read from :param conf: yamllint configuration object """ if conf.is_file_ignored(filepath): return () if isinstance(input, (type(b''), type(u''))): # compat with Python 2 & 3 return _run(input, conf, filepath) elif hasattr(input, 'read'): # Python 2's file or Python 3's io.IOBase # We need to have everything in memory to parse correctly content = input.read() return _run(content, conf, filepath) else: raise TypeError('input should be a string or a stream')
python
def run(input, conf, filepath=None):
    """Lints a YAML source.

    Returns a generator of LintProblem objects.

    :param input: buffer, string or stream to read from
    :param conf: yamllint configuration object
    """
    if conf.is_file_ignored(filepath):
        return ()
    # type(b'')/type(u'') keeps this working on both Python 2 and 3.
    if isinstance(input, (type(b''), type(u''))):
        return _run(input, conf, filepath)
    if hasattr(input, 'read'):  # Python 2's file or Python 3's io.IOBase
        # The parser needs the whole document in memory to work correctly.
        content = input.read()
        return _run(content, conf, filepath)
    raise TypeError('input should be a string or a stream')
[ "def", "run", "(", "input", ",", "conf", ",", "filepath", "=", "None", ")", ":", "if", "conf", ".", "is_file_ignored", "(", "filepath", ")", ":", "return", "(", ")", "if", "isinstance", "(", "input", ",", "(", "type", "(", "b''", ")", ",", "type", ...
Lints a YAML source. Returns a generator of LintProblem objects. :param input: buffer, string or stream to read from :param conf: yamllint configuration object
[ "Lints", "a", "YAML", "source", "." ]
fec2c2fba736cabf6bee6b5eeb905cab0dc820f6
https://github.com/adrienverge/yamllint/blob/fec2c2fba736cabf6bee6b5eeb905cab0dc820f6/yamllint/linter.py#L218-L236
226,312
adrienverge/yamllint
yamllint/rules/common.py
get_line_indent
def get_line_indent(token): """Finds the indent of the line the token starts in.""" start = token.start_mark.buffer.rfind('\n', 0, token.start_mark.pointer) + 1 content = start while token.start_mark.buffer[content] == ' ': content += 1 return content - start
python
def get_line_indent(token):
    """Finds the indent of the line the token starts in."""
    buf = token.start_mark.buffer
    # Position just after the previous newline (0 when on the first line).
    line_start = buf.rfind('\n', 0, token.start_mark.pointer) + 1
    pos = line_start
    while buf[pos] == ' ':
        pos += 1
    return pos - line_start
[ "def", "get_line_indent", "(", "token", ")", ":", "start", "=", "token", ".", "start_mark", ".", "buffer", ".", "rfind", "(", "'\\n'", ",", "0", ",", "token", ".", "start_mark", ".", "pointer", ")", "+", "1", "content", "=", "start", "while", "token", ...
Finds the indent of the line the token starts in.
[ "Finds", "the", "indent", "of", "the", "line", "the", "token", "starts", "in", "." ]
fec2c2fba736cabf6bee6b5eeb905cab0dc820f6
https://github.com/adrienverge/yamllint/blob/fec2c2fba736cabf6bee6b5eeb905cab0dc820f6/yamllint/rules/common.py#L51-L58
226,313
adrienverge/yamllint
yamllint/rules/common.py
get_real_end_line
def get_real_end_line(token): """Finds the line on which the token really ends. With pyyaml, scalar tokens often end on a next line. """ end_line = token.end_mark.line + 1 if not isinstance(token, yaml.ScalarToken): return end_line pos = token.end_mark.pointer - 1 while (pos >= token.start_mark.pointer - 1 and token.end_mark.buffer[pos] in string.whitespace): if token.end_mark.buffer[pos] == '\n': end_line -= 1 pos -= 1 return end_line
python
def get_real_end_line(token):
    """Finds the line on which the token really ends.

    With pyyaml, scalar tokens often end on a next line.
    """
    end_line = token.end_mark.line + 1
    if not isinstance(token, yaml.ScalarToken):
        return end_line
    # Walk backwards over trailing whitespace, discounting every newline
    # that pyyaml counted past the scalar's real content.
    buf = token.end_mark.buffer
    pos = token.end_mark.pointer - 1
    while (pos >= token.start_mark.pointer - 1
           and buf[pos] in string.whitespace):
        if buf[pos] == '\n':
            end_line -= 1
        pos -= 1
    return end_line
[ "def", "get_real_end_line", "(", "token", ")", ":", "end_line", "=", "token", ".", "end_mark", ".", "line", "+", "1", "if", "not", "isinstance", "(", "token", ",", "yaml", ".", "ScalarToken", ")", ":", "return", "end_line", "pos", "=", "token", ".", "e...
Finds the line on which the token really ends. With pyyaml, scalar tokens often end on a next line.
[ "Finds", "the", "line", "on", "which", "the", "token", "really", "ends", "." ]
fec2c2fba736cabf6bee6b5eeb905cab0dc820f6
https://github.com/adrienverge/yamllint/blob/fec2c2fba736cabf6bee6b5eeb905cab0dc820f6/yamllint/rules/common.py#L61-L77
226,314
adrienverge/yamllint
yamllint/parser.py
comments_between_tokens
def comments_between_tokens(token1, token2): """Find all comments between two tokens""" if token2 is None: buf = token1.end_mark.buffer[token1.end_mark.pointer:] elif (token1.end_mark.line == token2.start_mark.line and not isinstance(token1, yaml.StreamStartToken) and not isinstance(token2, yaml.StreamEndToken)): return else: buf = token1.end_mark.buffer[token1.end_mark.pointer: token2.start_mark.pointer] line_no = token1.end_mark.line + 1 column_no = token1.end_mark.column + 1 pointer = token1.end_mark.pointer comment_before = None for line in buf.split('\n'): pos = line.find('#') if pos != -1: comment = Comment(line_no, column_no + pos, token1.end_mark.buffer, pointer + pos, token1, token2, comment_before) yield comment comment_before = comment pointer += len(line) + 1 line_no += 1 column_no = 1
python
def comments_between_tokens(token1, token2):
    """Find all comments between two tokens"""
    if token2 is None:
        # No following token: scan until the end of the buffer.
        buf = token1.end_mark.buffer[token1.end_mark.pointer:]
    elif (token1.end_mark.line == token2.start_mark.line and
          not isinstance(token1, yaml.StreamStartToken) and
          not isinstance(token2, yaml.StreamEndToken)):
        # Both tokens sit on the same line: there is no room for a
        # comment between them (stream start/end tokens excepted).
        return
    else:
        buf = token1.end_mark.buffer[token1.end_mark.pointer:
                                     token2.start_mark.pointer]
    line_no = token1.end_mark.line + 1
    column_no = token1.end_mark.column + 1
    pointer = token1.end_mark.pointer
    comment_before = None
    for line in buf.split('\n'):
        pos = line.find('#')
        if pos != -1:
            comment = Comment(line_no, column_no + pos,
                              token1.end_mark.buffer, pointer + pos,
                              token1, token2, comment_before)
            yield comment
            comment_before = comment
        # Advance past the line plus its trailing '\n'; after the first
        # line every subsequent line starts at column 1.
        pointer += len(line) + 1
        line_no += 1
        column_no = 1
[ "def", "comments_between_tokens", "(", "token1", ",", "token2", ")", ":", "if", "token2", "is", "None", ":", "buf", "=", "token1", ".", "end_mark", ".", "buffer", "[", "token1", ".", "end_mark", ".", "pointer", ":", "]", "elif", "(", "token1", ".", "en...
Find all comments between two tokens
[ "Find", "all", "comments", "between", "two", "tokens" ]
fec2c2fba736cabf6bee6b5eeb905cab0dc820f6
https://github.com/adrienverge/yamllint/blob/fec2c2fba736cabf6bee6b5eeb905cab0dc820f6/yamllint/parser.py#L91-L120
226,315
adrienverge/yamllint
yamllint/parser.py
token_or_comment_or_line_generator
def token_or_comment_or_line_generator(buffer): """Generator that mixes tokens and lines, ordering them by line number""" tok_or_com_gen = token_or_comment_generator(buffer) line_gen = line_generator(buffer) tok_or_com = next(tok_or_com_gen, None) line = next(line_gen, None) while tok_or_com is not None or line is not None: if tok_or_com is None or (line is not None and tok_or_com.line_no > line.line_no): yield line line = next(line_gen, None) else: yield tok_or_com tok_or_com = next(tok_or_com_gen, None)
python
def token_or_comment_or_line_generator(buffer):
    """Generator that mixes tokens and lines, ordering them by line number"""
    token_stream = token_or_comment_generator(buffer)
    line_stream = line_generator(buffer)
    current_token = next(token_stream, None)
    current_line = next(line_stream, None)
    # Merge the two already-ordered streams; tokens win line-number ties.
    while current_token is not None or current_line is not None:
        take_line = current_token is None or (
            current_line is not None
            and current_token.line_no > current_line.line_no)
        if take_line:
            yield current_line
            current_line = next(line_stream, None)
        else:
            yield current_token
            current_token = next(token_stream, None)
[ "def", "token_or_comment_or_line_generator", "(", "buffer", ")", ":", "tok_or_com_gen", "=", "token_or_comment_generator", "(", "buffer", ")", "line_gen", "=", "line_generator", "(", "buffer", ")", "tok_or_com", "=", "next", "(", "tok_or_com_gen", ",", "None", ")", ...
Generator that mixes tokens and lines, ordering them by line number
[ "Generator", "that", "mixes", "tokens", "and", "lines", "ordering", "them", "by", "line", "number" ]
fec2c2fba736cabf6bee6b5eeb905cab0dc820f6
https://github.com/adrienverge/yamllint/blob/fec2c2fba736cabf6bee6b5eeb905cab0dc820f6/yamllint/parser.py#L146-L161
226,316
korfuri/django-prometheus
django_prometheus/migrations.py
ExportMigrations
def ExportMigrations(): """Exports counts of unapplied migrations. This is meant to be called during app startup, ideally by django_prometheus.apps.AppConfig. """ # Import MigrationExecutor lazily. MigrationExecutor checks at # import time that the apps are ready, and they are not when # django_prometheus is imported. ExportMigrations() should be # called in AppConfig.ready(), which signals that all apps are # ready. from django.db.migrations.executor import MigrationExecutor if 'default' in connections and ( type(connections['default']) == DatabaseWrapper): # This is the case where DATABASES = {} in the configuration, # i.e. the user is not using any databases. Django "helpfully" # adds a dummy database and then throws when you try to # actually use it. So we don't do anything, because trying to # export stats would crash the app on startup. return for alias in connections.databases: executor = MigrationExecutor(connections[alias]) ExportMigrationsForDatabase(alias, executor)
python
def ExportMigrations():
    """Exports counts of unapplied migrations.

    This is meant to be called during app startup, ideally by
    django_prometheus.apps.AppConfig.
    """
    # Import MigrationExecutor lazily: at module import time the apps are
    # not ready yet, and MigrationExecutor checks that on import.
    # AppConfig.ready() guarantees the apps are ready when this runs.
    from django.db.migrations.executor import MigrationExecutor

    # Exact type check on purpose: with DATABASES = {} Django inserts a
    # dummy default database that raises when actually used, so trying
    # to export stats would crash the app on startup.
    if 'default' in connections and (
            type(connections['default']) == DatabaseWrapper):
        return
    for alias in connections.databases:
        executor = MigrationExecutor(connections[alias])
        ExportMigrationsForDatabase(alias, executor)
[ "def", "ExportMigrations", "(", ")", ":", "# Import MigrationExecutor lazily. MigrationExecutor checks at", "# import time that the apps are ready, and they are not when", "# django_prometheus is imported. ExportMigrations() should be", "# called in AppConfig.ready(), which signals that all apps are...
Exports counts of unapplied migrations. This is meant to be called during app startup, ideally by django_prometheus.apps.AppConfig.
[ "Exports", "counts", "of", "unapplied", "migrations", "." ]
c3a19ce46d812f76d9316e50a232878c27c9bdf5
https://github.com/korfuri/django-prometheus/blob/c3a19ce46d812f76d9316e50a232878c27c9bdf5/django_prometheus/migrations.py#L23-L47
226,317
korfuri/django-prometheus
django_prometheus/db/common.py
ExportingCursorWrapper
def ExportingCursorWrapper(cursor_class, alias, vendor): """Returns a CursorWrapper class that knows its database's alias and vendor name. """ class CursorWrapper(cursor_class): """Extends the base CursorWrapper to count events.""" def execute(self, *args, **kwargs): execute_total.labels(alias, vendor).inc() with ExceptionCounterByType(errors_total, extra_labels={ 'alias': alias, 'vendor': vendor}): return super(CursorWrapper, self).execute(*args, **kwargs) def executemany(self, query, param_list, *args, **kwargs): execute_total.labels(alias, vendor).inc(len(param_list)) execute_many_total.labels(alias, vendor).inc(len(param_list)) with ExceptionCounterByType(errors_total, extra_labels={ 'alias': alias, 'vendor': vendor}): return super(CursorWrapper, self).executemany( query, param_list, *args, **kwargs) return CursorWrapper
python
def ExportingCursorWrapper(cursor_class, alias, vendor):
    """Returns a CursorWrapper class that knows its database's alias and
    vendor name.
    """

    class CursorWrapper(cursor_class):
        """Extends the base CursorWrapper to count events."""

        def _count_errors(self):
            # Fresh labels dict per call, matching the counter's contract.
            return ExceptionCounterByType(
                errors_total,
                extra_labels={'alias': alias, 'vendor': vendor})

        def execute(self, *args, **kwargs):
            execute_total.labels(alias, vendor).inc()
            with self._count_errors():
                return super(CursorWrapper, self).execute(*args, **kwargs)

        def executemany(self, query, param_list, *args, **kwargs):
            execute_total.labels(alias, vendor).inc(len(param_list))
            execute_many_total.labels(alias, vendor).inc(len(param_list))
            with self._count_errors():
                return super(CursorWrapper, self).executemany(
                    query, param_list, *args, **kwargs)

    return CursorWrapper
[ "def", "ExportingCursorWrapper", "(", "cursor_class", ",", "alias", ",", "vendor", ")", ":", "class", "CursorWrapper", "(", "cursor_class", ")", ":", "\"\"\"Extends the base CursorWrapper to count events.\"\"\"", "def", "execute", "(", "self", ",", "*", "args", ",", ...
Returns a CursorWrapper class that knows its database's alias and vendor name.
[ "Returns", "a", "CursorWrapper", "class", "that", "knows", "its", "database", "s", "alias", "and", "vendor", "name", "." ]
c3a19ce46d812f76d9316e50a232878c27c9bdf5
https://github.com/korfuri/django-prometheus/blob/c3a19ce46d812f76d9316e50a232878c27c9bdf5/django_prometheus/db/common.py#L51-L72
226,318
korfuri/django-prometheus
django_prometheus/models.py
ExportModelOperationsMixin
def ExportModelOperationsMixin(model_name): """Returns a mixin for models to export counters for lifecycle operations. Usage: class User(ExportModelOperationsMixin('user'), Model): ... """ # Force create the labels for this model in the counters. This # is not necessary but it avoids gaps in the aggregated data. model_inserts.labels(model_name) model_updates.labels(model_name) model_deletes.labels(model_name) class Mixin(object): def _do_insert(self, *args, **kwargs): model_inserts.labels(model_name).inc() return super(Mixin, self)._do_insert(*args, **kwargs) def _do_update(self, *args, **kwargs): model_updates.labels(model_name).inc() return super(Mixin, self)._do_update(*args, **kwargs) def delete(self, *args, **kwargs): model_deletes.labels(model_name).inc() return super(Mixin, self).delete(*args, **kwargs) return Mixin
python
def ExportModelOperationsMixin(model_name): # Force create the labels for this model in the counters. This # is not necessary but it avoids gaps in the aggregated data. model_inserts.labels(model_name) model_updates.labels(model_name) model_deletes.labels(model_name) class Mixin(object): def _do_insert(self, *args, **kwargs): model_inserts.labels(model_name).inc() return super(Mixin, self)._do_insert(*args, **kwargs) def _do_update(self, *args, **kwargs): model_updates.labels(model_name).inc() return super(Mixin, self)._do_update(*args, **kwargs) def delete(self, *args, **kwargs): model_deletes.labels(model_name).inc() return super(Mixin, self).delete(*args, **kwargs) return Mixin
[ "def", "ExportModelOperationsMixin", "(", "model_name", ")", ":", "# Force create the labels for this model in the counters. This", "# is not necessary but it avoids gaps in the aggregated data.", "model_inserts", ".", "labels", "(", "model_name", ")", "model_updates", ".", "labels",...
Returns a mixin for models to export counters for lifecycle operations. Usage: class User(ExportModelOperationsMixin('user'), Model): ...
[ "Returns", "a", "mixin", "for", "models", "to", "export", "counters", "for", "lifecycle", "operations", "." ]
c3a19ce46d812f76d9316e50a232878c27c9bdf5
https://github.com/korfuri/django-prometheus/blob/c3a19ce46d812f76d9316e50a232878c27c9bdf5/django_prometheus/models.py#L19-L44
226,319
korfuri/django-prometheus
django_prometheus/exports.py
SetupPrometheusEndpointOnPort
def SetupPrometheusEndpointOnPort(port, addr=''): """Exports Prometheus metrics on an HTTPServer running in its own thread. The server runs on the given port and is by default listenning on all interfaces. This HTTPServer is fully independent of Django and its stack. This offers the advantage that even if Django becomes unable to respond, the HTTPServer will continue to function and export metrics. However, this also means that the features offered by Django (like middlewares or WSGI) can't be used. Now here's the really weird part. When Django runs with the auto-reloader enabled (which is the default, you can disable it with `manage.py runserver --noreload`), it forks and executes manage.py twice. That's wasteful but usually OK. It starts being a problem when you try to open a port, like we do. We can detect that we're running under an autoreloader through the presence of the RUN_MAIN environment variable, so we abort if we're trying to export under an autoreloader and trying to open a port. """ assert os.environ.get('RUN_MAIN') != 'true', ( 'The thread-based exporter can\'t be safely used when django\'s ' 'autoreloader is active. Use the URL exporter, or start django ' 'with --noreload. See documentation/exports.md.') prometheus_client.start_http_server(port, addr=addr)
python
def SetupPrometheusEndpointOnPort(port, addr=''): assert os.environ.get('RUN_MAIN') != 'true', ( 'The thread-based exporter can\'t be safely used when django\'s ' 'autoreloader is active. Use the URL exporter, or start django ' 'with --noreload. See documentation/exports.md.') prometheus_client.start_http_server(port, addr=addr)
[ "def", "SetupPrometheusEndpointOnPort", "(", "port", ",", "addr", "=", "''", ")", ":", "assert", "os", ".", "environ", ".", "get", "(", "'RUN_MAIN'", ")", "!=", "'true'", ",", "(", "'The thread-based exporter can\\'t be safely used when django\\'s '", "'autoreloader i...
Exports Prometheus metrics on an HTTPServer running in its own thread. The server runs on the given port and is by default listenning on all interfaces. This HTTPServer is fully independent of Django and its stack. This offers the advantage that even if Django becomes unable to respond, the HTTPServer will continue to function and export metrics. However, this also means that the features offered by Django (like middlewares or WSGI) can't be used. Now here's the really weird part. When Django runs with the auto-reloader enabled (which is the default, you can disable it with `manage.py runserver --noreload`), it forks and executes manage.py twice. That's wasteful but usually OK. It starts being a problem when you try to open a port, like we do. We can detect that we're running under an autoreloader through the presence of the RUN_MAIN environment variable, so we abort if we're trying to export under an autoreloader and trying to open a port.
[ "Exports", "Prometheus", "metrics", "on", "an", "HTTPServer", "running", "in", "its", "own", "thread", "." ]
c3a19ce46d812f76d9316e50a232878c27c9bdf5
https://github.com/korfuri/django-prometheus/blob/c3a19ce46d812f76d9316e50a232878c27c9bdf5/django_prometheus/exports.py#L21-L44
226,320
korfuri/django-prometheus
django_prometheus/exports.py
SetupPrometheusEndpointOnPortRange
def SetupPrometheusEndpointOnPortRange(port_range, addr=''): """Like SetupPrometheusEndpointOnPort, but tries several ports. This is useful when you're running Django as a WSGI application with multiple processes and you want Prometheus to discover all workers. Each worker will grab a port and you can use Prometheus to aggregate across workers. port_range may be any iterable object that contains a list of ports. Typically this would be an xrange of contiguous ports. As soon as one port is found that can serve, use this one and stop trying. The same caveats regarding autoreload apply. Do not use this when Django's autoreloader is active. """ assert os.environ.get('RUN_MAIN') != 'true', ( 'The thread-based exporter can\'t be safely used when django\'s ' 'autoreloader is active. Use the URL exporter, or start django ' 'with --noreload. See documentation/exports.md.') for port in port_range: try: httpd = HTTPServer((addr, port), prometheus_client.MetricsHandler) except (OSError, socket.error): # Python 2 raises socket.error, in Python 3 socket.error is an # alias for OSError continue # Try next port thread = PrometheusEndpointServer(httpd) thread.daemon = True thread.start() logger.info('Exporting Prometheus /metrics/ on port %s' % port) return
python
def SetupPrometheusEndpointOnPortRange(port_range, addr=''): assert os.environ.get('RUN_MAIN') != 'true', ( 'The thread-based exporter can\'t be safely used when django\'s ' 'autoreloader is active. Use the URL exporter, or start django ' 'with --noreload. See documentation/exports.md.') for port in port_range: try: httpd = HTTPServer((addr, port), prometheus_client.MetricsHandler) except (OSError, socket.error): # Python 2 raises socket.error, in Python 3 socket.error is an # alias for OSError continue # Try next port thread = PrometheusEndpointServer(httpd) thread.daemon = True thread.start() logger.info('Exporting Prometheus /metrics/ on port %s' % port) return
[ "def", "SetupPrometheusEndpointOnPortRange", "(", "port_range", ",", "addr", "=", "''", ")", ":", "assert", "os", ".", "environ", ".", "get", "(", "'RUN_MAIN'", ")", "!=", "'true'", ",", "(", "'The thread-based exporter can\\'t be safely used when django\\'s '", "'aut...
Like SetupPrometheusEndpointOnPort, but tries several ports. This is useful when you're running Django as a WSGI application with multiple processes and you want Prometheus to discover all workers. Each worker will grab a port and you can use Prometheus to aggregate across workers. port_range may be any iterable object that contains a list of ports. Typically this would be an xrange of contiguous ports. As soon as one port is found that can serve, use this one and stop trying. The same caveats regarding autoreload apply. Do not use this when Django's autoreloader is active.
[ "Like", "SetupPrometheusEndpointOnPort", "but", "tries", "several", "ports", "." ]
c3a19ce46d812f76d9316e50a232878c27c9bdf5
https://github.com/korfuri/django-prometheus/blob/c3a19ce46d812f76d9316e50a232878c27c9bdf5/django_prometheus/exports.py#L57-L90
226,321
korfuri/django-prometheus
django_prometheus/exports.py
SetupPrometheusExportsFromConfig
def SetupPrometheusExportsFromConfig(): """Exports metrics so Prometheus can collect them.""" port = getattr(settings, 'PROMETHEUS_METRICS_EXPORT_PORT', None) port_range = getattr( settings, 'PROMETHEUS_METRICS_EXPORT_PORT_RANGE', None) addr = getattr(settings, 'PROMETHEUS_METRICS_EXPORT_ADDRESS', '') if port_range: SetupPrometheusEndpointOnPortRange(port_range, addr) elif port: SetupPrometheusEndpointOnPort(port, addr)
python
def SetupPrometheusExportsFromConfig(): port = getattr(settings, 'PROMETHEUS_METRICS_EXPORT_PORT', None) port_range = getattr( settings, 'PROMETHEUS_METRICS_EXPORT_PORT_RANGE', None) addr = getattr(settings, 'PROMETHEUS_METRICS_EXPORT_ADDRESS', '') if port_range: SetupPrometheusEndpointOnPortRange(port_range, addr) elif port: SetupPrometheusEndpointOnPort(port, addr)
[ "def", "SetupPrometheusExportsFromConfig", "(", ")", ":", "port", "=", "getattr", "(", "settings", ",", "'PROMETHEUS_METRICS_EXPORT_PORT'", ",", "None", ")", "port_range", "=", "getattr", "(", "settings", ",", "'PROMETHEUS_METRICS_EXPORT_PORT_RANGE'", ",", "None", ")"...
Exports metrics so Prometheus can collect them.
[ "Exports", "metrics", "so", "Prometheus", "can", "collect", "them", "." ]
c3a19ce46d812f76d9316e50a232878c27c9bdf5
https://github.com/korfuri/django-prometheus/blob/c3a19ce46d812f76d9316e50a232878c27c9bdf5/django_prometheus/exports.py#L93-L102
226,322
Azure/azure-storage-python
azure-storage-file/azure/storage/file/_serialization.py
_get_path
def _get_path(share_name=None, directory_name=None, file_name=None): ''' Creates the path to access a file resource. share_name: Name of share. directory_name: The path to the directory. file_name: Name of file. ''' if share_name and directory_name and file_name: return '/{0}/{1}/{2}'.format( _str(share_name), _str(directory_name), _str(file_name)) elif share_name and directory_name: return '/{0}/{1}'.format( _str(share_name), _str(directory_name)) elif share_name and file_name: return '/{0}/{1}'.format( _str(share_name), _str(file_name)) elif share_name: return '/{0}'.format(_str(share_name)) else: return '/'
python
def _get_path(share_name=None, directory_name=None, file_name=None): ''' Creates the path to access a file resource. share_name: Name of share. directory_name: The path to the directory. file_name: Name of file. ''' if share_name and directory_name and file_name: return '/{0}/{1}/{2}'.format( _str(share_name), _str(directory_name), _str(file_name)) elif share_name and directory_name: return '/{0}/{1}'.format( _str(share_name), _str(directory_name)) elif share_name and file_name: return '/{0}/{1}'.format( _str(share_name), _str(file_name)) elif share_name: return '/{0}'.format(_str(share_name)) else: return '/'
[ "def", "_get_path", "(", "share_name", "=", "None", ",", "directory_name", "=", "None", ",", "file_name", "=", "None", ")", ":", "if", "share_name", "and", "directory_name", "and", "file_name", ":", "return", "'/{0}/{1}/{2}'", ".", "format", "(", "_str", "("...
Creates the path to access a file resource. share_name: Name of share. directory_name: The path to the directory. file_name: Name of file.
[ "Creates", "the", "path", "to", "access", "a", "file", "resource", "." ]
52327354b192cbcf6b7905118ec6b5d57fa46275
https://github.com/Azure/azure-storage-python/blob/52327354b192cbcf6b7905118ec6b5d57fa46275/azure-storage-file/azure/storage/file/_serialization.py#L14-L41
226,323
Azure/azure-storage-python
azure-storage-file/azure/storage/file/_deserialization.py
_parse_snapshot_share
def _parse_snapshot_share(response, name): ''' Extracts snapshot return header. ''' snapshot = response.headers.get('x-ms-snapshot') return _parse_share(response, name, snapshot)
python
def _parse_snapshot_share(response, name): ''' Extracts snapshot return header. ''' snapshot = response.headers.get('x-ms-snapshot') return _parse_share(response, name, snapshot)
[ "def", "_parse_snapshot_share", "(", "response", ",", "name", ")", ":", "snapshot", "=", "response", ".", "headers", ".", "get", "(", "'x-ms-snapshot'", ")", "return", "_parse_share", "(", "response", ",", "name", ",", "snapshot", ")" ]
Extracts snapshot return header.
[ "Extracts", "snapshot", "return", "header", "." ]
52327354b192cbcf6b7905118ec6b5d57fa46275
https://github.com/Azure/azure-storage-python/blob/52327354b192cbcf6b7905118ec6b5d57fa46275/azure-storage-file/azure/storage/file/_deserialization.py#L34-L40
226,324
Azure/azure-storage-python
tool_reset_account.py
purge_blob_containers
def purge_blob_containers(account, account_key): """ Delete all blob containers in the given storage account. USE AT OWN RISK. NOT SUPPORTED BY STORAGE TEAM. """ bs = BlockBlobService(account, account_key) with concurrent.futures.ThreadPoolExecutor(max_workers=20) as executor: # use a map to keep track of futures future_to_container_map = {executor.submit(delete_container, bs, container): container for container in bs.list_containers()} # as the futures are completed, print results for future in concurrent.futures.as_completed(future_to_container_map): container_name = future_to_container_map[future].name try: is_deleted = future.result() if is_deleted: print("Deleted container {} on first try".format(container_name)) else: print("Skipped container {} as it no longer exists".format(container_name)) except AzureException as e: # if the deletion failed because there's an active lease on the container, we will break it # since it is most likely left-over from previous tests if 'lease' in str(e): bs.break_container_lease(container_name) is_deleted = bs.delete_container(container_name) if is_deleted: print("Deleted container {} after having broken lease".format(container_name)) else: print("Skipped container {} as it stopped existing after having broken lease".format(container_name)) else: raise e except Exception as e: print("Skipped container " + container_name + " due to error " + str(e))
python
def purge_blob_containers(account, account_key): bs = BlockBlobService(account, account_key) with concurrent.futures.ThreadPoolExecutor(max_workers=20) as executor: # use a map to keep track of futures future_to_container_map = {executor.submit(delete_container, bs, container): container for container in bs.list_containers()} # as the futures are completed, print results for future in concurrent.futures.as_completed(future_to_container_map): container_name = future_to_container_map[future].name try: is_deleted = future.result() if is_deleted: print("Deleted container {} on first try".format(container_name)) else: print("Skipped container {} as it no longer exists".format(container_name)) except AzureException as e: # if the deletion failed because there's an active lease on the container, we will break it # since it is most likely left-over from previous tests if 'lease' in str(e): bs.break_container_lease(container_name) is_deleted = bs.delete_container(container_name) if is_deleted: print("Deleted container {} after having broken lease".format(container_name)) else: print("Skipped container {} as it stopped existing after having broken lease".format(container_name)) else: raise e except Exception as e: print("Skipped container " + container_name + " due to error " + str(e))
[ "def", "purge_blob_containers", "(", "account", ",", "account_key", ")", ":", "bs", "=", "BlockBlobService", "(", "account", ",", "account_key", ")", "with", "concurrent", ".", "futures", ".", "ThreadPoolExecutor", "(", "max_workers", "=", "20", ")", "as", "ex...
Delete all blob containers in the given storage account. USE AT OWN RISK. NOT SUPPORTED BY STORAGE TEAM.
[ "Delete", "all", "blob", "containers", "in", "the", "given", "storage", "account", ".", "USE", "AT", "OWN", "RISK", ".", "NOT", "SUPPORTED", "BY", "STORAGE", "TEAM", "." ]
52327354b192cbcf6b7905118ec6b5d57fa46275
https://github.com/Azure/azure-storage-python/blob/52327354b192cbcf6b7905118ec6b5d57fa46275/tool_reset_account.py#L13-L48
226,325
Azure/azure-storage-python
azure-storage-common/azure/storage/common/_encryption.py
_generate_encryption_data_dict
def _generate_encryption_data_dict(kek, cek, iv): ''' Generates and returns the encryption metadata as a dict. :param object kek: The key encryption key. See calling functions for more information. :param bytes cek: The content encryption key. :param bytes iv: The initialization vector. :return: A dict containing all the encryption metadata. :rtype: dict ''' # Encrypt the cek. wrapped_cek = kek.wrap_key(cek) # Build the encryption_data dict. # Use OrderedDict to comply with Java's ordering requirement. wrapped_content_key = OrderedDict() wrapped_content_key['KeyId'] = kek.get_kid() wrapped_content_key['EncryptedKey'] = _encode_base64(wrapped_cek) wrapped_content_key['Algorithm'] = kek.get_key_wrap_algorithm() encryption_agent = OrderedDict() encryption_agent['Protocol'] = _ENCRYPTION_PROTOCOL_V1 encryption_agent['EncryptionAlgorithm'] = _EncryptionAlgorithm.AES_CBC_256 encryption_data_dict = OrderedDict() encryption_data_dict['WrappedContentKey'] = wrapped_content_key encryption_data_dict['EncryptionAgent'] = encryption_agent encryption_data_dict['ContentEncryptionIV'] = _encode_base64(iv) encryption_data_dict['KeyWrappingMetadata'] = {'EncryptionLibrary': 'Python ' + __version__} return encryption_data_dict
python
def _generate_encryption_data_dict(kek, cek, iv): ''' Generates and returns the encryption metadata as a dict. :param object kek: The key encryption key. See calling functions for more information. :param bytes cek: The content encryption key. :param bytes iv: The initialization vector. :return: A dict containing all the encryption metadata. :rtype: dict ''' # Encrypt the cek. wrapped_cek = kek.wrap_key(cek) # Build the encryption_data dict. # Use OrderedDict to comply with Java's ordering requirement. wrapped_content_key = OrderedDict() wrapped_content_key['KeyId'] = kek.get_kid() wrapped_content_key['EncryptedKey'] = _encode_base64(wrapped_cek) wrapped_content_key['Algorithm'] = kek.get_key_wrap_algorithm() encryption_agent = OrderedDict() encryption_agent['Protocol'] = _ENCRYPTION_PROTOCOL_V1 encryption_agent['EncryptionAlgorithm'] = _EncryptionAlgorithm.AES_CBC_256 encryption_data_dict = OrderedDict() encryption_data_dict['WrappedContentKey'] = wrapped_content_key encryption_data_dict['EncryptionAgent'] = encryption_agent encryption_data_dict['ContentEncryptionIV'] = _encode_base64(iv) encryption_data_dict['KeyWrappingMetadata'] = {'EncryptionLibrary': 'Python ' + __version__} return encryption_data_dict
[ "def", "_generate_encryption_data_dict", "(", "kek", ",", "cek", ",", "iv", ")", ":", "# Encrypt the cek.", "wrapped_cek", "=", "kek", ".", "wrap_key", "(", "cek", ")", "# Build the encryption_data dict.", "# Use OrderedDict to comply with Java's ordering requirement.", "wr...
Generates and returns the encryption metadata as a dict. :param object kek: The key encryption key. See calling functions for more information. :param bytes cek: The content encryption key. :param bytes iv: The initialization vector. :return: A dict containing all the encryption metadata. :rtype: dict
[ "Generates", "and", "returns", "the", "encryption", "metadata", "as", "a", "dict", "." ]
52327354b192cbcf6b7905118ec6b5d57fa46275
https://github.com/Azure/azure-storage-python/blob/52327354b192cbcf6b7905118ec6b5d57fa46275/azure-storage-common/azure/storage/common/_encryption.py#L111-L141
226,326
Azure/azure-storage-python
azure-storage-common/azure/storage/common/_encryption.py
_generate_AES_CBC_cipher
def _generate_AES_CBC_cipher(cek, iv): ''' Generates and returns an encryption cipher for AES CBC using the given cek and iv. :param bytes[] cek: The content encryption key for the cipher. :param bytes[] iv: The initialization vector for the cipher. :return: A cipher for encrypting in AES256 CBC. :rtype: ~cryptography.hazmat.primitives.ciphers.Cipher ''' backend = default_backend() algorithm = AES(cek) mode = CBC(iv) return Cipher(algorithm, mode, backend)
python
def _generate_AES_CBC_cipher(cek, iv): ''' Generates and returns an encryption cipher for AES CBC using the given cek and iv. :param bytes[] cek: The content encryption key for the cipher. :param bytes[] iv: The initialization vector for the cipher. :return: A cipher for encrypting in AES256 CBC. :rtype: ~cryptography.hazmat.primitives.ciphers.Cipher ''' backend = default_backend() algorithm = AES(cek) mode = CBC(iv) return Cipher(algorithm, mode, backend)
[ "def", "_generate_AES_CBC_cipher", "(", "cek", ",", "iv", ")", ":", "backend", "=", "default_backend", "(", ")", "algorithm", "=", "AES", "(", "cek", ")", "mode", "=", "CBC", "(", "iv", ")", "return", "Cipher", "(", "algorithm", ",", "mode", ",", "back...
Generates and returns an encryption cipher for AES CBC using the given cek and iv. :param bytes[] cek: The content encryption key for the cipher. :param bytes[] iv: The initialization vector for the cipher. :return: A cipher for encrypting in AES256 CBC. :rtype: ~cryptography.hazmat.primitives.ciphers.Cipher
[ "Generates", "and", "returns", "an", "encryption", "cipher", "for", "AES", "CBC", "using", "the", "given", "cek", "and", "iv", "." ]
52327354b192cbcf6b7905118ec6b5d57fa46275
https://github.com/Azure/azure-storage-python/blob/52327354b192cbcf6b7905118ec6b5d57fa46275/azure-storage-common/azure/storage/common/_encryption.py#L181-L194
226,327
Azure/azure-storage-python
azure-storage-common/azure/storage/common/cloudstorageaccount.py
CloudStorageAccount.create_file_service
def create_file_service(self): ''' Creates a FileService object with the settings specified in the CloudStorageAccount. :return: A service object. :rtype: :class:`~azure.storage.file.fileservice.FileService` ''' try: from azure.storage.file.fileservice import FileService return FileService(self.account_name, self.account_key, sas_token=self.sas_token, endpoint_suffix=self.endpoint_suffix) except ImportError: raise Exception('The package azure-storage-file is required. ' + 'Please install it using "pip install azure-storage-file"')
python
def create_file_service(self): ''' Creates a FileService object with the settings specified in the CloudStorageAccount. :return: A service object. :rtype: :class:`~azure.storage.file.fileservice.FileService` ''' try: from azure.storage.file.fileservice import FileService return FileService(self.account_name, self.account_key, sas_token=self.sas_token, endpoint_suffix=self.endpoint_suffix) except ImportError: raise Exception('The package azure-storage-file is required. ' + 'Please install it using "pip install azure-storage-file"')
[ "def", "create_file_service", "(", "self", ")", ":", "try", ":", "from", "azure", ".", "storage", ".", "file", ".", "fileservice", "import", "FileService", "return", "FileService", "(", "self", ".", "account_name", ",", "self", ".", "account_key", ",", "sas_...
Creates a FileService object with the settings specified in the CloudStorageAccount. :return: A service object. :rtype: :class:`~azure.storage.file.fileservice.FileService`
[ "Creates", "a", "FileService", "object", "with", "the", "settings", "specified", "in", "the", "CloudStorageAccount", "." ]
52327354b192cbcf6b7905118ec6b5d57fa46275
https://github.com/Azure/azure-storage-python/blob/52327354b192cbcf6b7905118ec6b5d57fa46275/azure-storage-common/azure/storage/common/cloudstorageaccount.py#L128-L143
226,328
Azure/azure-storage-python
azure-storage-common/azure/storage/common/_serialization.py
_get_request_body
def _get_request_body(request_body): '''Converts an object into a request body. If it's None we'll return an empty string, if it's one of our objects it'll convert it to XML and return it. Otherwise we just use the object directly''' if request_body is None: return b'' if isinstance(request_body, bytes) or isinstance(request_body, IOBase): return request_body if isinstance(request_body, _unicode_type): return request_body.encode('utf-8') request_body = str(request_body) if isinstance(request_body, _unicode_type): return request_body.encode('utf-8') return request_body
python
def _get_request_body(request_body): '''Converts an object into a request body. If it's None we'll return an empty string, if it's one of our objects it'll convert it to XML and return it. Otherwise we just use the object directly''' if request_body is None: return b'' if isinstance(request_body, bytes) or isinstance(request_body, IOBase): return request_body if isinstance(request_body, _unicode_type): return request_body.encode('utf-8') request_body = str(request_body) if isinstance(request_body, _unicode_type): return request_body.encode('utf-8') return request_body
[ "def", "_get_request_body", "(", "request_body", ")", ":", "if", "request_body", "is", "None", ":", "return", "b''", "if", "isinstance", "(", "request_body", ",", "bytes", ")", "or", "isinstance", "(", "request_body", ",", "IOBase", ")", ":", "return", "requ...
Converts an object into a request body. If it's None we'll return an empty string, if it's one of our objects it'll convert it to XML and return it. Otherwise we just use the object directly
[ "Converts", "an", "object", "into", "a", "request", "body", ".", "If", "it", "s", "None", "we", "ll", "return", "an", "empty", "string", "if", "it", "s", "one", "of", "our", "objects", "it", "ll", "convert", "it", "to", "XML", "and", "return", "it", ...
52327354b192cbcf6b7905118ec6b5d57fa46275
https://github.com/Azure/azure-storage-python/blob/52327354b192cbcf6b7905118ec6b5d57fa46275/azure-storage-common/azure/storage/common/_serialization.py#L114-L132
226,329
Azure/azure-storage-python
samples/advanced/oauth.py
AutoUpdatedTokenCredential.stop_refreshing_token
def stop_refreshing_token(self): """ The timer needs to be canceled if the application is terminating, if not the timer will keep going. """ with self.lock: self.timer_stopped = True self.timer.cancel()
python
def stop_refreshing_token(self): with self.lock: self.timer_stopped = True self.timer.cancel()
[ "def", "stop_refreshing_token", "(", "self", ")", ":", "with", "self", ".", "lock", ":", "self", ".", "timer_stopped", "=", "True", "self", ".", "timer", ".", "cancel", "(", ")" ]
The timer needs to be canceled if the application is terminating, if not the timer will keep going.
[ "The", "timer", "needs", "to", "be", "canceled", "if", "the", "application", "is", "terminating", "if", "not", "the", "timer", "will", "keep", "going", "." ]
52327354b192cbcf6b7905118ec6b5d57fa46275
https://github.com/Azure/azure-storage-python/blob/52327354b192cbcf6b7905118ec6b5d57fa46275/samples/advanced/oauth.py#L57-L63
226,330
Azure/azure-storage-python
azure-storage-queue/azure/storage/queue/queueservice.py
QueueService.delete_message
def delete_message(self, queue_name, message_id, pop_receipt, timeout=None): ''' Deletes the specified message. Normally after a client retrieves a message with the get_messages operation, the client is expected to process and delete the message. To delete the message, you must have two items of data: id and pop_receipt. The id is returned from the previous get_messages operation. The pop_receipt is returned from the most recent :func:`~get_messages` or :func:`~update_message` operation. In order for the delete_message operation to succeed, the pop_receipt specified on the request must match the pop_receipt returned from the :func:`~get_messages` or :func:`~update_message` operation. :param str queue_name: The name of the queue from which to delete the message. :param str message_id: The message id identifying the message to delete. :param str pop_receipt: A valid pop receipt value returned from an earlier call to the :func:`~get_messages` or :func:`~update_message`. :param int timeout: The server timeout, expressed in seconds. ''' _validate_not_none('queue_name', queue_name) _validate_not_none('message_id', message_id) _validate_not_none('pop_receipt', pop_receipt) request = HTTPRequest() request.method = 'DELETE' request.host_locations = self._get_host_locations() request.path = _get_path(queue_name, True, message_id) request.query = { 'popreceipt': _to_str(pop_receipt), 'timeout': _int_to_str(timeout) } self._perform_request(request)
python
def delete_message(self, queue_name, message_id, pop_receipt, timeout=None): ''' Deletes the specified message. Normally after a client retrieves a message with the get_messages operation, the client is expected to process and delete the message. To delete the message, you must have two items of data: id and pop_receipt. The id is returned from the previous get_messages operation. The pop_receipt is returned from the most recent :func:`~get_messages` or :func:`~update_message` operation. In order for the delete_message operation to succeed, the pop_receipt specified on the request must match the pop_receipt returned from the :func:`~get_messages` or :func:`~update_message` operation. :param str queue_name: The name of the queue from which to delete the message. :param str message_id: The message id identifying the message to delete. :param str pop_receipt: A valid pop receipt value returned from an earlier call to the :func:`~get_messages` or :func:`~update_message`. :param int timeout: The server timeout, expressed in seconds. ''' _validate_not_none('queue_name', queue_name) _validate_not_none('message_id', message_id) _validate_not_none('pop_receipt', pop_receipt) request = HTTPRequest() request.method = 'DELETE' request.host_locations = self._get_host_locations() request.path = _get_path(queue_name, True, message_id) request.query = { 'popreceipt': _to_str(pop_receipt), 'timeout': _int_to_str(timeout) } self._perform_request(request)
[ "def", "delete_message", "(", "self", ",", "queue_name", ",", "message_id", ",", "pop_receipt", ",", "timeout", "=", "None", ")", ":", "_validate_not_none", "(", "'queue_name'", ",", "queue_name", ")", "_validate_not_none", "(", "'message_id'", ",", "message_id", ...
Deletes the specified message. Normally after a client retrieves a message with the get_messages operation, the client is expected to process and delete the message. To delete the message, you must have two items of data: id and pop_receipt. The id is returned from the previous get_messages operation. The pop_receipt is returned from the most recent :func:`~get_messages` or :func:`~update_message` operation. In order for the delete_message operation to succeed, the pop_receipt specified on the request must match the pop_receipt returned from the :func:`~get_messages` or :func:`~update_message` operation. :param str queue_name: The name of the queue from which to delete the message. :param str message_id: The message id identifying the message to delete. :param str pop_receipt: A valid pop receipt value returned from an earlier call to the :func:`~get_messages` or :func:`~update_message`. :param int timeout: The server timeout, expressed in seconds.
[ "Deletes", "the", "specified", "message", "." ]
52327354b192cbcf6b7905118ec6b5d57fa46275
https://github.com/Azure/azure-storage-python/blob/52327354b192cbcf6b7905118ec6b5d57fa46275/azure-storage-queue/azure/storage/queue/queueservice.py#L894-L929
226,331
Azure/azure-storage-python
azure-storage-file/azure/storage/file/fileservice.py
FileService.make_file_url
def make_file_url(self, share_name, directory_name, file_name, protocol=None, sas_token=None): ''' Creates the url to access a file. :param str share_name: Name of share. :param str directory_name: The path to the directory. :param str file_name: Name of file. :param str protocol: Protocol to use: 'http' or 'https'. If not specified, uses the protocol specified when FileService was initialized. :param str sas_token: Shared access signature token created with generate_shared_access_signature. :return: file access URL. :rtype: str ''' if directory_name is None: url = '{}://{}/{}/{}'.format( protocol or self.protocol, self.primary_endpoint, share_name, file_name, ) else: url = '{}://{}/{}/{}/{}'.format( protocol or self.protocol, self.primary_endpoint, share_name, directory_name, file_name, ) if sas_token: url += '?' + sas_token return url
python
def make_file_url(self, share_name, directory_name, file_name, protocol=None, sas_token=None): ''' Creates the url to access a file. :param str share_name: Name of share. :param str directory_name: The path to the directory. :param str file_name: Name of file. :param str protocol: Protocol to use: 'http' or 'https'. If not specified, uses the protocol specified when FileService was initialized. :param str sas_token: Shared access signature token created with generate_shared_access_signature. :return: file access URL. :rtype: str ''' if directory_name is None: url = '{}://{}/{}/{}'.format( protocol or self.protocol, self.primary_endpoint, share_name, file_name, ) else: url = '{}://{}/{}/{}/{}'.format( protocol or self.protocol, self.primary_endpoint, share_name, directory_name, file_name, ) if sas_token: url += '?' + sas_token return url
[ "def", "make_file_url", "(", "self", ",", "share_name", ",", "directory_name", ",", "file_name", ",", "protocol", "=", "None", ",", "sas_token", "=", "None", ")", ":", "if", "directory_name", "is", "None", ":", "url", "=", "'{}://{}/{}/{}'", ".", "format", ...
Creates the url to access a file. :param str share_name: Name of share. :param str directory_name: The path to the directory. :param str file_name: Name of file. :param str protocol: Protocol to use: 'http' or 'https'. If not specified, uses the protocol specified when FileService was initialized. :param str sas_token: Shared access signature token created with generate_shared_access_signature. :return: file access URL. :rtype: str
[ "Creates", "the", "url", "to", "access", "a", "file", "." ]
52327354b192cbcf6b7905118ec6b5d57fa46275
https://github.com/Azure/azure-storage-python/blob/52327354b192cbcf6b7905118ec6b5d57fa46275/azure-storage-file/azure/storage/file/fileservice.py#L188-L228
226,332
Azure/azure-storage-python
azure-storage-file/azure/storage/file/fileservice.py
FileService.generate_share_shared_access_signature
def generate_share_shared_access_signature(self, share_name, permission=None, expiry=None, start=None, id=None, ip=None, protocol=None, cache_control=None, content_disposition=None, content_encoding=None, content_language=None, content_type=None): ''' Generates a shared access signature for the share. Use the returned signature with the sas_token parameter of FileService. :param str share_name: Name of share. :param SharePermissions permission: The permissions associated with the shared access signature. The user is restricted to operations allowed by the permissions. Permissions must be ordered read, create, write, delete, list. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. :param expiry: The time at which the shared access signature becomes invalid. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. Azure will always convert values to UTC. If a date is passed in without timezone info, it is assumed to be UTC. :type expiry: datetime or str :param start: The time at which the shared access signature becomes valid. If omitted, start time for this call is assumed to be the time when the storage service receives the request. Azure will always convert values to UTC. If a date is passed in without timezone info, it is assumed to be UTC. :type start: datetime or str :param str id: A unique value up to 64 characters in length that correlates to a stored access policy. To create a stored access policy, use :func:`~set_share_acl`. :param str ip: Specifies an IP address or a range of IP addresses from which to accept requests. If the IP address from which the request originates does not match the IP address or address range specified on the SAS token, the request is not authenticated. 
For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS restricts the request to those IP addresses. :param str protocol: Specifies the protocol permitted for a request made. Possible values are both HTTPS and HTTP (https,http) or HTTPS only (https). The default value is https,http. Note that HTTP only is not a permitted value. :param str cache_control: Response header value for Cache-Control when resource is accessed using this shared access signature. :param str content_disposition: Response header value for Content-Disposition when resource is accessed using this shared access signature. :param str content_encoding: Response header value for Content-Encoding when resource is accessed using this shared access signature. :param str content_language: Response header value for Content-Language when resource is accessed using this shared access signature. :param str content_type: Response header value for Content-Type when resource is accessed using this shared access signature. :return: A Shared Access Signature (sas) token. :rtype: str ''' _validate_not_none('share_name', share_name) _validate_not_none('self.account_name', self.account_name) _validate_not_none('self.account_key', self.account_key) sas = FileSharedAccessSignature(self.account_name, self.account_key) return sas.generate_share( share_name, permission, expiry, start=start, id=id, ip=ip, protocol=protocol, cache_control=cache_control, content_disposition=content_disposition, content_encoding=content_encoding, content_language=content_language, content_type=content_type, )
python
def generate_share_shared_access_signature(self, share_name, permission=None, expiry=None, start=None, id=None, ip=None, protocol=None, cache_control=None, content_disposition=None, content_encoding=None, content_language=None, content_type=None): ''' Generates a shared access signature for the share. Use the returned signature with the sas_token parameter of FileService. :param str share_name: Name of share. :param SharePermissions permission: The permissions associated with the shared access signature. The user is restricted to operations allowed by the permissions. Permissions must be ordered read, create, write, delete, list. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. :param expiry: The time at which the shared access signature becomes invalid. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. Azure will always convert values to UTC. If a date is passed in without timezone info, it is assumed to be UTC. :type expiry: datetime or str :param start: The time at which the shared access signature becomes valid. If omitted, start time for this call is assumed to be the time when the storage service receives the request. Azure will always convert values to UTC. If a date is passed in without timezone info, it is assumed to be UTC. :type start: datetime or str :param str id: A unique value up to 64 characters in length that correlates to a stored access policy. To create a stored access policy, use :func:`~set_share_acl`. :param str ip: Specifies an IP address or a range of IP addresses from which to accept requests. If the IP address from which the request originates does not match the IP address or address range specified on the SAS token, the request is not authenticated. 
For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS restricts the request to those IP addresses. :param str protocol: Specifies the protocol permitted for a request made. Possible values are both HTTPS and HTTP (https,http) or HTTPS only (https). The default value is https,http. Note that HTTP only is not a permitted value. :param str cache_control: Response header value for Cache-Control when resource is accessed using this shared access signature. :param str content_disposition: Response header value for Content-Disposition when resource is accessed using this shared access signature. :param str content_encoding: Response header value for Content-Encoding when resource is accessed using this shared access signature. :param str content_language: Response header value for Content-Language when resource is accessed using this shared access signature. :param str content_type: Response header value for Content-Type when resource is accessed using this shared access signature. :return: A Shared Access Signature (sas) token. :rtype: str ''' _validate_not_none('share_name', share_name) _validate_not_none('self.account_name', self.account_name) _validate_not_none('self.account_key', self.account_key) sas = FileSharedAccessSignature(self.account_name, self.account_key) return sas.generate_share( share_name, permission, expiry, start=start, id=id, ip=ip, protocol=protocol, cache_control=cache_control, content_disposition=content_disposition, content_encoding=content_encoding, content_language=content_language, content_type=content_type, )
[ "def", "generate_share_shared_access_signature", "(", "self", ",", "share_name", ",", "permission", "=", "None", ",", "expiry", "=", "None", ",", "start", "=", "None", ",", "id", "=", "None", ",", "ip", "=", "None", ",", "protocol", "=", "None", ",", "ca...
Generates a shared access signature for the share. Use the returned signature with the sas_token parameter of FileService. :param str share_name: Name of share. :param SharePermissions permission: The permissions associated with the shared access signature. The user is restricted to operations allowed by the permissions. Permissions must be ordered read, create, write, delete, list. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. :param expiry: The time at which the shared access signature becomes invalid. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. Azure will always convert values to UTC. If a date is passed in without timezone info, it is assumed to be UTC. :type expiry: datetime or str :param start: The time at which the shared access signature becomes valid. If omitted, start time for this call is assumed to be the time when the storage service receives the request. Azure will always convert values to UTC. If a date is passed in without timezone info, it is assumed to be UTC. :type start: datetime or str :param str id: A unique value up to 64 characters in length that correlates to a stored access policy. To create a stored access policy, use :func:`~set_share_acl`. :param str ip: Specifies an IP address or a range of IP addresses from which to accept requests. If the IP address from which the request originates does not match the IP address or address range specified on the SAS token, the request is not authenticated. For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS restricts the request to those IP addresses. :param str protocol: Specifies the protocol permitted for a request made. Possible values are both HTTPS and HTTP (https,http) or HTTPS only (https). 
The default value is https,http. Note that HTTP only is not a permitted value. :param str cache_control: Response header value for Cache-Control when resource is accessed using this shared access signature. :param str content_disposition: Response header value for Content-Disposition when resource is accessed using this shared access signature. :param str content_encoding: Response header value for Content-Encoding when resource is accessed using this shared access signature. :param str content_language: Response header value for Content-Language when resource is accessed using this shared access signature. :param str content_type: Response header value for Content-Type when resource is accessed using this shared access signature. :return: A Shared Access Signature (sas) token. :rtype: str
[ "Generates", "a", "shared", "access", "signature", "for", "the", "share", ".", "Use", "the", "returned", "signature", "with", "the", "sas_token", "parameter", "of", "FileService", "." ]
52327354b192cbcf6b7905118ec6b5d57fa46275
https://github.com/Azure/azure-storage-python/blob/52327354b192cbcf6b7905118ec6b5d57fa46275/azure-storage-file/azure/storage/file/fileservice.py#L279-L368
226,333
Azure/azure-storage-python
azure-storage-file/azure/storage/file/fileservice.py
FileService.get_file_service_properties
def get_file_service_properties(self, timeout=None): ''' Gets the properties of a storage account's File service, including Azure Storage Analytics. :param int timeout: The timeout parameter is expressed in seconds. :return: The file service properties. :rtype: :class:`~azure.storage.common.models.ServiceProperties` ''' request = HTTPRequest() request.method = 'GET' request.host_locations = self._get_host_locations() request.path = _get_path() request.query = { 'restype': 'service', 'comp': 'properties', 'timeout': _int_to_str(timeout), } return self._perform_request(request, _convert_xml_to_service_properties)
python
def get_file_service_properties(self, timeout=None): ''' Gets the properties of a storage account's File service, including Azure Storage Analytics. :param int timeout: The timeout parameter is expressed in seconds. :return: The file service properties. :rtype: :class:`~azure.storage.common.models.ServiceProperties` ''' request = HTTPRequest() request.method = 'GET' request.host_locations = self._get_host_locations() request.path = _get_path() request.query = { 'restype': 'service', 'comp': 'properties', 'timeout': _int_to_str(timeout), } return self._perform_request(request, _convert_xml_to_service_properties)
[ "def", "get_file_service_properties", "(", "self", ",", "timeout", "=", "None", ")", ":", "request", "=", "HTTPRequest", "(", ")", "request", ".", "method", "=", "'GET'", "request", ".", "host_locations", "=", "self", ".", "_get_host_locations", "(", ")", "r...
Gets the properties of a storage account's File service, including Azure Storage Analytics. :param int timeout: The timeout parameter is expressed in seconds. :return: The file service properties. :rtype: :class:`~azure.storage.common.models.ServiceProperties`
[ "Gets", "the", "properties", "of", "a", "storage", "account", "s", "File", "service", "including", "Azure", "Storage", "Analytics", "." ]
52327354b192cbcf6b7905118ec6b5d57fa46275
https://github.com/Azure/azure-storage-python/blob/52327354b192cbcf6b7905118ec6b5d57fa46275/azure-storage-file/azure/storage/file/fileservice.py#L507-L528
226,334
Azure/azure-storage-python
azure-storage-file/azure/storage/file/fileservice.py
FileService._list_shares
def _list_shares(self, prefix=None, marker=None, max_results=None, include=None, timeout=None, _context=None): ''' Returns a list of the shares under the specified account. :param str prefix: Filters the results to return only shares whose names begin with the specified prefix. :param str marker: A string value that identifies the portion of the list to be returned with the next list operation. The operation returns a next_marker value within the response body if the list returned was not complete. The marker value may then be used in a subsequent call to request the next set of list items. The marker value is opaque to the client. :param int max_results: Specifies the maximum number of shares to return. A single list request may return up to 1000 shares and potentially a continuation token which should be followed to get additional resutls. :param string include: Include this parameter to specify that either the share's metadata, snapshots or both be returned as part of the response body. set this parameter to string 'metadata' to get share's metadata. set this parameter to 'snapshots' to get all the share snapshots. for both use 'snapshots,metadata'. :param int timeout: The timeout parameter is expressed in seconds. ''' request = HTTPRequest() request.method = 'GET' request.host_locations = self._get_host_locations() request.path = _get_path() request.query = { 'comp': 'list', 'prefix': _to_str(prefix), 'marker': _to_str(marker), 'maxresults': _int_to_str(max_results), 'include': _to_str(include), 'timeout': _int_to_str(timeout), } return self._perform_request(request, _convert_xml_to_shares, operation_context=_context)
python
def _list_shares(self, prefix=None, marker=None, max_results=None, include=None, timeout=None, _context=None): ''' Returns a list of the shares under the specified account. :param str prefix: Filters the results to return only shares whose names begin with the specified prefix. :param str marker: A string value that identifies the portion of the list to be returned with the next list operation. The operation returns a next_marker value within the response body if the list returned was not complete. The marker value may then be used in a subsequent call to request the next set of list items. The marker value is opaque to the client. :param int max_results: Specifies the maximum number of shares to return. A single list request may return up to 1000 shares and potentially a continuation token which should be followed to get additional resutls. :param string include: Include this parameter to specify that either the share's metadata, snapshots or both be returned as part of the response body. set this parameter to string 'metadata' to get share's metadata. set this parameter to 'snapshots' to get all the share snapshots. for both use 'snapshots,metadata'. :param int timeout: The timeout parameter is expressed in seconds. ''' request = HTTPRequest() request.method = 'GET' request.host_locations = self._get_host_locations() request.path = _get_path() request.query = { 'comp': 'list', 'prefix': _to_str(prefix), 'marker': _to_str(marker), 'maxresults': _int_to_str(max_results), 'include': _to_str(include), 'timeout': _int_to_str(timeout), } return self._perform_request(request, _convert_xml_to_shares, operation_context=_context)
[ "def", "_list_shares", "(", "self", ",", "prefix", "=", "None", ",", "marker", "=", "None", ",", "max_results", "=", "None", ",", "include", "=", "None", ",", "timeout", "=", "None", ",", "_context", "=", "None", ")", ":", "request", "=", "HTTPRequest"...
Returns a list of the shares under the specified account. :param str prefix: Filters the results to return only shares whose names begin with the specified prefix. :param str marker: A string value that identifies the portion of the list to be returned with the next list operation. The operation returns a next_marker value within the response body if the list returned was not complete. The marker value may then be used in a subsequent call to request the next set of list items. The marker value is opaque to the client. :param int max_results: Specifies the maximum number of shares to return. A single list request may return up to 1000 shares and potentially a continuation token which should be followed to get additional resutls. :param string include: Include this parameter to specify that either the share's metadata, snapshots or both be returned as part of the response body. set this parameter to string 'metadata' to get share's metadata. set this parameter to 'snapshots' to get all the share snapshots. for both use 'snapshots,metadata'. :param int timeout: The timeout parameter is expressed in seconds.
[ "Returns", "a", "list", "of", "the", "shares", "under", "the", "specified", "account", "." ]
52327354b192cbcf6b7905118ec6b5d57fa46275
https://github.com/Azure/azure-storage-python/blob/52327354b192cbcf6b7905118ec6b5d57fa46275/azure-storage-file/azure/storage/file/fileservice.py#L574-L614
226,335
Azure/azure-storage-python
azure-storage-file/azure/storage/file/fileservice.py
FileService.snapshot_share
def snapshot_share(self, share_name, metadata=None, quota=None, timeout=None): ''' Creates a snapshot of an existing share under the specified account. :param str share_name: The name of the share to create a snapshot of. :param metadata: A dict with name_value pairs to associate with the share as metadata. Example:{'Category':'test'} :type metadata: a dict of str to str: :param int quota: Specifies the maximum size of the share, in gigabytes. Must be greater than 0, and less than or equal to 5TB (5120). :param int timeout: The timeout parameter is expressed in seconds. :return: snapshot properties :rtype: azure.storage.file.models.Share ''' _validate_not_none('share_name', share_name) request = HTTPRequest() request.method = 'PUT' request.host_locations = self._get_host_locations() request.path = _get_path(share_name) request.query = { 'restype': 'share', 'comp': 'snapshot', 'timeout': _int_to_str(timeout), } request.headers = { 'x-ms-share-quota': _int_to_str(quota) } _add_metadata_headers(metadata, request) return self._perform_request(request, _parse_snapshot_share, [share_name])
python
def snapshot_share(self, share_name, metadata=None, quota=None, timeout=None): ''' Creates a snapshot of an existing share under the specified account. :param str share_name: The name of the share to create a snapshot of. :param metadata: A dict with name_value pairs to associate with the share as metadata. Example:{'Category':'test'} :type metadata: a dict of str to str: :param int quota: Specifies the maximum size of the share, in gigabytes. Must be greater than 0, and less than or equal to 5TB (5120). :param int timeout: The timeout parameter is expressed in seconds. :return: snapshot properties :rtype: azure.storage.file.models.Share ''' _validate_not_none('share_name', share_name) request = HTTPRequest() request.method = 'PUT' request.host_locations = self._get_host_locations() request.path = _get_path(share_name) request.query = { 'restype': 'share', 'comp': 'snapshot', 'timeout': _int_to_str(timeout), } request.headers = { 'x-ms-share-quota': _int_to_str(quota) } _add_metadata_headers(metadata, request) return self._perform_request(request, _parse_snapshot_share, [share_name])
[ "def", "snapshot_share", "(", "self", ",", "share_name", ",", "metadata", "=", "None", ",", "quota", "=", "None", ",", "timeout", "=", "None", ")", ":", "_validate_not_none", "(", "'share_name'", ",", "share_name", ")", "request", "=", "HTTPRequest", "(", ...
Creates a snapshot of an existing share under the specified account. :param str share_name: The name of the share to create a snapshot of. :param metadata: A dict with name_value pairs to associate with the share as metadata. Example:{'Category':'test'} :type metadata: a dict of str to str: :param int quota: Specifies the maximum size of the share, in gigabytes. Must be greater than 0, and less than or equal to 5TB (5120). :param int timeout: The timeout parameter is expressed in seconds. :return: snapshot properties :rtype: azure.storage.file.models.Share
[ "Creates", "a", "snapshot", "of", "an", "existing", "share", "under", "the", "specified", "account", "." ]
52327354b192cbcf6b7905118ec6b5d57fa46275
https://github.com/Azure/azure-storage-python/blob/52327354b192cbcf6b7905118ec6b5d57fa46275/azure-storage-file/azure/storage/file/fileservice.py#L666-L699
226,336
Azure/azure-storage-python
azure-storage-file/azure/storage/file/fileservice.py
FileService.get_share_acl
def get_share_acl(self, share_name, timeout=None): ''' Gets the permissions for the specified share. :param str share_name: Name of existing share. :param int timeout: The timeout parameter is expressed in seconds. :return: A dictionary of access policies associated with the share. :rtype: dict(str, :class:`~azure.storage.common.models.AccessPolicy`) ''' _validate_not_none('share_name', share_name) request = HTTPRequest() request.method = 'GET' request.host_locations = self._get_host_locations() request.path = _get_path(share_name) request.query = { 'restype': 'share', 'comp': 'acl', 'timeout': _int_to_str(timeout), } return self._perform_request(request, _convert_xml_to_signed_identifiers)
python
def get_share_acl(self, share_name, timeout=None): ''' Gets the permissions for the specified share. :param str share_name: Name of existing share. :param int timeout: The timeout parameter is expressed in seconds. :return: A dictionary of access policies associated with the share. :rtype: dict(str, :class:`~azure.storage.common.models.AccessPolicy`) ''' _validate_not_none('share_name', share_name) request = HTTPRequest() request.method = 'GET' request.host_locations = self._get_host_locations() request.path = _get_path(share_name) request.query = { 'restype': 'share', 'comp': 'acl', 'timeout': _int_to_str(timeout), } return self._perform_request(request, _convert_xml_to_signed_identifiers)
[ "def", "get_share_acl", "(", "self", ",", "share_name", ",", "timeout", "=", "None", ")", ":", "_validate_not_none", "(", "'share_name'", ",", "share_name", ")", "request", "=", "HTTPRequest", "(", ")", "request", ".", "method", "=", "'GET'", "request", ".",...
Gets the permissions for the specified share. :param str share_name: Name of existing share. :param int timeout: The timeout parameter is expressed in seconds. :return: A dictionary of access policies associated with the share. :rtype: dict(str, :class:`~azure.storage.common.models.AccessPolicy`)
[ "Gets", "the", "permissions", "for", "the", "specified", "share", "." ]
52327354b192cbcf6b7905118ec6b5d57fa46275
https://github.com/Azure/azure-storage-python/blob/52327354b192cbcf6b7905118ec6b5d57fa46275/azure-storage-file/azure/storage/file/fileservice.py#L816-L838
226,337
Azure/azure-storage-python
azure-storage-file/azure/storage/file/fileservice.py
FileService.delete_share
def delete_share(self, share_name, fail_not_exist=False, timeout=None, snapshot=None, delete_snapshots=None): ''' Marks the specified share for deletion. If the share does not exist, the operation fails on the service. By default, the exception is swallowed by the client. To expose the exception, specify True for fail_not_exist. :param str share_name: Name of share to delete. :param bool fail_not_exist: Specify whether to throw an exception when the share doesn't exist. False by default. :param int timeout: The timeout parameter is expressed in seconds. :param str snapshot: A string that represents the snapshot version, if applicable. Specify this argument to delete a specific snapshot only. delete_snapshots must be None if this is specified. :param ~azure.storage.file.models.DeleteSnapshot delete_snapshots: To delete a share that has snapshots, this must be specified as DeleteSnapshot.Include. :return: True if share is deleted, False share doesn't exist. :rtype: bool ''' _validate_not_none('share_name', share_name) request = HTTPRequest() request.method = 'DELETE' request.host_locations = self._get_host_locations() request.path = _get_path(share_name) request.headers = { 'x-ms-delete-snapshots': _to_str(delete_snapshots) } request.query = { 'restype': 'share', 'timeout': _int_to_str(timeout), 'sharesnapshot': _to_str(snapshot), } if not fail_not_exist: try: self._perform_request(request, expected_errors=[_SHARE_NOT_FOUND_ERROR_CODE]) return True except AzureHttpError as ex: _dont_fail_not_exist(ex) return False else: self._perform_request(request) return True
python
def delete_share(self, share_name, fail_not_exist=False, timeout=None, snapshot=None, delete_snapshots=None): ''' Marks the specified share for deletion. If the share does not exist, the operation fails on the service. By default, the exception is swallowed by the client. To expose the exception, specify True for fail_not_exist. :param str share_name: Name of share to delete. :param bool fail_not_exist: Specify whether to throw an exception when the share doesn't exist. False by default. :param int timeout: The timeout parameter is expressed in seconds. :param str snapshot: A string that represents the snapshot version, if applicable. Specify this argument to delete a specific snapshot only. delete_snapshots must be None if this is specified. :param ~azure.storage.file.models.DeleteSnapshot delete_snapshots: To delete a share that has snapshots, this must be specified as DeleteSnapshot.Include. :return: True if share is deleted, False share doesn't exist. :rtype: bool ''' _validate_not_none('share_name', share_name) request = HTTPRequest() request.method = 'DELETE' request.host_locations = self._get_host_locations() request.path = _get_path(share_name) request.headers = { 'x-ms-delete-snapshots': _to_str(delete_snapshots) } request.query = { 'restype': 'share', 'timeout': _int_to_str(timeout), 'sharesnapshot': _to_str(snapshot), } if not fail_not_exist: try: self._perform_request(request, expected_errors=[_SHARE_NOT_FOUND_ERROR_CODE]) return True except AzureHttpError as ex: _dont_fail_not_exist(ex) return False else: self._perform_request(request) return True
[ "def", "delete_share", "(", "self", ",", "share_name", ",", "fail_not_exist", "=", "False", ",", "timeout", "=", "None", ",", "snapshot", "=", "None", ",", "delete_snapshots", "=", "None", ")", ":", "_validate_not_none", "(", "'share_name'", ",", "share_name",...
Marks the specified share for deletion. If the share does not exist, the operation fails on the service. By default, the exception is swallowed by the client. To expose the exception, specify True for fail_not_exist. :param str share_name: Name of share to delete. :param bool fail_not_exist: Specify whether to throw an exception when the share doesn't exist. False by default. :param int timeout: The timeout parameter is expressed in seconds. :param str snapshot: A string that represents the snapshot version, if applicable. Specify this argument to delete a specific snapshot only. delete_snapshots must be None if this is specified. :param ~azure.storage.file.models.DeleteSnapshot delete_snapshots: To delete a share that has snapshots, this must be specified as DeleteSnapshot.Include. :return: True if share is deleted, False share doesn't exist. :rtype: bool
[ "Marks", "the", "specified", "share", "for", "deletion", ".", "If", "the", "share", "does", "not", "exist", "the", "operation", "fails", "on", "the", "service", ".", "By", "default", "the", "exception", "is", "swallowed", "by", "the", "client", ".", "To", ...
52327354b192cbcf6b7905118ec6b5d57fa46275
https://github.com/Azure/azure-storage-python/blob/52327354b192cbcf6b7905118ec6b5d57fa46275/azure-storage-file/azure/storage/file/fileservice.py#L899-L945
226,338
Azure/azure-storage-python
azure-storage-file/azure/storage/file/fileservice.py
FileService.create_directory
def create_directory(self, share_name, directory_name, metadata=None, fail_on_exist=False, timeout=None): ''' Creates a new directory under the specified share or parent directory. If the directory with the same name already exists, the operation fails on the service. By default, the exception is swallowed by the client. To expose the exception, specify True for fail_on_exists. :param str share_name: Name of existing share. :param str directory_name: Name of directory to create, including the path to the parent directory. :param metadata: A dict with name_value pairs to associate with the share as metadata. Example:{'Category':'test'} :type metadata: dict(str, str): :param bool fail_on_exist: specify whether to throw an exception when the directory exists. False by default. :param int timeout: The timeout parameter is expressed in seconds. :return: True if directory is created, False if directory already exists. :rtype: bool ''' _validate_not_none('share_name', share_name) _validate_not_none('directory_name', directory_name) request = HTTPRequest() request.method = 'PUT' request.host_locations = self._get_host_locations() request.path = _get_path(share_name, directory_name) request.query = { 'restype': 'directory', 'timeout': _int_to_str(timeout), } _add_metadata_headers(metadata, request) if not fail_on_exist: try: self._perform_request(request, expected_errors=_RESOURCE_ALREADY_EXISTS_ERROR_CODE) return True except AzureHttpError as ex: _dont_fail_on_exist(ex) return False else: self._perform_request(request) return True
python
def create_directory(self, share_name, directory_name, metadata=None, fail_on_exist=False, timeout=None): ''' Creates a new directory under the specified share or parent directory. If the directory with the same name already exists, the operation fails on the service. By default, the exception is swallowed by the client. To expose the exception, specify True for fail_on_exists. :param str share_name: Name of existing share. :param str directory_name: Name of directory to create, including the path to the parent directory. :param metadata: A dict with name_value pairs to associate with the share as metadata. Example:{'Category':'test'} :type metadata: dict(str, str): :param bool fail_on_exist: specify whether to throw an exception when the directory exists. False by default. :param int timeout: The timeout parameter is expressed in seconds. :return: True if directory is created, False if directory already exists. :rtype: bool ''' _validate_not_none('share_name', share_name) _validate_not_none('directory_name', directory_name) request = HTTPRequest() request.method = 'PUT' request.host_locations = self._get_host_locations() request.path = _get_path(share_name, directory_name) request.query = { 'restype': 'directory', 'timeout': _int_to_str(timeout), } _add_metadata_headers(metadata, request) if not fail_on_exist: try: self._perform_request(request, expected_errors=_RESOURCE_ALREADY_EXISTS_ERROR_CODE) return True except AzureHttpError as ex: _dont_fail_on_exist(ex) return False else: self._perform_request(request) return True
[ "def", "create_directory", "(", "self", ",", "share_name", ",", "directory_name", ",", "metadata", "=", "None", ",", "fail_on_exist", "=", "False", ",", "timeout", "=", "None", ")", ":", "_validate_not_none", "(", "'share_name'", ",", "share_name", ")", "_vali...
Creates a new directory under the specified share or parent directory. If the directory with the same name already exists, the operation fails on the service. By default, the exception is swallowed by the client. To expose the exception, specify True for fail_on_exists. :param str share_name: Name of existing share. :param str directory_name: Name of directory to create, including the path to the parent directory. :param metadata: A dict with name_value pairs to associate with the share as metadata. Example:{'Category':'test'} :type metadata: dict(str, str): :param bool fail_on_exist: specify whether to throw an exception when the directory exists. False by default. :param int timeout: The timeout parameter is expressed in seconds. :return: True if directory is created, False if directory already exists. :rtype: bool
[ "Creates", "a", "new", "directory", "under", "the", "specified", "share", "or", "parent", "directory", ".", "If", "the", "directory", "with", "the", "same", "name", "already", "exists", "the", "operation", "fails", "on", "the", "service", ".", "By", "default...
52327354b192cbcf6b7905118ec6b5d57fa46275
https://github.com/Azure/azure-storage-python/blob/52327354b192cbcf6b7905118ec6b5d57fa46275/azure-storage-file/azure/storage/file/fileservice.py#L947-L993
226,339
Azure/azure-storage-python
azure-storage-file/azure/storage/file/fileservice.py
FileService.get_directory_properties
def get_directory_properties(self, share_name, directory_name, timeout=None, snapshot=None): ''' Returns all user-defined metadata and system properties for the specified directory. The data returned does not include the directory's list of files. :param str share_name: Name of existing share. :param str directory_name: The path to an existing directory. :param int timeout: The timeout parameter is expressed in seconds. :return: properties for the specified directory within a directory object. :param str snapshot: A string that represents the snapshot version, if applicable. :rtype: :class:`~azure.storage.file.models.Directory` ''' _validate_not_none('share_name', share_name) _validate_not_none('directory_name', directory_name) request = HTTPRequest() request.method = 'GET' request.host_locations = self._get_host_locations() request.path = _get_path(share_name, directory_name) request.query = { 'restype': 'directory', 'timeout': _int_to_str(timeout), 'sharesnapshot': _to_str(snapshot) } return self._perform_request(request, _parse_directory, [directory_name])
python
def get_directory_properties(self, share_name, directory_name, timeout=None, snapshot=None): ''' Returns all user-defined metadata and system properties for the specified directory. The data returned does not include the directory's list of files. :param str share_name: Name of existing share. :param str directory_name: The path to an existing directory. :param int timeout: The timeout parameter is expressed in seconds. :return: properties for the specified directory within a directory object. :param str snapshot: A string that represents the snapshot version, if applicable. :rtype: :class:`~azure.storage.file.models.Directory` ''' _validate_not_none('share_name', share_name) _validate_not_none('directory_name', directory_name) request = HTTPRequest() request.method = 'GET' request.host_locations = self._get_host_locations() request.path = _get_path(share_name, directory_name) request.query = { 'restype': 'directory', 'timeout': _int_to_str(timeout), 'sharesnapshot': _to_str(snapshot) } return self._perform_request(request, _parse_directory, [directory_name])
[ "def", "get_directory_properties", "(", "self", ",", "share_name", ",", "directory_name", ",", "timeout", "=", "None", ",", "snapshot", "=", "None", ")", ":", "_validate_not_none", "(", "'share_name'", ",", "share_name", ")", "_validate_not_none", "(", "'directory...
Returns all user-defined metadata and system properties for the specified directory. The data returned does not include the directory's list of files. :param str share_name: Name of existing share. :param str directory_name: The path to an existing directory. :param int timeout: The timeout parameter is expressed in seconds. :return: properties for the specified directory within a directory object. :param str snapshot: A string that represents the snapshot version, if applicable. :rtype: :class:`~azure.storage.file.models.Directory`
[ "Returns", "all", "user", "-", "defined", "metadata", "and", "system", "properties", "for", "the", "specified", "directory", ".", "The", "data", "returned", "does", "not", "include", "the", "directory", "s", "list", "of", "files", "." ]
52327354b192cbcf6b7905118ec6b5d57fa46275
https://github.com/Azure/azure-storage-python/blob/52327354b192cbcf6b7905118ec6b5d57fa46275/azure-storage-file/azure/storage/file/fileservice.py#L1041-L1070
226,340
Azure/azure-storage-python
azure-storage-file/azure/storage/file/fileservice.py
FileService.copy_file
def copy_file(self, share_name, directory_name, file_name, copy_source, metadata=None, timeout=None): ''' Copies a file asynchronously. This operation returns a copy operation properties object, including a copy ID you can use to check or abort the copy operation. The File service copies files on a best-effort basis. If the destination file exists, it will be overwritten. The destination file cannot be modified while the copy operation is in progress. :param str share_name: Name of the destination share. The share must exist. :param str directory_name: Name of the destination directory. The directory must exist. :param str file_name: Name of the destination file. If the destination file exists, it will be overwritten. Otherwise, it will be created. :param str copy_source: A URL of up to 2 KB in length that specifies an Azure file or blob. The value should be URL-encoded as it would appear in a request URI. If the source is in another account, the source must either be public or must be authenticated via a shared access signature. If the source is public, no authentication is required. Examples: https://myaccount.file.core.windows.net/myshare/mydir/myfile https://otheraccount.file.core.windows.net/myshare/mydir/myfile?sastoken :param metadata: Name-value pairs associated with the file as metadata. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination file. If one or more name-value pairs are specified, the destination file is created with the specified metadata, and the metadata is not copied from the source blob or file. :type metadata: dict(str, str). :param int timeout: The timeout parameter is expressed in seconds. :return: Copy operation properties such as status, source, and ID. 
:rtype: :class:`~azure.storage.file.models.CopyProperties` ''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) _validate_not_none('copy_source', copy_source) request = HTTPRequest() request.method = 'PUT' request.host_locations = self._get_host_locations() request.path = _get_path(share_name, directory_name, file_name) request.query = {'timeout': _int_to_str(timeout)} request.headers = { 'x-ms-copy-source': _to_str(copy_source), } _add_metadata_headers(metadata, request) return self._perform_request(request, _parse_properties, [FileProperties]).copy
python
def copy_file(self, share_name, directory_name, file_name, copy_source, metadata=None, timeout=None): ''' Copies a file asynchronously. This operation returns a copy operation properties object, including a copy ID you can use to check or abort the copy operation. The File service copies files on a best-effort basis. If the destination file exists, it will be overwritten. The destination file cannot be modified while the copy operation is in progress. :param str share_name: Name of the destination share. The share must exist. :param str directory_name: Name of the destination directory. The directory must exist. :param str file_name: Name of the destination file. If the destination file exists, it will be overwritten. Otherwise, it will be created. :param str copy_source: A URL of up to 2 KB in length that specifies an Azure file or blob. The value should be URL-encoded as it would appear in a request URI. If the source is in another account, the source must either be public or must be authenticated via a shared access signature. If the source is public, no authentication is required. Examples: https://myaccount.file.core.windows.net/myshare/mydir/myfile https://otheraccount.file.core.windows.net/myshare/mydir/myfile?sastoken :param metadata: Name-value pairs associated with the file as metadata. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination file. If one or more name-value pairs are specified, the destination file is created with the specified metadata, and the metadata is not copied from the source blob or file. :type metadata: dict(str, str). :param int timeout: The timeout parameter is expressed in seconds. :return: Copy operation properties such as status, source, and ID. 
:rtype: :class:`~azure.storage.file.models.CopyProperties` ''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) _validate_not_none('copy_source', copy_source) request = HTTPRequest() request.method = 'PUT' request.host_locations = self._get_host_locations() request.path = _get_path(share_name, directory_name, file_name) request.query = {'timeout': _int_to_str(timeout)} request.headers = { 'x-ms-copy-source': _to_str(copy_source), } _add_metadata_headers(metadata, request) return self._perform_request(request, _parse_properties, [FileProperties]).copy
[ "def", "copy_file", "(", "self", ",", "share_name", ",", "directory_name", ",", "file_name", ",", "copy_source", ",", "metadata", "=", "None", ",", "timeout", "=", "None", ")", ":", "_validate_not_none", "(", "'share_name'", ",", "share_name", ")", "_validate_...
Copies a file asynchronously. This operation returns a copy operation properties object, including a copy ID you can use to check or abort the copy operation. The File service copies files on a best-effort basis. If the destination file exists, it will be overwritten. The destination file cannot be modified while the copy operation is in progress. :param str share_name: Name of the destination share. The share must exist. :param str directory_name: Name of the destination directory. The directory must exist. :param str file_name: Name of the destination file. If the destination file exists, it will be overwritten. Otherwise, it will be created. :param str copy_source: A URL of up to 2 KB in length that specifies an Azure file or blob. The value should be URL-encoded as it would appear in a request URI. If the source is in another account, the source must either be public or must be authenticated via a shared access signature. If the source is public, no authentication is required. Examples: https://myaccount.file.core.windows.net/myshare/mydir/myfile https://otheraccount.file.core.windows.net/myshare/mydir/myfile?sastoken :param metadata: Name-value pairs associated with the file as metadata. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination file. If one or more name-value pairs are specified, the destination file is created with the specified metadata, and the metadata is not copied from the source blob or file. :type metadata: dict(str, str). :param int timeout: The timeout parameter is expressed in seconds. :return: Copy operation properties such as status, source, and ID. :rtype: :class:`~azure.storage.file.models.CopyProperties`
[ "Copies", "a", "file", "asynchronously", ".", "This", "operation", "returns", "a", "copy", "operation", "properties", "object", "including", "a", "copy", "ID", "you", "can", "use", "to", "check", "or", "abort", "the", "copy", "operation", ".", "The", "File",...
52327354b192cbcf6b7905118ec6b5d57fa46275
https://github.com/Azure/azure-storage-python/blob/52327354b192cbcf6b7905118ec6b5d57fa46275/azure-storage-file/azure/storage/file/fileservice.py#L1442-L1494
226,341
Azure/azure-storage-python
azure-storage-file/azure/storage/file/fileservice.py
FileService.abort_copy_file
def abort_copy_file(self, share_name, directory_name, file_name, copy_id, timeout=None): ''' Aborts a pending copy_file operation, and leaves a destination file with zero length and full metadata. :param str share_name: Name of destination share. :param str directory_name: The path to the directory. :param str file_name: Name of destination file. :param str copy_id: Copy identifier provided in the copy.id of the original copy_file operation. :param int timeout: The timeout parameter is expressed in seconds. ''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) _validate_not_none('copy_id', copy_id) request = HTTPRequest() request.method = 'PUT' request.host_locations = self._get_host_locations() request.path = _get_path(share_name, directory_name, file_name) request.query = { 'comp': 'copy', 'copyid': _to_str(copy_id), 'timeout': _int_to_str(timeout), } request.headers = { 'x-ms-copy-action': 'abort', } self._perform_request(request)
python
def abort_copy_file(self, share_name, directory_name, file_name, copy_id, timeout=None): ''' Aborts a pending copy_file operation, and leaves a destination file with zero length and full metadata. :param str share_name: Name of destination share. :param str directory_name: The path to the directory. :param str file_name: Name of destination file. :param str copy_id: Copy identifier provided in the copy.id of the original copy_file operation. :param int timeout: The timeout parameter is expressed in seconds. ''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) _validate_not_none('copy_id', copy_id) request = HTTPRequest() request.method = 'PUT' request.host_locations = self._get_host_locations() request.path = _get_path(share_name, directory_name, file_name) request.query = { 'comp': 'copy', 'copyid': _to_str(copy_id), 'timeout': _int_to_str(timeout), } request.headers = { 'x-ms-copy-action': 'abort', } self._perform_request(request)
[ "def", "abort_copy_file", "(", "self", ",", "share_name", ",", "directory_name", ",", "file_name", ",", "copy_id", ",", "timeout", "=", "None", ")", ":", "_validate_not_none", "(", "'share_name'", ",", "share_name", ")", "_validate_not_none", "(", "'file_name'", ...
Aborts a pending copy_file operation, and leaves a destination file with zero length and full metadata. :param str share_name: Name of destination share. :param str directory_name: The path to the directory. :param str file_name: Name of destination file. :param str copy_id: Copy identifier provided in the copy.id of the original copy_file operation. :param int timeout: The timeout parameter is expressed in seconds.
[ "Aborts", "a", "pending", "copy_file", "operation", "and", "leaves", "a", "destination", "file", "with", "zero", "length", "and", "full", "metadata", "." ]
52327354b192cbcf6b7905118ec6b5d57fa46275
https://github.com/Azure/azure-storage-python/blob/52327354b192cbcf6b7905118ec6b5d57fa46275/azure-storage-file/azure/storage/file/fileservice.py#L1496-L1529
226,342
Azure/azure-storage-python
azure-storage-file/azure/storage/file/fileservice.py
FileService.create_file_from_path
def create_file_from_path(self, share_name, directory_name, file_name, local_file_path, content_settings=None, metadata=None, validate_content=False, progress_callback=None, max_connections=2, timeout=None): ''' Creates a new azure file from a local file path, or updates the content of an existing file, with automatic chunking and progress notifications. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str file_name: Name of file to create or update. :param str local_file_path: Path of the local file to upload as the file content. :param ~azure.storage.file.models.ContentSettings content_settings: ContentSettings object used for setting file properties. :param metadata: Name-value pairs associated with the file as metadata. :type metadata: dict(str, str) :param bool validate_content: If true, calculates an MD5 hash for each range of the file. The storage service checks the hash of the content that has arrived with the hash that was sent. This is primarily valuable for detecting bitflips on the wire if using http instead of https as https (the default) will already validate. Note that this MD5 hash is not stored with the file. :param progress_callback: Callback for progress with signature function(current, total) where current is the number of bytes transfered so far and total is the size of the file, or None if the total size is unknown. :type progress_callback: func(current, total) :param int max_connections: Maximum number of parallel connections to use. :param int timeout: The timeout parameter is expressed in seconds. This method may make multiple calls to the Azure service and the timeout will apply to each call individually. 
''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) _validate_not_none('local_file_path', local_file_path) count = path.getsize(local_file_path) with open(local_file_path, 'rb') as stream: self.create_file_from_stream( share_name, directory_name, file_name, stream, count, content_settings, metadata, validate_content, progress_callback, max_connections, timeout)
python
def create_file_from_path(self, share_name, directory_name, file_name, local_file_path, content_settings=None, metadata=None, validate_content=False, progress_callback=None, max_connections=2, timeout=None): ''' Creates a new azure file from a local file path, or updates the content of an existing file, with automatic chunking and progress notifications. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str file_name: Name of file to create or update. :param str local_file_path: Path of the local file to upload as the file content. :param ~azure.storage.file.models.ContentSettings content_settings: ContentSettings object used for setting file properties. :param metadata: Name-value pairs associated with the file as metadata. :type metadata: dict(str, str) :param bool validate_content: If true, calculates an MD5 hash for each range of the file. The storage service checks the hash of the content that has arrived with the hash that was sent. This is primarily valuable for detecting bitflips on the wire if using http instead of https as https (the default) will already validate. Note that this MD5 hash is not stored with the file. :param progress_callback: Callback for progress with signature function(current, total) where current is the number of bytes transfered so far and total is the size of the file, or None if the total size is unknown. :type progress_callback: func(current, total) :param int max_connections: Maximum number of parallel connections to use. :param int timeout: The timeout parameter is expressed in seconds. This method may make multiple calls to the Azure service and the timeout will apply to each call individually. 
''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) _validate_not_none('local_file_path', local_file_path) count = path.getsize(local_file_path) with open(local_file_path, 'rb') as stream: self.create_file_from_stream( share_name, directory_name, file_name, stream, count, content_settings, metadata, validate_content, progress_callback, max_connections, timeout)
[ "def", "create_file_from_path", "(", "self", ",", "share_name", ",", "directory_name", ",", "file_name", ",", "local_file_path", ",", "content_settings", "=", "None", ",", "metadata", "=", "None", ",", "validate_content", "=", "False", ",", "progress_callback", "=...
Creates a new azure file from a local file path, or updates the content of an existing file, with automatic chunking and progress notifications. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str file_name: Name of file to create or update. :param str local_file_path: Path of the local file to upload as the file content. :param ~azure.storage.file.models.ContentSettings content_settings: ContentSettings object used for setting file properties. :param metadata: Name-value pairs associated with the file as metadata. :type metadata: dict(str, str) :param bool validate_content: If true, calculates an MD5 hash for each range of the file. The storage service checks the hash of the content that has arrived with the hash that was sent. This is primarily valuable for detecting bitflips on the wire if using http instead of https as https (the default) will already validate. Note that this MD5 hash is not stored with the file. :param progress_callback: Callback for progress with signature function(current, total) where current is the number of bytes transfered so far and total is the size of the file, or None if the total size is unknown. :type progress_callback: func(current, total) :param int max_connections: Maximum number of parallel connections to use. :param int timeout: The timeout parameter is expressed in seconds. This method may make multiple calls to the Azure service and the timeout will apply to each call individually.
[ "Creates", "a", "new", "azure", "file", "from", "a", "local", "file", "path", "or", "updates", "the", "content", "of", "an", "existing", "file", "with", "automatic", "chunking", "and", "progress", "notifications", "." ]
52327354b192cbcf6b7905118ec6b5d57fa46275
https://github.com/Azure/azure-storage-python/blob/52327354b192cbcf6b7905118ec6b5d57fa46275/azure-storage-file/azure/storage/file/fileservice.py#L1599-L1648
226,343
Azure/azure-storage-python
azure-storage-common/azure/storage/common/_http/httpclient.py
_HTTPClient.set_proxy
def set_proxy(self, host, port, user, password): ''' Sets the proxy server host and port for the HTTP CONNECT Tunnelling. Note that we set the proxies directly on the request later on rather than using the session object as requests has a bug where session proxy is ignored in favor of environment proxy. So, auth will not work unless it is passed directly when making the request as this overrides both. :param str host: Address of the proxy. Ex: '192.168.0.100' :param int port: Port of the proxy. Ex: 6000 :param str user: User for proxy authorization. :param str password: Password for proxy authorization. ''' if user and password: proxy_string = '{}:{}@{}:{}'.format(user, password, host, port) else: proxy_string = '{}:{}'.format(host, port) self.proxies = {'http': 'http://{}'.format(proxy_string), 'https': 'https://{}'.format(proxy_string)}
python
def set_proxy(self, host, port, user, password): ''' Sets the proxy server host and port for the HTTP CONNECT Tunnelling. Note that we set the proxies directly on the request later on rather than using the session object as requests has a bug where session proxy is ignored in favor of environment proxy. So, auth will not work unless it is passed directly when making the request as this overrides both. :param str host: Address of the proxy. Ex: '192.168.0.100' :param int port: Port of the proxy. Ex: 6000 :param str user: User for proxy authorization. :param str password: Password for proxy authorization. ''' if user and password: proxy_string = '{}:{}@{}:{}'.format(user, password, host, port) else: proxy_string = '{}:{}'.format(host, port) self.proxies = {'http': 'http://{}'.format(proxy_string), 'https': 'https://{}'.format(proxy_string)}
[ "def", "set_proxy", "(", "self", ",", "host", ",", "port", ",", "user", ",", "password", ")", ":", "if", "user", "and", "password", ":", "proxy_string", "=", "'{}:{}@{}:{}'", ".", "format", "(", "user", ",", "password", ",", "host", ",", "port", ")", ...
Sets the proxy server host and port for the HTTP CONNECT Tunnelling. Note that we set the proxies directly on the request later on rather than using the session object as requests has a bug where session proxy is ignored in favor of environment proxy. So, auth will not work unless it is passed directly when making the request as this overrides both. :param str host: Address of the proxy. Ex: '192.168.0.100' :param int port: Port of the proxy. Ex: 6000 :param str user: User for proxy authorization. :param str password: Password for proxy authorization.
[ "Sets", "the", "proxy", "server", "host", "and", "port", "for", "the", "HTTP", "CONNECT", "Tunnelling", "." ]
52327354b192cbcf6b7905118ec6b5d57fa46275
https://github.com/Azure/azure-storage-python/blob/52327354b192cbcf6b7905118ec6b5d57fa46275/azure-storage-common/azure/storage/common/_http/httpclient.py#L42-L66
226,344
Azure/azure-storage-python
azure-storage-common/azure/storage/common/retry.py
_Retry._should_retry
def _should_retry(self, context): ''' A function which determines whether or not to retry. :param ~azure.storage.models.RetryContext context: The retry context. This contains the request, response, and other data which can be used to determine whether or not to retry. :return: A boolean indicating whether or not to retry the request. :rtype: bool ''' # If max attempts are reached, do not retry. if context.count >= self.max_attempts: return False status = None if context.response and context.response.status: status = context.response.status if status is None: ''' If status is None, retry as this request triggered an exception. For example, network issues would trigger this. ''' return True elif 200 <= status < 300: ''' This method is called after a successful response, meaning we failed during the response body download or parsing. So, success codes should be retried. ''' return True elif 300 <= status < 500: ''' An exception occured, but in most cases it was expected. Examples could include a 309 Conflict or 412 Precondition Failed. ''' if status == 404 and context.location_mode == LocationMode.SECONDARY: # Response code 404 should be retried if secondary was used. return True if status == 408: # Response code 408 is a timeout and should be retried. return True return False elif status >= 500: ''' Response codes above 500 with the exception of 501 Not Implemented and 505 Version Not Supported indicate a server issue and should be retried. ''' if status == 501 or status == 505: return False return True else: # If something else happened, it's unexpected. Retry. return True
python
def _should_retry(self, context): ''' A function which determines whether or not to retry. :param ~azure.storage.models.RetryContext context: The retry context. This contains the request, response, and other data which can be used to determine whether or not to retry. :return: A boolean indicating whether or not to retry the request. :rtype: bool ''' # If max attempts are reached, do not retry. if context.count >= self.max_attempts: return False status = None if context.response and context.response.status: status = context.response.status if status is None: ''' If status is None, retry as this request triggered an exception. For example, network issues would trigger this. ''' return True elif 200 <= status < 300: ''' This method is called after a successful response, meaning we failed during the response body download or parsing. So, success codes should be retried. ''' return True elif 300 <= status < 500: ''' An exception occured, but in most cases it was expected. Examples could include a 309 Conflict or 412 Precondition Failed. ''' if status == 404 and context.location_mode == LocationMode.SECONDARY: # Response code 404 should be retried if secondary was used. return True if status == 408: # Response code 408 is a timeout and should be retried. return True return False elif status >= 500: ''' Response codes above 500 with the exception of 501 Not Implemented and 505 Version Not Supported indicate a server issue and should be retried. ''' if status == 501 or status == 505: return False return True else: # If something else happened, it's unexpected. Retry. return True
[ "def", "_should_retry", "(", "self", ",", "context", ")", ":", "# If max attempts are reached, do not retry.", "if", "context", ".", "count", ">=", "self", ".", "max_attempts", ":", "return", "False", "status", "=", "None", "if", "context", ".", "response", "and...
A function which determines whether or not to retry. :param ~azure.storage.models.RetryContext context: The retry context. This contains the request, response, and other data which can be used to determine whether or not to retry. :return: A boolean indicating whether or not to retry the request. :rtype: bool
[ "A", "function", "which", "determines", "whether", "or", "not", "to", "retry", "." ]
52327354b192cbcf6b7905118ec6b5d57fa46275
https://github.com/Azure/azure-storage-python/blob/52327354b192cbcf6b7905118ec6b5d57fa46275/azure-storage-common/azure/storage/common/retry.py#L38-L92
226,345
Azure/azure-storage-python
azure-storage-queue/azure/storage/queue/_serialization.py
_get_path
def _get_path(queue_name=None, include_messages=None, message_id=None): ''' Creates the path to access a queue resource. queue_name: Name of queue. include_messages: Whether or not to include messages. message_id: Message id. ''' if queue_name and include_messages and message_id: return '/{0}/messages/{1}'.format(_str(queue_name), message_id) if queue_name and include_messages: return '/{0}/messages'.format(_str(queue_name)) elif queue_name: return '/{0}'.format(_str(queue_name)) else: return '/'
python
def _get_path(queue_name=None, include_messages=None, message_id=None): ''' Creates the path to access a queue resource. queue_name: Name of queue. include_messages: Whether or not to include messages. message_id: Message id. ''' if queue_name and include_messages and message_id: return '/{0}/messages/{1}'.format(_str(queue_name), message_id) if queue_name and include_messages: return '/{0}/messages'.format(_str(queue_name)) elif queue_name: return '/{0}'.format(_str(queue_name)) else: return '/'
[ "def", "_get_path", "(", "queue_name", "=", "None", ",", "include_messages", "=", "None", ",", "message_id", "=", "None", ")", ":", "if", "queue_name", "and", "include_messages", "and", "message_id", ":", "return", "'/{0}/messages/{1}'", ".", "format", "(", "_...
Creates the path to access a queue resource. queue_name: Name of queue. include_messages: Whether or not to include messages. message_id: Message id.
[ "Creates", "the", "path", "to", "access", "a", "queue", "resource", "." ]
52327354b192cbcf6b7905118ec6b5d57fa46275
https://github.com/Azure/azure-storage-python/blob/52327354b192cbcf6b7905118ec6b5d57fa46275/azure-storage-queue/azure/storage/queue/_serialization.py#L29-L47
226,346
Azure/azure-storage-python
azure-storage-common/azure/storage/common/_deserialization.py
_parse_metadata
def _parse_metadata(response): ''' Extracts out resource metadata information. ''' if response is None or response.headers is None: return None metadata = _dict() for key, value in response.headers.items(): if key.lower().startswith('x-ms-meta-'): metadata[key[10:]] = _to_str(value) return metadata
python
def _parse_metadata(response): ''' Extracts out resource metadata information. ''' if response is None or response.headers is None: return None metadata = _dict() for key, value in response.headers.items(): if key.lower().startswith('x-ms-meta-'): metadata[key[10:]] = _to_str(value) return metadata
[ "def", "_parse_metadata", "(", "response", ")", ":", "if", "response", "is", "None", "or", "response", ".", "headers", "is", "None", ":", "return", "None", "metadata", "=", "_dict", "(", ")", "for", "key", ",", "value", "in", "response", ".", "headers", ...
Extracts out resource metadata information.
[ "Extracts", "out", "resource", "metadata", "information", "." ]
52327354b192cbcf6b7905118ec6b5d57fa46275
https://github.com/Azure/azure-storage-python/blob/52327354b192cbcf6b7905118ec6b5d57fa46275/azure-storage-common/azure/storage/common/_deserialization.py#L89-L102
226,347
dyve/django-bootstrap3
bootstrap3/components.py
render_icon
def render_icon(icon, **kwargs): """ Render a Bootstrap glyphicon icon """ attrs = { "class": add_css_class( "glyphicon glyphicon-{icon}".format(icon=icon), kwargs.get("extra_classes", ""), ) } title = kwargs.get("title") if title: attrs["title"] = title return render_tag("span", attrs=attrs)
python
def render_icon(icon, **kwargs): attrs = { "class": add_css_class( "glyphicon glyphicon-{icon}".format(icon=icon), kwargs.get("extra_classes", ""), ) } title = kwargs.get("title") if title: attrs["title"] = title return render_tag("span", attrs=attrs)
[ "def", "render_icon", "(", "icon", ",", "*", "*", "kwargs", ")", ":", "attrs", "=", "{", "\"class\"", ":", "add_css_class", "(", "\"glyphicon glyphicon-{icon}\"", ".", "format", "(", "icon", "=", "icon", ")", ",", "kwargs", ".", "get", "(", "\"extra_classe...
Render a Bootstrap glyphicon icon
[ "Render", "a", "Bootstrap", "glyphicon", "icon" ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/components.py#L11-L24
226,348
dyve/django-bootstrap3
bootstrap3/components.py
render_alert
def render_alert(content, alert_type=None, dismissable=True): """ Render a Bootstrap alert """ button = "" if not alert_type: alert_type = "info" css_classes = ["alert", "alert-" + text_value(alert_type)] if dismissable: css_classes.append("alert-dismissable") button = ( '<button type="button" class="close" ' + 'data-dismiss="alert" aria-hidden="true">&times;</button>' ) button_placeholder = "__BUTTON__" return mark_safe( render_tag( "div", attrs={"class": " ".join(css_classes)}, content=button_placeholder + text_value(content), ).replace(button_placeholder, button) )
python
def render_alert(content, alert_type=None, dismissable=True): button = "" if not alert_type: alert_type = "info" css_classes = ["alert", "alert-" + text_value(alert_type)] if dismissable: css_classes.append("alert-dismissable") button = ( '<button type="button" class="close" ' + 'data-dismiss="alert" aria-hidden="true">&times;</button>' ) button_placeholder = "__BUTTON__" return mark_safe( render_tag( "div", attrs={"class": " ".join(css_classes)}, content=button_placeholder + text_value(content), ).replace(button_placeholder, button) )
[ "def", "render_alert", "(", "content", ",", "alert_type", "=", "None", ",", "dismissable", "=", "True", ")", ":", "button", "=", "\"\"", "if", "not", "alert_type", ":", "alert_type", "=", "\"info\"", "css_classes", "=", "[", "\"alert\"", ",", "\"alert-\"", ...
Render a Bootstrap alert
[ "Render", "a", "Bootstrap", "alert" ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/components.py#L27-L48
226,349
dyve/django-bootstrap3
bootstrap3/text.py
text_concat
def text_concat(*args, **kwargs): """ Concatenate several values as a text string with an optional separator """ separator = text_value(kwargs.get("separator", "")) values = filter(None, [text_value(v) for v in args]) return separator.join(values)
python
def text_concat(*args, **kwargs): separator = text_value(kwargs.get("separator", "")) values = filter(None, [text_value(v) for v in args]) return separator.join(values)
[ "def", "text_concat", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "separator", "=", "text_value", "(", "kwargs", ".", "get", "(", "\"separator\"", ",", "\"\"", ")", ")", "values", "=", "filter", "(", "None", ",", "[", "text_value", "(", "v",...
Concatenate several values as a text string with an optional separator
[ "Concatenate", "several", "values", "as", "a", "text", "string", "with", "an", "optional", "separator" ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/text.py#L20-L26
226,350
dyve/django-bootstrap3
bootstrap3/templatetags/bootstrap3.py
bootstrap_message_classes
def bootstrap_message_classes(message): """ Return the message classes for a message """ extra_tags = None try: extra_tags = message.extra_tags except AttributeError: pass if not extra_tags: extra_tags = "" classes = [extra_tags] try: level = message.level except AttributeError: pass else: try: classes.append(MESSAGE_LEVEL_CLASSES[level]) except KeyError: classes.append("alert alert-danger") return " ".join(classes).strip()
python
def bootstrap_message_classes(message): extra_tags = None try: extra_tags = message.extra_tags except AttributeError: pass if not extra_tags: extra_tags = "" classes = [extra_tags] try: level = message.level except AttributeError: pass else: try: classes.append(MESSAGE_LEVEL_CLASSES[level]) except KeyError: classes.append("alert alert-danger") return " ".join(classes).strip()
[ "def", "bootstrap_message_classes", "(", "message", ")", ":", "extra_tags", "=", "None", "try", ":", "extra_tags", "=", "message", ".", "extra_tags", "except", "AttributeError", ":", "pass", "if", "not", "extra_tags", ":", "extra_tags", "=", "\"\"", "classes", ...
Return the message classes for a message
[ "Return", "the", "message", "classes", "for", "a", "message" ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/templatetags/bootstrap3.py#L63-L84
226,351
dyve/django-bootstrap3
bootstrap3/templatetags/bootstrap3.py
bootstrap_css
def bootstrap_css(): """ Return HTML for Bootstrap CSS. Adjust url in settings. If no url is returned, we don't want this statement to return any HTML. This is intended behavior. Default value: ``None`` This value is configurable, see Settings section **Tag name**:: bootstrap_css **Usage**:: {% bootstrap_css %} **Example**:: {% bootstrap_css %} """ rendered_urls = [render_link_tag(bootstrap_css_url())] if bootstrap_theme_url(): rendered_urls.append(render_link_tag(bootstrap_theme_url())) return mark_safe("".join([url for url in rendered_urls]))
python
def bootstrap_css(): rendered_urls = [render_link_tag(bootstrap_css_url())] if bootstrap_theme_url(): rendered_urls.append(render_link_tag(bootstrap_theme_url())) return mark_safe("".join([url for url in rendered_urls]))
[ "def", "bootstrap_css", "(", ")", ":", "rendered_urls", "=", "[", "render_link_tag", "(", "bootstrap_css_url", "(", ")", ")", "]", "if", "bootstrap_theme_url", "(", ")", ":", "rendered_urls", ".", "append", "(", "render_link_tag", "(", "bootstrap_theme_url", "("...
Return HTML for Bootstrap CSS. Adjust url in settings. If no url is returned, we don't want this statement to return any HTML. This is intended behavior. Default value: ``None`` This value is configurable, see Settings section **Tag name**:: bootstrap_css **Usage**:: {% bootstrap_css %} **Example**:: {% bootstrap_css %}
[ "Return", "HTML", "for", "Bootstrap", "CSS", ".", "Adjust", "url", "in", "settings", ".", "If", "no", "url", "is", "returned", "we", "don", "t", "want", "this", "statement", "to", "return", "any", "HTML", ".", "This", "is", "intended", "behavior", "." ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/templatetags/bootstrap3.py#L184-L210
226,352
dyve/django-bootstrap3
bootstrap3/templatetags/bootstrap3.py
bootstrap_javascript
def bootstrap_javascript(jquery=None): """ Return HTML for Bootstrap JavaScript. Adjust url in settings. If no url is returned, we don't want this statement to return any HTML. This is intended behavior. Default value: ``None`` This value is configurable, see Settings section **Tag name**:: bootstrap_javascript **Parameters**: :jquery: Truthy to include jQuery as well as Bootstrap **Usage**:: {% bootstrap_javascript %} **Example**:: {% bootstrap_javascript jquery=1 %} """ javascript = "" # See if we have to include jQuery if jquery is None: jquery = get_bootstrap_setting("include_jquery", False) # NOTE: No async on scripts, not mature enough. See issue #52 and #56 if jquery: url = bootstrap_jquery_url() if url: javascript += render_script_tag(url) url = bootstrap_javascript_url() if url: javascript += render_script_tag(url) return mark_safe(javascript)
python
def bootstrap_javascript(jquery=None): javascript = "" # See if we have to include jQuery if jquery is None: jquery = get_bootstrap_setting("include_jquery", False) # NOTE: No async on scripts, not mature enough. See issue #52 and #56 if jquery: url = bootstrap_jquery_url() if url: javascript += render_script_tag(url) url = bootstrap_javascript_url() if url: javascript += render_script_tag(url) return mark_safe(javascript)
[ "def", "bootstrap_javascript", "(", "jquery", "=", "None", ")", ":", "javascript", "=", "\"\"", "# See if we have to include jQuery", "if", "jquery", "is", "None", ":", "jquery", "=", "get_bootstrap_setting", "(", "\"include_jquery\"", ",", "False", ")", "# NOTE: No...
Return HTML for Bootstrap JavaScript. Adjust url in settings. If no url is returned, we don't want this statement to return any HTML. This is intended behavior. Default value: ``None`` This value is configurable, see Settings section **Tag name**:: bootstrap_javascript **Parameters**: :jquery: Truthy to include jQuery as well as Bootstrap **Usage**:: {% bootstrap_javascript %} **Example**:: {% bootstrap_javascript jquery=1 %}
[ "Return", "HTML", "for", "Bootstrap", "JavaScript", "." ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/templatetags/bootstrap3.py#L214-L255
226,353
dyve/django-bootstrap3
bootstrap3/templatetags/bootstrap3.py
bootstrap_buttons
def bootstrap_buttons(parser, token): """ Render buttons for form **Tag name**:: buttons **Parameters**: submit Text for a submit button reset Text for a reset button **Usage**:: {% buttons %}{% endbuttons %} **Example**:: {% buttons submit='OK' reset="Cancel" %}{% endbuttons %} """ kwargs = parse_token_contents(parser, token) kwargs["nodelist"] = parser.parse(("endbuttons",)) parser.delete_first_token() return ButtonsNode(**kwargs)
python
def bootstrap_buttons(parser, token): kwargs = parse_token_contents(parser, token) kwargs["nodelist"] = parser.parse(("endbuttons",)) parser.delete_first_token() return ButtonsNode(**kwargs)
[ "def", "bootstrap_buttons", "(", "parser", ",", "token", ")", ":", "kwargs", "=", "parse_token_contents", "(", "parser", ",", "token", ")", "kwargs", "[", "\"nodelist\"", "]", "=", "parser", ".", "parse", "(", "(", "\"endbuttons\"", ",", ")", ")", "parser"...
Render buttons for form **Tag name**:: buttons **Parameters**: submit Text for a submit button reset Text for a reset button **Usage**:: {% buttons %}{% endbuttons %} **Example**:: {% buttons submit='OK' reset="Cancel" %}{% endbuttons %}
[ "Render", "buttons", "for", "form" ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/templatetags/bootstrap3.py#L693-L721
226,354
dyve/django-bootstrap3
bootstrap3/templatetags/bootstrap3.py
bootstrap_messages
def bootstrap_messages(context, *args, **kwargs): """ Show django.contrib.messages Messages in Bootstrap alert containers. In order to make the alerts dismissable (with the close button), we have to set the jquery parameter too when using the bootstrap_javascript tag. Uses the template ``bootstrap3/messages.html``. **Tag name**:: bootstrap_messages **Parameters**: None. **Usage**:: {% bootstrap_messages %} **Example**:: {% bootstrap_javascript jquery=1 %} {% bootstrap_messages %} """ # Force Django 1.8+ style, so dicts and not Context # TODO: This may be due to a bug in Django 1.8/1.9+ if Context and isinstance(context, Context): context = context.flatten() context.update({"message_constants": message_constants}) return render_template_file("bootstrap3/messages.html", context=context)
python
def bootstrap_messages(context, *args, **kwargs): # Force Django 1.8+ style, so dicts and not Context # TODO: This may be due to a bug in Django 1.8/1.9+ if Context and isinstance(context, Context): context = context.flatten() context.update({"message_constants": message_constants}) return render_template_file("bootstrap3/messages.html", context=context)
[ "def", "bootstrap_messages", "(", "context", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# Force Django 1.8+ style, so dicts and not Context", "# TODO: This may be due to a bug in Django 1.8/1.9+", "if", "Context", "and", "isinstance", "(", "context", ",", "Cont...
Show django.contrib.messages Messages in Bootstrap alert containers. In order to make the alerts dismissable (with the close button), we have to set the jquery parameter too when using the bootstrap_javascript tag. Uses the template ``bootstrap3/messages.html``. **Tag name**:: bootstrap_messages **Parameters**: None. **Usage**:: {% bootstrap_messages %} **Example**:: {% bootstrap_javascript jquery=1 %} {% bootstrap_messages %}
[ "Show", "django", ".", "contrib", ".", "messages", "Messages", "in", "Bootstrap", "alert", "containers", "." ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/templatetags/bootstrap3.py#L753-L787
226,355
dyve/django-bootstrap3
bootstrap3/templatetags/bootstrap3.py
get_pagination_context
def get_pagination_context( page, pages_to_show=11, url=None, size=None, extra=None, parameter_name="page" ): """ Generate Bootstrap pagination context from a page object """ pages_to_show = int(pages_to_show) if pages_to_show < 1: raise ValueError( "Pagination pages_to_show should be a positive integer, you specified {pages}".format( pages=pages_to_show ) ) num_pages = page.paginator.num_pages current_page = page.number half_page_num = int(floor(pages_to_show / 2)) if half_page_num < 0: half_page_num = 0 first_page = current_page - half_page_num if first_page <= 1: first_page = 1 if first_page > 1: pages_back = first_page - half_page_num if pages_back < 1: pages_back = 1 else: pages_back = None last_page = first_page + pages_to_show - 1 if pages_back is None: last_page += 1 if last_page > num_pages: last_page = num_pages if last_page < num_pages: pages_forward = last_page + half_page_num if pages_forward > num_pages: pages_forward = num_pages else: pages_forward = None if first_page > 1: first_page -= 1 if pages_back is not None and pages_back > 1: pages_back -= 1 else: pages_back = None pages_shown = [] for i in range(first_page, last_page + 1): pages_shown.append(i) # Append proper character to url if url: # Remove existing page GET parameters url = force_text(url) url = re.sub(r"\?{0}\=[^\&]+".format(parameter_name), "?", url) url = re.sub(r"\&{0}\=[^\&]+".format(parameter_name), "", url) # Append proper separator if "?" in url: url += "&" else: url += "?" # Append extra string to url if extra: if not url: url = "?" 
url += force_text(extra) + "&" if url: url = url.replace("?&", "?") # Set CSS classes, see http://getbootstrap.com/components/#pagination pagination_css_classes = ["pagination"] if size == "small": pagination_css_classes.append("pagination-sm") elif size == "large": pagination_css_classes.append("pagination-lg") # Build context object return { "bootstrap_pagination_url": url, "num_pages": num_pages, "current_page": current_page, "first_page": first_page, "last_page": last_page, "pages_shown": pages_shown, "pages_back": pages_back, "pages_forward": pages_forward, "pagination_css_classes": " ".join(pagination_css_classes), "parameter_name": parameter_name, }
python
def get_pagination_context( page, pages_to_show=11, url=None, size=None, extra=None, parameter_name="page" ): pages_to_show = int(pages_to_show) if pages_to_show < 1: raise ValueError( "Pagination pages_to_show should be a positive integer, you specified {pages}".format( pages=pages_to_show ) ) num_pages = page.paginator.num_pages current_page = page.number half_page_num = int(floor(pages_to_show / 2)) if half_page_num < 0: half_page_num = 0 first_page = current_page - half_page_num if first_page <= 1: first_page = 1 if first_page > 1: pages_back = first_page - half_page_num if pages_back < 1: pages_back = 1 else: pages_back = None last_page = first_page + pages_to_show - 1 if pages_back is None: last_page += 1 if last_page > num_pages: last_page = num_pages if last_page < num_pages: pages_forward = last_page + half_page_num if pages_forward > num_pages: pages_forward = num_pages else: pages_forward = None if first_page > 1: first_page -= 1 if pages_back is not None and pages_back > 1: pages_back -= 1 else: pages_back = None pages_shown = [] for i in range(first_page, last_page + 1): pages_shown.append(i) # Append proper character to url if url: # Remove existing page GET parameters url = force_text(url) url = re.sub(r"\?{0}\=[^\&]+".format(parameter_name), "?", url) url = re.sub(r"\&{0}\=[^\&]+".format(parameter_name), "", url) # Append proper separator if "?" in url: url += "&" else: url += "?" # Append extra string to url if extra: if not url: url = "?" 
url += force_text(extra) + "&" if url: url = url.replace("?&", "?") # Set CSS classes, see http://getbootstrap.com/components/#pagination pagination_css_classes = ["pagination"] if size == "small": pagination_css_classes.append("pagination-sm") elif size == "large": pagination_css_classes.append("pagination-lg") # Build context object return { "bootstrap_pagination_url": url, "num_pages": num_pages, "current_page": current_page, "first_page": first_page, "last_page": last_page, "pages_shown": pages_shown, "pages_back": pages_back, "pages_forward": pages_forward, "pagination_css_classes": " ".join(pagination_css_classes), "parameter_name": parameter_name, }
[ "def", "get_pagination_context", "(", "page", ",", "pages_to_show", "=", "11", ",", "url", "=", "None", ",", "size", "=", "None", ",", "extra", "=", "None", ",", "parameter_name", "=", "\"page\"", ")", ":", "pages_to_show", "=", "int", "(", "pages_to_show"...
Generate Bootstrap pagination context from a page object
[ "Generate", "Bootstrap", "pagination", "context", "from", "a", "page", "object" ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/templatetags/bootstrap3.py#L856-L939
226,356
dyve/django-bootstrap3
bootstrap3/utils.py
handle_var
def handle_var(value, context): """ Handle template tag variable """ # Resolve FilterExpression and Variable immediately if isinstance(value, FilterExpression) or isinstance(value, Variable): return value.resolve(context) # Return quoted strings unquoted # http://djangosnippets.org/snippets/886 stringval = QUOTED_STRING.search(value) if stringval: return stringval.group("noquotes") # Resolve variable or return string value try: return Variable(value).resolve(context) except VariableDoesNotExist: return value
python
def handle_var(value, context): # Resolve FilterExpression and Variable immediately if isinstance(value, FilterExpression) or isinstance(value, Variable): return value.resolve(context) # Return quoted strings unquoted # http://djangosnippets.org/snippets/886 stringval = QUOTED_STRING.search(value) if stringval: return stringval.group("noquotes") # Resolve variable or return string value try: return Variable(value).resolve(context) except VariableDoesNotExist: return value
[ "def", "handle_var", "(", "value", ",", "context", ")", ":", "# Resolve FilterExpression and Variable immediately", "if", "isinstance", "(", "value", ",", "FilterExpression", ")", "or", "isinstance", "(", "value", ",", "Variable", ")", ":", "return", "value", ".",...
Handle template tag variable
[ "Handle", "template", "tag", "variable" ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/utils.py#L35-L51
226,357
dyve/django-bootstrap3
bootstrap3/utils.py
parse_token_contents
def parse_token_contents(parser, token): """ Parse template tag contents """ bits = token.split_contents() tag = bits.pop(0) args = [] kwargs = {} asvar = None if len(bits) >= 2 and bits[-2] == "as": asvar = bits[-1] bits = bits[:-2] for bit in bits: match = kwarg_re.match(bit) if not match: raise TemplateSyntaxError('Malformed arguments to tag "{}"'.format(tag)) name, value = match.groups() if name: kwargs[name] = parser.compile_filter(value) else: args.append(parser.compile_filter(value)) return {"tag": tag, "args": args, "kwargs": kwargs, "asvar": asvar}
python
def parse_token_contents(parser, token): bits = token.split_contents() tag = bits.pop(0) args = [] kwargs = {} asvar = None if len(bits) >= 2 and bits[-2] == "as": asvar = bits[-1] bits = bits[:-2] for bit in bits: match = kwarg_re.match(bit) if not match: raise TemplateSyntaxError('Malformed arguments to tag "{}"'.format(tag)) name, value = match.groups() if name: kwargs[name] = parser.compile_filter(value) else: args.append(parser.compile_filter(value)) return {"tag": tag, "args": args, "kwargs": kwargs, "asvar": asvar}
[ "def", "parse_token_contents", "(", "parser", ",", "token", ")", ":", "bits", "=", "token", ".", "split_contents", "(", ")", "tag", "=", "bits", ".", "pop", "(", "0", ")", "args", "=", "[", "]", "kwargs", "=", "{", "}", "asvar", "=", "None", "if", ...
Parse template tag contents
[ "Parse", "template", "tag", "contents" ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/utils.py#L54-L75
226,358
dyve/django-bootstrap3
bootstrap3/utils.py
split_css_classes
def split_css_classes(css_classes): """ Turn string into a list of CSS classes """ classes_list = text_value(css_classes).split(" ") return [c for c in classes_list if c]
python
def split_css_classes(css_classes): classes_list = text_value(css_classes).split(" ") return [c for c in classes_list if c]
[ "def", "split_css_classes", "(", "css_classes", ")", ":", "classes_list", "=", "text_value", "(", "css_classes", ")", ".", "split", "(", "\" \"", ")", "return", "[", "c", "for", "c", "in", "classes_list", "if", "c", "]" ]
Turn string into a list of CSS classes
[ "Turn", "string", "into", "a", "list", "of", "CSS", "classes" ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/utils.py#L78-L83
226,359
dyve/django-bootstrap3
bootstrap3/utils.py
add_css_class
def add_css_class(css_classes, css_class, prepend=False): """ Add a CSS class to a string of CSS classes """ classes_list = split_css_classes(css_classes) classes_to_add = [c for c in split_css_classes(css_class) if c not in classes_list] if prepend: classes_list = classes_to_add + classes_list else: classes_list += classes_to_add return " ".join(classes_list)
python
def add_css_class(css_classes, css_class, prepend=False): classes_list = split_css_classes(css_classes) classes_to_add = [c for c in split_css_classes(css_class) if c not in classes_list] if prepend: classes_list = classes_to_add + classes_list else: classes_list += classes_to_add return " ".join(classes_list)
[ "def", "add_css_class", "(", "css_classes", ",", "css_class", ",", "prepend", "=", "False", ")", ":", "classes_list", "=", "split_css_classes", "(", "css_classes", ")", "classes_to_add", "=", "[", "c", "for", "c", "in", "split_css_classes", "(", "css_class", "...
Add a CSS class to a string of CSS classes
[ "Add", "a", "CSS", "class", "to", "a", "string", "of", "CSS", "classes" ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/utils.py#L86-L96
226,360
dyve/django-bootstrap3
bootstrap3/utils.py
remove_css_class
def remove_css_class(css_classes, css_class): """ Remove a CSS class from a string of CSS classes """ remove = set(split_css_classes(css_class)) classes_list = [c for c in split_css_classes(css_classes) if c not in remove] return " ".join(classes_list)
python
def remove_css_class(css_classes, css_class): remove = set(split_css_classes(css_class)) classes_list = [c for c in split_css_classes(css_classes) if c not in remove] return " ".join(classes_list)
[ "def", "remove_css_class", "(", "css_classes", ",", "css_class", ")", ":", "remove", "=", "set", "(", "split_css_classes", "(", "css_class", ")", ")", "classes_list", "=", "[", "c", "for", "c", "in", "split_css_classes", "(", "css_classes", ")", "if", "c", ...
Remove a CSS class from a string of CSS classes
[ "Remove", "a", "CSS", "class", "from", "a", "string", "of", "CSS", "classes" ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/utils.py#L99-L105
226,361
dyve/django-bootstrap3
bootstrap3/utils.py
render_link_tag
def render_link_tag(url, rel="stylesheet", media=None): """ Build a link tag """ url_dict = url_to_attrs_dict(url, url_attr="href") url_dict.setdefault("href", url_dict.pop("url", None)) url_dict["rel"] = rel if media: url_dict["media"] = media return render_tag("link", attrs=url_dict, close=False)
python
def render_link_tag(url, rel="stylesheet", media=None): url_dict = url_to_attrs_dict(url, url_attr="href") url_dict.setdefault("href", url_dict.pop("url", None)) url_dict["rel"] = rel if media: url_dict["media"] = media return render_tag("link", attrs=url_dict, close=False)
[ "def", "render_link_tag", "(", "url", ",", "rel", "=", "\"stylesheet\"", ",", "media", "=", "None", ")", ":", "url_dict", "=", "url_to_attrs_dict", "(", "url", ",", "url_attr", "=", "\"href\"", ")", "url_dict", ".", "setdefault", "(", "\"href\"", ",", "url...
Build a link tag
[ "Build", "a", "link", "tag" ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/utils.py#L116-L125
226,362
dyve/django-bootstrap3
bootstrap3/utils.py
render_template_file
def render_template_file(template, context=None): """ Render a Template to unicode """ assert isinstance(context, Mapping) template = get_template(template) return template.render(context)
python
def render_template_file(template, context=None): assert isinstance(context, Mapping) template = get_template(template) return template.render(context)
[ "def", "render_template_file", "(", "template", ",", "context", "=", "None", ")", ":", "assert", "isinstance", "(", "context", ",", "Mapping", ")", "template", "=", "get_template", "(", "template", ")", "return", "template", ".", "render", "(", "context", ")...
Render a Template to unicode
[ "Render", "a", "Template", "to", "unicode" ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/utils.py#L143-L149
226,363
dyve/django-bootstrap3
bootstrap3/utils.py
url_replace_param
def url_replace_param(url, name, value): """ Replace a GET parameter in an URL """ url_components = urlparse(force_str(url)) query_params = parse_qs(url_components.query) query_params[name] = value query = urlencode(query_params, doseq=True) return force_text( urlunparse( [ url_components.scheme, url_components.netloc, url_components.path, url_components.params, query, url_components.fragment, ] ) )
python
def url_replace_param(url, name, value): url_components = urlparse(force_str(url)) query_params = parse_qs(url_components.query) query_params[name] = value query = urlencode(query_params, doseq=True) return force_text( urlunparse( [ url_components.scheme, url_components.netloc, url_components.path, url_components.params, query, url_components.fragment, ] ) )
[ "def", "url_replace_param", "(", "url", ",", "name", ",", "value", ")", ":", "url_components", "=", "urlparse", "(", "force_str", "(", "url", ")", ")", "query_params", "=", "parse_qs", "(", "url_components", ".", "query", ")", "query_params", "[", "name", ...
Replace a GET parameter in an URL
[ "Replace", "a", "GET", "parameter", "in", "an", "URL" ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/utils.py#L152-L171
226,364
dyve/django-bootstrap3
bootstrap3/utils.py
url_to_attrs_dict
def url_to_attrs_dict(url, url_attr): """ Sanitize url dict as used in django-bootstrap3 settings. """ result = dict() # If url is not a string, it should be a dict if isinstance(url, six.string_types): url_value = url else: try: url_value = url["url"] except TypeError: raise BootstrapError( 'Function "url_to_attrs_dict" expects a string or a dict with key "url".' ) crossorigin = url.get("crossorigin", None) integrity = url.get("integrity", None) if crossorigin: result["crossorigin"] = crossorigin if integrity: result["integrity"] = integrity result[url_attr] = url_value return result
python
def url_to_attrs_dict(url, url_attr): result = dict() # If url is not a string, it should be a dict if isinstance(url, six.string_types): url_value = url else: try: url_value = url["url"] except TypeError: raise BootstrapError( 'Function "url_to_attrs_dict" expects a string or a dict with key "url".' ) crossorigin = url.get("crossorigin", None) integrity = url.get("integrity", None) if crossorigin: result["crossorigin"] = crossorigin if integrity: result["integrity"] = integrity result[url_attr] = url_value return result
[ "def", "url_to_attrs_dict", "(", "url", ",", "url_attr", ")", ":", "result", "=", "dict", "(", ")", "# If url is not a string, it should be a dict", "if", "isinstance", "(", "url", ",", "six", ".", "string_types", ")", ":", "url_value", "=", "url", "else", ":"...
Sanitize url dict as used in django-bootstrap3 settings.
[ "Sanitize", "url", "dict", "as", "used", "in", "django", "-", "bootstrap3", "settings", "." ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/utils.py#L174-L196
226,365
dyve/django-bootstrap3
bootstrap3/forms.py
render_formset
def render_formset(formset, **kwargs): """ Render a formset to a Bootstrap layout """ renderer_cls = get_formset_renderer(**kwargs) return renderer_cls(formset, **kwargs).render()
python
def render_formset(formset, **kwargs): renderer_cls = get_formset_renderer(**kwargs) return renderer_cls(formset, **kwargs).render()
[ "def", "render_formset", "(", "formset", ",", "*", "*", "kwargs", ")", ":", "renderer_cls", "=", "get_formset_renderer", "(", "*", "*", "kwargs", ")", "return", "renderer_cls", "(", "formset", ",", "*", "*", "kwargs", ")", ".", "render", "(", ")" ]
Render a formset to a Bootstrap layout
[ "Render", "a", "formset", "to", "a", "Bootstrap", "layout" ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/forms.py#L41-L46
226,366
dyve/django-bootstrap3
bootstrap3/forms.py
render_formset_errors
def render_formset_errors(formset, **kwargs): """ Render formset errors to a Bootstrap layout """ renderer_cls = get_formset_renderer(**kwargs) return renderer_cls(formset, **kwargs).render_errors()
python
def render_formset_errors(formset, **kwargs): renderer_cls = get_formset_renderer(**kwargs) return renderer_cls(formset, **kwargs).render_errors()
[ "def", "render_formset_errors", "(", "formset", ",", "*", "*", "kwargs", ")", ":", "renderer_cls", "=", "get_formset_renderer", "(", "*", "*", "kwargs", ")", "return", "renderer_cls", "(", "formset", ",", "*", "*", "kwargs", ")", ".", "render_errors", "(", ...
Render formset errors to a Bootstrap layout
[ "Render", "formset", "errors", "to", "a", "Bootstrap", "layout" ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/forms.py#L49-L54
226,367
dyve/django-bootstrap3
bootstrap3/forms.py
render_form
def render_form(form, **kwargs): """ Render a form to a Bootstrap layout """ renderer_cls = get_form_renderer(**kwargs) return renderer_cls(form, **kwargs).render()
python
def render_form(form, **kwargs): renderer_cls = get_form_renderer(**kwargs) return renderer_cls(form, **kwargs).render()
[ "def", "render_form", "(", "form", ",", "*", "*", "kwargs", ")", ":", "renderer_cls", "=", "get_form_renderer", "(", "*", "*", "kwargs", ")", "return", "renderer_cls", "(", "form", ",", "*", "*", "kwargs", ")", ".", "render", "(", ")" ]
Render a form to a Bootstrap layout
[ "Render", "a", "form", "to", "a", "Bootstrap", "layout" ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/forms.py#L57-L62
226,368
dyve/django-bootstrap3
bootstrap3/forms.py
render_form_errors
def render_form_errors(form, error_types="non_field_errors", **kwargs): """ Render form errors to a Bootstrap layout """ renderer_cls = get_form_renderer(**kwargs) return renderer_cls(form, **kwargs).render_errors(error_types)
python
def render_form_errors(form, error_types="non_field_errors", **kwargs): renderer_cls = get_form_renderer(**kwargs) return renderer_cls(form, **kwargs).render_errors(error_types)
[ "def", "render_form_errors", "(", "form", ",", "error_types", "=", "\"non_field_errors\"", ",", "*", "*", "kwargs", ")", ":", "renderer_cls", "=", "get_form_renderer", "(", "*", "*", "kwargs", ")", "return", "renderer_cls", "(", "form", ",", "*", "*", "kwarg...
Render form errors to a Bootstrap layout
[ "Render", "form", "errors", "to", "a", "Bootstrap", "layout" ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/forms.py#L65-L70
226,369
dyve/django-bootstrap3
bootstrap3/forms.py
render_field
def render_field(field, **kwargs): """ Render a field to a Bootstrap layout """ renderer_cls = get_field_renderer(**kwargs) return renderer_cls(field, **kwargs).render()
python
def render_field(field, **kwargs): renderer_cls = get_field_renderer(**kwargs) return renderer_cls(field, **kwargs).render()
[ "def", "render_field", "(", "field", ",", "*", "*", "kwargs", ")", ":", "renderer_cls", "=", "get_field_renderer", "(", "*", "*", "kwargs", ")", "return", "renderer_cls", "(", "field", ",", "*", "*", "kwargs", ")", ".", "render", "(", ")" ]
Render a field to a Bootstrap layout
[ "Render", "a", "field", "to", "a", "Bootstrap", "layout" ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/forms.py#L73-L78
226,370
dyve/django-bootstrap3
bootstrap3/forms.py
render_label
def render_label(content, label_for=None, label_class=None, label_title=""): """ Render a label with content """ attrs = {} if label_for: attrs["for"] = label_for if label_class: attrs["class"] = label_class if label_title: attrs["title"] = label_title return render_tag("label", attrs=attrs, content=content)
python
def render_label(content, label_for=None, label_class=None, label_title=""): attrs = {} if label_for: attrs["for"] = label_for if label_class: attrs["class"] = label_class if label_title: attrs["title"] = label_title return render_tag("label", attrs=attrs, content=content)
[ "def", "render_label", "(", "content", ",", "label_for", "=", "None", ",", "label_class", "=", "None", ",", "label_title", "=", "\"\"", ")", ":", "attrs", "=", "{", "}", "if", "label_for", ":", "attrs", "[", "\"for\"", "]", "=", "label_for", "if", "lab...
Render a label with content
[ "Render", "a", "label", "with", "content" ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/forms.py#L81-L92
226,371
dyve/django-bootstrap3
bootstrap3/forms.py
render_button
def render_button( content, button_type=None, icon=None, button_class="btn-default", size="", href="", name=None, value=None, title=None, extra_classes="", id="", ): """ Render a button with content """ attrs = {} classes = add_css_class("btn", button_class) size = text_value(size).lower().strip() if size == "xs": classes = add_css_class(classes, "btn-xs") elif size == "sm" or size == "small": classes = add_css_class(classes, "btn-sm") elif size == "lg" or size == "large": classes = add_css_class(classes, "btn-lg") elif size == "md" or size == "medium": pass elif size: raise BootstrapError( 'Parameter "size" should be "xs", "sm", "lg" or ' + 'empty ("{}" given).'.format(size) ) if button_type: if button_type not in ("submit", "reset", "button", "link"): raise BootstrapError( 'Parameter "button_type" should be "submit", "reset", ' + '"button", "link" or empty ("{}" given).'.format(button_type) ) attrs["type"] = button_type classes = add_css_class(classes, extra_classes) attrs["class"] = classes icon_content = render_icon(icon) if icon else "" if href: attrs["href"] = href tag = "a" else: tag = "button" if id: attrs["id"] = id if name: attrs["name"] = name if value: attrs["value"] = value if title: attrs["title"] = title return render_tag( tag, attrs=attrs, content=mark_safe(text_concat(icon_content, content, separator=" ")), )
python
def render_button( content, button_type=None, icon=None, button_class="btn-default", size="", href="", name=None, value=None, title=None, extra_classes="", id="", ): attrs = {} classes = add_css_class("btn", button_class) size = text_value(size).lower().strip() if size == "xs": classes = add_css_class(classes, "btn-xs") elif size == "sm" or size == "small": classes = add_css_class(classes, "btn-sm") elif size == "lg" or size == "large": classes = add_css_class(classes, "btn-lg") elif size == "md" or size == "medium": pass elif size: raise BootstrapError( 'Parameter "size" should be "xs", "sm", "lg" or ' + 'empty ("{}" given).'.format(size) ) if button_type: if button_type not in ("submit", "reset", "button", "link"): raise BootstrapError( 'Parameter "button_type" should be "submit", "reset", ' + '"button", "link" or empty ("{}" given).'.format(button_type) ) attrs["type"] = button_type classes = add_css_class(classes, extra_classes) attrs["class"] = classes icon_content = render_icon(icon) if icon else "" if href: attrs["href"] = href tag = "a" else: tag = "button" if id: attrs["id"] = id if name: attrs["name"] = name if value: attrs["value"] = value if title: attrs["title"] = title return render_tag( tag, attrs=attrs, content=mark_safe(text_concat(icon_content, content, separator=" ")), )
[ "def", "render_button", "(", "content", ",", "button_type", "=", "None", ",", "icon", "=", "None", ",", "button_class", "=", "\"btn-default\"", ",", "size", "=", "\"\"", ",", "href", "=", "\"\"", ",", "name", "=", "None", ",", "value", "=", "None", ","...
Render a button with content
[ "Render", "a", "button", "with", "content" ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/forms.py#L95-L154
226,372
dyve/django-bootstrap3
bootstrap3/forms.py
render_field_and_label
def render_field_and_label( field, label, field_class="", label_for=None, label_class="", layout="", **kwargs ): """ Render a field with its label """ if layout == "horizontal": if not label_class: label_class = get_bootstrap_setting("horizontal_label_class") if not field_class: field_class = get_bootstrap_setting("horizontal_field_class") if not label: label = mark_safe("&#160;") label_class = add_css_class(label_class, "control-label") html = field if field_class: html = '<div class="{klass}">{html}</div>'.format(klass=field_class, html=html) if label: html = render_label(label, label_for=label_for, label_class=label_class) + html return html
python
def render_field_and_label( field, label, field_class="", label_for=None, label_class="", layout="", **kwargs ): if layout == "horizontal": if not label_class: label_class = get_bootstrap_setting("horizontal_label_class") if not field_class: field_class = get_bootstrap_setting("horizontal_field_class") if not label: label = mark_safe("&#160;") label_class = add_css_class(label_class, "control-label") html = field if field_class: html = '<div class="{klass}">{html}</div>'.format(klass=field_class, html=html) if label: html = render_label(label, label_for=label_for, label_class=label_class) + html return html
[ "def", "render_field_and_label", "(", "field", ",", "label", ",", "field_class", "=", "\"\"", ",", "label_for", "=", "None", ",", "label_class", "=", "\"\"", ",", "layout", "=", "\"\"", ",", "*", "*", "kwargs", ")", ":", "if", "layout", "==", "\"horizont...
Render a field with its label
[ "Render", "a", "field", "with", "its", "label" ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/forms.py#L157-L176
226,373
dyve/django-bootstrap3
bootstrap3/forms.py
is_widget_required_attribute
def is_widget_required_attribute(widget): """ Is this widget required? """ if not widget.is_required: return False if isinstance(widget, WIDGETS_NO_REQUIRED): return False return True
python
def is_widget_required_attribute(widget): if not widget.is_required: return False if isinstance(widget, WIDGETS_NO_REQUIRED): return False return True
[ "def", "is_widget_required_attribute", "(", "widget", ")", ":", "if", "not", "widget", ".", "is_required", ":", "return", "False", "if", "isinstance", "(", "widget", ",", "WIDGETS_NO_REQUIRED", ")", ":", "return", "False", "return", "True" ]
Is this widget required?
[ "Is", "this", "widget", "required?" ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/forms.py#L188-L196
226,374
dyve/django-bootstrap3
bootstrap3/forms.py
is_widget_with_placeholder
def is_widget_with_placeholder(widget): """ Is this a widget that should have a placeholder? Only text, search, url, tel, e-mail, password, number have placeholders """ return isinstance( widget, (TextInput, Textarea, NumberInput, EmailInput, URLInput, PasswordInput) )
python
def is_widget_with_placeholder(widget): return isinstance( widget, (TextInput, Textarea, NumberInput, EmailInput, URLInput, PasswordInput) )
[ "def", "is_widget_with_placeholder", "(", "widget", ")", ":", "return", "isinstance", "(", "widget", ",", "(", "TextInput", ",", "Textarea", ",", "NumberInput", ",", "EmailInput", ",", "URLInput", ",", "PasswordInput", ")", ")" ]
Is this a widget that should have a placeholder? Only text, search, url, tel, e-mail, password, number have placeholders
[ "Is", "this", "a", "widget", "that", "should", "have", "a", "placeholder?", "Only", "text", "search", "url", "tel", "e", "-", "mail", "password", "number", "have", "placeholders" ]
1d4095ba113a1faff228f9592bdad4f0b3aed653
https://github.com/dyve/django-bootstrap3/blob/1d4095ba113a1faff228f9592bdad4f0b3aed653/bootstrap3/forms.py#L199-L206
226,375
ForensicArtifacts/artifacts
tools/stats.py
ArtifactStatistics._PrintDictAsTable
def _PrintDictAsTable(self, src_dict): """Prints a table of artifact definitions. Args: src_dict (dict[str, ArtifactDefinition]): artifact definitions by name. """ key_list = list(src_dict.keys()) key_list.sort() print('|', end='') for key in key_list: print(' {0:s} |'.format(key), end='') print('') print('|', end='') for key in key_list: print(' :---: |', end='') print('') print('|', end='') for key in key_list: print(' {0!s} |'.format(src_dict[key]), end='') print('\n')
python
def _PrintDictAsTable(self, src_dict): key_list = list(src_dict.keys()) key_list.sort() print('|', end='') for key in key_list: print(' {0:s} |'.format(key), end='') print('') print('|', end='') for key in key_list: print(' :---: |', end='') print('') print('|', end='') for key in key_list: print(' {0!s} |'.format(src_dict[key]), end='') print('\n')
[ "def", "_PrintDictAsTable", "(", "self", ",", "src_dict", ")", ":", "key_list", "=", "list", "(", "src_dict", ".", "keys", "(", ")", ")", "key_list", ".", "sort", "(", ")", "print", "(", "'|'", ",", "end", "=", "''", ")", "for", "key", "in", "key_l...
Prints a table of artifact definitions. Args: src_dict (dict[str, ArtifactDefinition]): artifact definitions by name.
[ "Prints", "a", "table", "of", "artifact", "definitions", "." ]
044a63bfb4448af33d085c69066c80f9505ae7ca
https://github.com/ForensicArtifacts/artifacts/blob/044a63bfb4448af33d085c69066c80f9505ae7ca/tools/stats.py#L28-L50
226,376
ForensicArtifacts/artifacts
tools/stats.py
ArtifactStatistics.BuildStats
def BuildStats(self): """Builds the statistics.""" artifact_reader = reader.YamlArtifactsReader() self.label_counts = {} self.os_counts = {} self.path_count = 0 self.reg_key_count = 0 self.source_type_counts = {} self.total_count = 0 for artifact_definition in artifact_reader.ReadDirectory('data'): if hasattr(artifact_definition, 'labels'): for label in artifact_definition.labels: self.label_counts[label] = self.label_counts.get(label, 0) + 1 for source in artifact_definition.sources: self.total_count += 1 source_type = source.type_indicator self.source_type_counts[source_type] = self.source_type_counts.get( source_type, 0) + 1 if source_type == definitions.TYPE_INDICATOR_WINDOWS_REGISTRY_KEY: self.reg_key_count += len(source.keys) elif source_type == definitions.TYPE_INDICATOR_WINDOWS_REGISTRY_VALUE: self.reg_key_count += len(source.key_value_pairs) elif source_type in (definitions.TYPE_INDICATOR_FILE, definitions.TYPE_INDICATOR_DIRECTORY): self.path_count += len(source.paths) os_list = source.supported_os for os_str in os_list: self.os_counts[os_str] = self.os_counts.get(os_str, 0) + 1
python
def BuildStats(self): artifact_reader = reader.YamlArtifactsReader() self.label_counts = {} self.os_counts = {} self.path_count = 0 self.reg_key_count = 0 self.source_type_counts = {} self.total_count = 0 for artifact_definition in artifact_reader.ReadDirectory('data'): if hasattr(artifact_definition, 'labels'): for label in artifact_definition.labels: self.label_counts[label] = self.label_counts.get(label, 0) + 1 for source in artifact_definition.sources: self.total_count += 1 source_type = source.type_indicator self.source_type_counts[source_type] = self.source_type_counts.get( source_type, 0) + 1 if source_type == definitions.TYPE_INDICATOR_WINDOWS_REGISTRY_KEY: self.reg_key_count += len(source.keys) elif source_type == definitions.TYPE_INDICATOR_WINDOWS_REGISTRY_VALUE: self.reg_key_count += len(source.key_value_pairs) elif source_type in (definitions.TYPE_INDICATOR_FILE, definitions.TYPE_INDICATOR_DIRECTORY): self.path_count += len(source.paths) os_list = source.supported_os for os_str in os_list: self.os_counts[os_str] = self.os_counts.get(os_str, 0) + 1
[ "def", "BuildStats", "(", "self", ")", ":", "artifact_reader", "=", "reader", ".", "YamlArtifactsReader", "(", ")", "self", ".", "label_counts", "=", "{", "}", "self", ".", "os_counts", "=", "{", "}", "self", ".", "path_count", "=", "0", "self", ".", "...
Builds the statistics.
[ "Builds", "the", "statistics", "." ]
044a63bfb4448af33d085c69066c80f9505ae7ca
https://github.com/ForensicArtifacts/artifacts/blob/044a63bfb4448af33d085c69066c80f9505ae7ca/tools/stats.py#L81-L112
226,377
ForensicArtifacts/artifacts
tools/stats.py
ArtifactStatistics.PrintStats
def PrintStats(self): """Build stats and print in MarkDown format.""" self.BuildStats() self.PrintSummaryTable() self.PrintSourceTypeTable() self.PrintOSTable() self.PrintLabelTable()
python
def PrintStats(self): self.BuildStats() self.PrintSummaryTable() self.PrintSourceTypeTable() self.PrintOSTable() self.PrintLabelTable()
[ "def", "PrintStats", "(", "self", ")", ":", "self", ".", "BuildStats", "(", ")", "self", ".", "PrintSummaryTable", "(", ")", "self", ".", "PrintSourceTypeTable", "(", ")", "self", ".", "PrintOSTable", "(", ")", "self", ".", "PrintLabelTable", "(", ")" ]
Build stats and print in MarkDown format.
[ "Build", "stats", "and", "print", "in", "MarkDown", "format", "." ]
044a63bfb4448af33d085c69066c80f9505ae7ca
https://github.com/ForensicArtifacts/artifacts/blob/044a63bfb4448af33d085c69066c80f9505ae7ca/tools/stats.py#L114-L120
226,378
ForensicArtifacts/artifacts
artifacts/source_type.py
WindowsRegistryKeySourceType.ValidateKey
def ValidateKey(cls, key_path): """Validates this key against supported key names. Args: key_path (str): path of a Windows Registry key. Raises: FormatError: when key is not supported. """ for prefix in cls.VALID_PREFIXES: if key_path.startswith(prefix): return # TODO: move check to validator. if key_path.startswith('HKEY_CURRENT_USER\\'): raise errors.FormatError( 'HKEY_CURRENT_USER\\ is not supported instead use: ' 'HKEY_USERS\\%%users.sid%%\\') raise errors.FormatError( 'Unupported Registry key path: {0:s}'.format(key_path))
python
def ValidateKey(cls, key_path): for prefix in cls.VALID_PREFIXES: if key_path.startswith(prefix): return # TODO: move check to validator. if key_path.startswith('HKEY_CURRENT_USER\\'): raise errors.FormatError( 'HKEY_CURRENT_USER\\ is not supported instead use: ' 'HKEY_USERS\\%%users.sid%%\\') raise errors.FormatError( 'Unupported Registry key path: {0:s}'.format(key_path))
[ "def", "ValidateKey", "(", "cls", ",", "key_path", ")", ":", "for", "prefix", "in", "cls", ".", "VALID_PREFIXES", ":", "if", "key_path", ".", "startswith", "(", "prefix", ")", ":", "return", "# TODO: move check to validator.", "if", "key_path", ".", "startswit...
Validates this key against supported key names. Args: key_path (str): path of a Windows Registry key. Raises: FormatError: when key is not supported.
[ "Validates", "this", "key", "against", "supported", "key", "names", "." ]
044a63bfb4448af33d085c69066c80f9505ae7ca
https://github.com/ForensicArtifacts/artifacts/blob/044a63bfb4448af33d085c69066c80f9505ae7ca/artifacts/source_type.py#L272-L292
226,379
ForensicArtifacts/artifacts
artifacts/source_type.py
SourceTypeFactory.CreateSourceType
def CreateSourceType(cls, type_indicator, attributes): """Creates a source type. Args: type_indicator (str): source type indicator. attributes (dict[str, object]): source type attributes. Returns: SourceType: a source type. Raises: FormatError: if the type indicator is not set or unsupported, or if required attributes are missing. """ if type_indicator not in cls._source_type_classes: raise errors.FormatError( 'Unsupported type indicator: {0:s}.'.format(type_indicator)) return cls._source_type_classes[type_indicator](**attributes)
python
def CreateSourceType(cls, type_indicator, attributes): if type_indicator not in cls._source_type_classes: raise errors.FormatError( 'Unsupported type indicator: {0:s}.'.format(type_indicator)) return cls._source_type_classes[type_indicator](**attributes)
[ "def", "CreateSourceType", "(", "cls", ",", "type_indicator", ",", "attributes", ")", ":", "if", "type_indicator", "not", "in", "cls", ".", "_source_type_classes", ":", "raise", "errors", ".", "FormatError", "(", "'Unsupported type indicator: {0:s}.'", ".", "format"...
Creates a source type. Args: type_indicator (str): source type indicator. attributes (dict[str, object]): source type attributes. Returns: SourceType: a source type. Raises: FormatError: if the type indicator is not set or unsupported, or if required attributes are missing.
[ "Creates", "a", "source", "type", "." ]
044a63bfb4448af33d085c69066c80f9505ae7ca
https://github.com/ForensicArtifacts/artifacts/blob/044a63bfb4448af33d085c69066c80f9505ae7ca/artifacts/source_type.py#L401-L419
226,380
ForensicArtifacts/artifacts
artifacts/source_type.py
SourceTypeFactory.DeregisterSourceType
def DeregisterSourceType(cls, source_type_class): """Deregisters a source type. Source types are identified based on their type indicator. Args: source_type_class (type): source type. Raises: KeyError: if a source type is not set for the corresponding type indicator. """ if source_type_class.TYPE_INDICATOR not in cls._source_type_classes: raise KeyError( 'Source type not set for type: {0:s}.'.format( source_type_class.TYPE_INDICATOR)) del cls._source_type_classes[source_type_class.TYPE_INDICATOR]
python
def DeregisterSourceType(cls, source_type_class): if source_type_class.TYPE_INDICATOR not in cls._source_type_classes: raise KeyError( 'Source type not set for type: {0:s}.'.format( source_type_class.TYPE_INDICATOR)) del cls._source_type_classes[source_type_class.TYPE_INDICATOR]
[ "def", "DeregisterSourceType", "(", "cls", ",", "source_type_class", ")", ":", "if", "source_type_class", ".", "TYPE_INDICATOR", "not", "in", "cls", ".", "_source_type_classes", ":", "raise", "KeyError", "(", "'Source type not set for type: {0:s}.'", ".", "format", "(...
Deregisters a source type. Source types are identified based on their type indicator. Args: source_type_class (type): source type. Raises: KeyError: if a source type is not set for the corresponding type indicator.
[ "Deregisters", "a", "source", "type", "." ]
044a63bfb4448af33d085c69066c80f9505ae7ca
https://github.com/ForensicArtifacts/artifacts/blob/044a63bfb4448af33d085c69066c80f9505ae7ca/artifacts/source_type.py#L422-L439
226,381
ForensicArtifacts/artifacts
artifacts/source_type.py
SourceTypeFactory.RegisterSourceType
def RegisterSourceType(cls, source_type_class): """Registers a source type. Source types are identified based on their type indicator. Args: source_type_class (type): source type. Raises: KeyError: if source types is already set for the corresponding type indicator. """ if source_type_class.TYPE_INDICATOR in cls._source_type_classes: raise KeyError( 'Source type already set for type: {0:s}.'.format( source_type_class.TYPE_INDICATOR)) cls._source_type_classes[source_type_class.TYPE_INDICATOR] = ( source_type_class)
python
def RegisterSourceType(cls, source_type_class): if source_type_class.TYPE_INDICATOR in cls._source_type_classes: raise KeyError( 'Source type already set for type: {0:s}.'.format( source_type_class.TYPE_INDICATOR)) cls._source_type_classes[source_type_class.TYPE_INDICATOR] = ( source_type_class)
[ "def", "RegisterSourceType", "(", "cls", ",", "source_type_class", ")", ":", "if", "source_type_class", ".", "TYPE_INDICATOR", "in", "cls", ".", "_source_type_classes", ":", "raise", "KeyError", "(", "'Source type already set for type: {0:s}.'", ".", "format", "(", "s...
Registers a source type. Source types are identified based on their type indicator. Args: source_type_class (type): source type. Raises: KeyError: if source types is already set for the corresponding type indicator.
[ "Registers", "a", "source", "type", "." ]
044a63bfb4448af33d085c69066c80f9505ae7ca
https://github.com/ForensicArtifacts/artifacts/blob/044a63bfb4448af33d085c69066c80f9505ae7ca/artifacts/source_type.py#L460-L478
226,382
ForensicArtifacts/artifacts
tools/validator.py
ArtifactDefinitionsValidator._CheckMacOSPaths
def _CheckMacOSPaths(self, filename, artifact_definition, source, paths): """Checks if the paths are valid MacOS paths. Args: filename (str): name of the artifacts definition file. artifact_definition (ArtifactDefinition): artifact definition. source (SourceType): source definition. paths (list[str]): paths to validate. Returns: bool: True if the MacOS paths is valid. """ result = True paths_with_private = [] paths_with_symbolic_link_to_private = [] for path in paths: path_lower = path.lower() path_segments = path_lower.split(source.separator) if not path_segments: logging.warning(( 'Empty path defined by artifact definition: {0:s} in file: ' '{1:s}').format(artifact_definition.name, filename)) result = False elif len(path_segments) == 1: continue elif path_segments[1] in self._MACOS_PRIVATE_SUB_PATHS: paths_with_symbolic_link_to_private.append(path) elif path_segments[1] == 'private' and len(path_segments) >= 2: if path_segments[2] in self._MACOS_PRIVATE_SUB_PATHS: paths_with_private.append(path) else: logging.warning(( 'Unsupported private path: {0:s} defined by artifact definition: ' '{1:s} in file: {2:s}').format( path, artifact_definition.name, filename)) result = False for private_path in paths_with_private: if private_path[8:] not in paths_with_symbolic_link_to_private: logging.warning(( 'Missing symbolic link: {0:s} for path: {1:s} defined by artifact ' 'definition: {2:s} in file: {3:s}').format( private_path[8:], private_path, artifact_definition.name, filename)) result = False for path in paths_with_symbolic_link_to_private: private_path = '/private{0:s}'.format(path) if private_path not in paths_with_private: logging.warning(( 'Missing path: {0:s} for symbolic link: {1:s} defined by artifact ' 'definition: {2:s} in file: {3:s}').format( private_path, path, artifact_definition.name, filename)) result = False return result
python
def _CheckMacOSPaths(self, filename, artifact_definition, source, paths): result = True paths_with_private = [] paths_with_symbolic_link_to_private = [] for path in paths: path_lower = path.lower() path_segments = path_lower.split(source.separator) if not path_segments: logging.warning(( 'Empty path defined by artifact definition: {0:s} in file: ' '{1:s}').format(artifact_definition.name, filename)) result = False elif len(path_segments) == 1: continue elif path_segments[1] in self._MACOS_PRIVATE_SUB_PATHS: paths_with_symbolic_link_to_private.append(path) elif path_segments[1] == 'private' and len(path_segments) >= 2: if path_segments[2] in self._MACOS_PRIVATE_SUB_PATHS: paths_with_private.append(path) else: logging.warning(( 'Unsupported private path: {0:s} defined by artifact definition: ' '{1:s} in file: {2:s}').format( path, artifact_definition.name, filename)) result = False for private_path in paths_with_private: if private_path[8:] not in paths_with_symbolic_link_to_private: logging.warning(( 'Missing symbolic link: {0:s} for path: {1:s} defined by artifact ' 'definition: {2:s} in file: {3:s}').format( private_path[8:], private_path, artifact_definition.name, filename)) result = False for path in paths_with_symbolic_link_to_private: private_path = '/private{0:s}'.format(path) if private_path not in paths_with_private: logging.warning(( 'Missing path: {0:s} for symbolic link: {1:s} defined by artifact ' 'definition: {2:s} in file: {3:s}').format( private_path, path, artifact_definition.name, filename)) result = False return result
[ "def", "_CheckMacOSPaths", "(", "self", ",", "filename", ",", "artifact_definition", ",", "source", ",", "paths", ")", ":", "result", "=", "True", "paths_with_private", "=", "[", "]", "paths_with_symbolic_link_to_private", "=", "[", "]", "for", "path", "in", "...
Checks if the paths are valid MacOS paths. Args: filename (str): name of the artifacts definition file. artifact_definition (ArtifactDefinition): artifact definition. source (SourceType): source definition. paths (list[str]): paths to validate. Returns: bool: True if the MacOS paths is valid.
[ "Checks", "if", "the", "paths", "are", "valid", "MacOS", "paths", "." ]
044a63bfb4448af33d085c69066c80f9505ae7ca
https://github.com/ForensicArtifacts/artifacts/blob/044a63bfb4448af33d085c69066c80f9505ae7ca/tools/validator.py#L52-L113
226,383
ForensicArtifacts/artifacts
tools/validator.py
ArtifactDefinitionsValidator._CheckWindowsRegistryKeyPath
def _CheckWindowsRegistryKeyPath( self, filename, artifact_definition, key_path): """Checks if a path is a valid Windows Registry key path. Args: filename (str): name of the artifacts definition file. artifact_definition (ArtifactDefinition): artifact definition. key_path (str): Windows Registry key path to validate. Returns: bool: True if the Windows Registry key path is valid. """ result = True key_path_segments = key_path.lower().split('\\') if key_path_segments[0] == '%%current_control_set%%': result = False logging.warning(( 'Artifact definition: {0:s} in file: {1:s} contains Windows ' 'Registry key path that starts with ' '%%CURRENT_CONTROL_SET%%. Replace %%CURRENT_CONTROL_SET%% with ' 'HKEY_LOCAL_MACHINE\\System\\CurrentControlSet').format( artifact_definition.name, filename)) for segment_index, key_path_segment in enumerate(key_path_segments): if key_path_segment.startswith('%%') and key_path_segment.endswith('%%'): if (segment_index == 1 and key_path_segment == '%%users.sid%%' and key_path_segments[0] == 'hkey_users'): continue if key_path_segment.startswith('%%environ_'): result = False logging.warning(( 'Artifact definition: {0:s} in file: {1:s} contains Windows ' 'Registry key path that contains an environment variable: ' '"{2:s}". Usage of environment variables in key paths is not ' 'encouraged at this time.').format( artifact_definition.name, filename, key_path_segment)) elif key_path_segment.startswith('%%users.'): result = False logging.warning(( 'Artifact definition: {0:s} in file: {1:s} contains Windows ' 'Registry key path that contains a users variable: "{2:s}". ' 'Usage of users variables in key paths, except for ' '"HKEY_USERS\\%%users.sid%%", is not encouraged at this ' 'time.').format( artifact_definition.name, filename, key_path_segment)) return result
python
def _CheckWindowsRegistryKeyPath( self, filename, artifact_definition, key_path): result = True key_path_segments = key_path.lower().split('\\') if key_path_segments[0] == '%%current_control_set%%': result = False logging.warning(( 'Artifact definition: {0:s} in file: {1:s} contains Windows ' 'Registry key path that starts with ' '%%CURRENT_CONTROL_SET%%. Replace %%CURRENT_CONTROL_SET%% with ' 'HKEY_LOCAL_MACHINE\\System\\CurrentControlSet').format( artifact_definition.name, filename)) for segment_index, key_path_segment in enumerate(key_path_segments): if key_path_segment.startswith('%%') and key_path_segment.endswith('%%'): if (segment_index == 1 and key_path_segment == '%%users.sid%%' and key_path_segments[0] == 'hkey_users'): continue if key_path_segment.startswith('%%environ_'): result = False logging.warning(( 'Artifact definition: {0:s} in file: {1:s} contains Windows ' 'Registry key path that contains an environment variable: ' '"{2:s}". Usage of environment variables in key paths is not ' 'encouraged at this time.').format( artifact_definition.name, filename, key_path_segment)) elif key_path_segment.startswith('%%users.'): result = False logging.warning(( 'Artifact definition: {0:s} in file: {1:s} contains Windows ' 'Registry key path that contains a users variable: "{2:s}". ' 'Usage of users variables in key paths, except for ' '"HKEY_USERS\\%%users.sid%%", is not encouraged at this ' 'time.').format( artifact_definition.name, filename, key_path_segment)) return result
[ "def", "_CheckWindowsRegistryKeyPath", "(", "self", ",", "filename", ",", "artifact_definition", ",", "key_path", ")", ":", "result", "=", "True", "key_path_segments", "=", "key_path", ".", "lower", "(", ")", ".", "split", "(", "'\\\\'", ")", "if", "key_path_s...
Checks if a path is a valid Windows Registry key path. Args: filename (str): name of the artifacts definition file. artifact_definition (ArtifactDefinition): artifact definition. key_path (str): Windows Registry key path to validate. Returns: bool: True if the Windows Registry key path is valid.
[ "Checks", "if", "a", "path", "is", "a", "valid", "Windows", "Registry", "key", "path", "." ]
044a63bfb4448af33d085c69066c80f9505ae7ca
https://github.com/ForensicArtifacts/artifacts/blob/044a63bfb4448af33d085c69066c80f9505ae7ca/tools/validator.py#L222-L271
226,384
ForensicArtifacts/artifacts
tools/validator.py
ArtifactDefinitionsValidator._HasDuplicateRegistryKeyPaths
def _HasDuplicateRegistryKeyPaths( self, filename, artifact_definition, source): """Checks if Registry key paths are not already defined by other artifacts. Note that at the moment this function will only find exact duplicate Registry key paths. Args: filename (str): name of the artifacts definition file. artifact_definition (ArtifactDefinition): artifact definition. source (SourceType): source definition. Returns: bool: True if the Registry key paths defined by the source type are used in other artifacts. """ result = False intersection = self._artifact_registry_key_paths.intersection( set(source.keys)) if intersection: duplicate_key_paths = '\n'.join(intersection) logging.warning(( 'Artifact definition: {0:s} in file: {1:s} has duplicate ' 'Registry key paths:\n{2:s}').format( artifact_definition.name, filename, duplicate_key_paths)) result = True self._artifact_registry_key_paths.update(source.keys) return result
python
def _HasDuplicateRegistryKeyPaths( self, filename, artifact_definition, source): result = False intersection = self._artifact_registry_key_paths.intersection( set(source.keys)) if intersection: duplicate_key_paths = '\n'.join(intersection) logging.warning(( 'Artifact definition: {0:s} in file: {1:s} has duplicate ' 'Registry key paths:\n{2:s}').format( artifact_definition.name, filename, duplicate_key_paths)) result = True self._artifact_registry_key_paths.update(source.keys) return result
[ "def", "_HasDuplicateRegistryKeyPaths", "(", "self", ",", "filename", ",", "artifact_definition", ",", "source", ")", ":", "result", "=", "False", "intersection", "=", "self", ".", "_artifact_registry_key_paths", ".", "intersection", "(", "set", "(", "source", "."...
Checks if Registry key paths are not already defined by other artifacts. Note that at the moment this function will only find exact duplicate Registry key paths. Args: filename (str): name of the artifacts definition file. artifact_definition (ArtifactDefinition): artifact definition. source (SourceType): source definition. Returns: bool: True if the Registry key paths defined by the source type are used in other artifacts.
[ "Checks", "if", "Registry", "key", "paths", "are", "not", "already", "defined", "by", "other", "artifacts", "." ]
044a63bfb4448af33d085c69066c80f9505ae7ca
https://github.com/ForensicArtifacts/artifacts/blob/044a63bfb4448af33d085c69066c80f9505ae7ca/tools/validator.py#L273-L301
226,385
ForensicArtifacts/artifacts
tools/validator.py
ArtifactDefinitionsValidator.CheckFile
def CheckFile(self, filename): """Validates the artifacts definition in a specific file. Args: filename (str): name of the artifacts definition file. Returns: bool: True if the file contains valid artifacts definitions. """ result = True artifact_reader = reader.YamlArtifactsReader() try: for artifact_definition in artifact_reader.ReadFile(filename): try: self._artifact_registry.RegisterDefinition(artifact_definition) except KeyError: logging.warning( 'Duplicate artifact definition: {0:s} in file: {1:s}'.format( artifact_definition.name, filename)) result = False artifact_definition_supports_macos = ( definitions.SUPPORTED_OS_DARWIN in ( artifact_definition.supported_os)) artifact_definition_supports_windows = ( definitions.SUPPORTED_OS_WINDOWS in ( artifact_definition.supported_os)) for source in artifact_definition.sources: if source.type_indicator in ( definitions.TYPE_INDICATOR_FILE, definitions.TYPE_INDICATOR_PATH): if (definitions.SUPPORTED_OS_DARWIN in source.supported_os or ( artifact_definition_supports_macos and not source.supported_os)): if not self._CheckMacOSPaths( filename, artifact_definition, source, source.paths): result = False elif (artifact_definition_supports_windows or definitions.SUPPORTED_OS_WINDOWS in source.supported_os): for path in source.paths: if not self._CheckWindowsPath( filename, artifact_definition, source, path): result = False elif source.type_indicator == ( definitions.TYPE_INDICATOR_WINDOWS_REGISTRY_KEY): # Exempt the legacy file from duplicate checking because it has # duplicates intentionally. 
if (filename != self.LEGACY_PATH and self._HasDuplicateRegistryKeyPaths( filename, artifact_definition, source)): result = False for key_path in source.keys: if not self._CheckWindowsRegistryKeyPath( filename, artifact_definition, key_path): result = False elif source.type_indicator == ( definitions.TYPE_INDICATOR_WINDOWS_REGISTRY_VALUE): for key_value_pair in source.key_value_pairs: if not self._CheckWindowsRegistryKeyPath( filename, artifact_definition, key_value_pair['key']): result = False except errors.FormatError as exception: logging.warning( 'Unable to validate file: {0:s} with error: {1!s}'.format( filename, exception)) result = False return result
python
def CheckFile(self, filename): result = True artifact_reader = reader.YamlArtifactsReader() try: for artifact_definition in artifact_reader.ReadFile(filename): try: self._artifact_registry.RegisterDefinition(artifact_definition) except KeyError: logging.warning( 'Duplicate artifact definition: {0:s} in file: {1:s}'.format( artifact_definition.name, filename)) result = False artifact_definition_supports_macos = ( definitions.SUPPORTED_OS_DARWIN in ( artifact_definition.supported_os)) artifact_definition_supports_windows = ( definitions.SUPPORTED_OS_WINDOWS in ( artifact_definition.supported_os)) for source in artifact_definition.sources: if source.type_indicator in ( definitions.TYPE_INDICATOR_FILE, definitions.TYPE_INDICATOR_PATH): if (definitions.SUPPORTED_OS_DARWIN in source.supported_os or ( artifact_definition_supports_macos and not source.supported_os)): if not self._CheckMacOSPaths( filename, artifact_definition, source, source.paths): result = False elif (artifact_definition_supports_windows or definitions.SUPPORTED_OS_WINDOWS in source.supported_os): for path in source.paths: if not self._CheckWindowsPath( filename, artifact_definition, source, path): result = False elif source.type_indicator == ( definitions.TYPE_INDICATOR_WINDOWS_REGISTRY_KEY): # Exempt the legacy file from duplicate checking because it has # duplicates intentionally. 
if (filename != self.LEGACY_PATH and self._HasDuplicateRegistryKeyPaths( filename, artifact_definition, source)): result = False for key_path in source.keys: if not self._CheckWindowsRegistryKeyPath( filename, artifact_definition, key_path): result = False elif source.type_indicator == ( definitions.TYPE_INDICATOR_WINDOWS_REGISTRY_VALUE): for key_value_pair in source.key_value_pairs: if not self._CheckWindowsRegistryKeyPath( filename, artifact_definition, key_value_pair['key']): result = False except errors.FormatError as exception: logging.warning( 'Unable to validate file: {0:s} with error: {1!s}'.format( filename, exception)) result = False return result
[ "def", "CheckFile", "(", "self", ",", "filename", ")", ":", "result", "=", "True", "artifact_reader", "=", "reader", ".", "YamlArtifactsReader", "(", ")", "try", ":", "for", "artifact_definition", "in", "artifact_reader", ".", "ReadFile", "(", "filename", ")",...
Validates the artifacts definition in a specific file. Args: filename (str): name of the artifacts definition file. Returns: bool: True if the file contains valid artifacts definitions.
[ "Validates", "the", "artifacts", "definition", "in", "a", "specific", "file", "." ]
044a63bfb4448af33d085c69066c80f9505ae7ca
https://github.com/ForensicArtifacts/artifacts/blob/044a63bfb4448af33d085c69066c80f9505ae7ca/tools/validator.py#L303-L379
226,386
ForensicArtifacts/artifacts
artifacts/registry.py
ArtifactDefinitionsRegistry.DeregisterDefinition
def DeregisterDefinition(self, artifact_definition): """Deregisters an artifact definition. Artifact definitions are identified based on their lower case name. Args: artifact_definition (ArtifactDefinition): an artifact definition. Raises: KeyError: if an artifact definition is not set for the corresponding name. """ artifact_definition_name = artifact_definition.name.lower() if artifact_definition_name not in self._artifact_definitions: raise KeyError( 'Artifact definition not set for name: {0:s}.'.format( artifact_definition.name)) del self._artifact_definitions[artifact_definition_name]
python
def DeregisterDefinition(self, artifact_definition): artifact_definition_name = artifact_definition.name.lower() if artifact_definition_name not in self._artifact_definitions: raise KeyError( 'Artifact definition not set for name: {0:s}.'.format( artifact_definition.name)) del self._artifact_definitions[artifact_definition_name]
[ "def", "DeregisterDefinition", "(", "self", ",", "artifact_definition", ")", ":", "artifact_definition_name", "=", "artifact_definition", ".", "name", ".", "lower", "(", ")", "if", "artifact_definition_name", "not", "in", "self", ".", "_artifact_definitions", ":", "...
Deregisters an artifact definition. Artifact definitions are identified based on their lower case name. Args: artifact_definition (ArtifactDefinition): an artifact definition. Raises: KeyError: if an artifact definition is not set for the corresponding name.
[ "Deregisters", "an", "artifact", "definition", "." ]
044a63bfb4448af33d085c69066c80f9505ae7ca
https://github.com/ForensicArtifacts/artifacts/blob/044a63bfb4448af33d085c69066c80f9505ae7ca/artifacts/registry.py#L56-L73
226,387
ForensicArtifacts/artifacts
artifacts/registry.py
ArtifactDefinitionsRegistry.RegisterDefinition
def RegisterDefinition(self, artifact_definition): """Registers an artifact definition. Artifact definitions are identified based on their lower case name. Args: artifact_definition (ArtifactDefinition): an artifact definition. Raises: KeyError: if artifact definition is already set for the corresponding name. """ artifact_definition_name = artifact_definition.name.lower() if artifact_definition_name in self._artifact_definitions: raise KeyError( 'Artifact definition already set for name: {0:s}.'.format( artifact_definition.name)) self._artifact_definitions[artifact_definition_name] = artifact_definition self._defined_artifact_names.add(artifact_definition.name) for source in artifact_definition.sources: if source.type_indicator == definitions.TYPE_INDICATOR_ARTIFACT_GROUP: self._artifact_name_references.update(source.names)
python
def RegisterDefinition(self, artifact_definition): artifact_definition_name = artifact_definition.name.lower() if artifact_definition_name in self._artifact_definitions: raise KeyError( 'Artifact definition already set for name: {0:s}.'.format( artifact_definition.name)) self._artifact_definitions[artifact_definition_name] = artifact_definition self._defined_artifact_names.add(artifact_definition.name) for source in artifact_definition.sources: if source.type_indicator == definitions.TYPE_INDICATOR_ARTIFACT_GROUP: self._artifact_name_references.update(source.names)
[ "def", "RegisterDefinition", "(", "self", ",", "artifact_definition", ")", ":", "artifact_definition_name", "=", "artifact_definition", ".", "name", ".", "lower", "(", ")", "if", "artifact_definition_name", "in", "self", ".", "_artifact_definitions", ":", "raise", "...
Registers an artifact definition. Artifact definitions are identified based on their lower case name. Args: artifact_definition (ArtifactDefinition): an artifact definition. Raises: KeyError: if artifact definition is already set for the corresponding name.
[ "Registers", "an", "artifact", "definition", "." ]
044a63bfb4448af33d085c69066c80f9505ae7ca
https://github.com/ForensicArtifacts/artifacts/blob/044a63bfb4448af33d085c69066c80f9505ae7ca/artifacts/registry.py#L125-L148
226,388
ForensicArtifacts/artifacts
artifacts/registry.py
ArtifactDefinitionsRegistry.ReadFromDirectory
def ReadFromDirectory(self, artifacts_reader, path, extension='yaml'): """Reads artifact definitions into the registry from files in a directory. This function does not recurse sub directories. Args: artifacts_reader (ArtifactsReader): an artifacts reader. path (str): path of the directory to read from. extension (Optional[str]): extension of the filenames to read. Raises: KeyError: if a duplicate artifact definition is encountered. """ for artifact_definition in artifacts_reader.ReadDirectory( path, extension=extension): self.RegisterDefinition(artifact_definition)
python
def ReadFromDirectory(self, artifacts_reader, path, extension='yaml'): for artifact_definition in artifacts_reader.ReadDirectory( path, extension=extension): self.RegisterDefinition(artifact_definition)
[ "def", "ReadFromDirectory", "(", "self", ",", "artifacts_reader", ",", "path", ",", "extension", "=", "'yaml'", ")", ":", "for", "artifact_definition", "in", "artifacts_reader", ".", "ReadDirectory", "(", "path", ",", "extension", "=", "extension", ")", ":", "...
Reads artifact definitions into the registry from files in a directory. This function does not recurse sub directories. Args: artifacts_reader (ArtifactsReader): an artifacts reader. path (str): path of the directory to read from. extension (Optional[str]): extension of the filenames to read. Raises: KeyError: if a duplicate artifact definition is encountered.
[ "Reads", "artifact", "definitions", "into", "the", "registry", "from", "files", "in", "a", "directory", "." ]
044a63bfb4448af33d085c69066c80f9505ae7ca
https://github.com/ForensicArtifacts/artifacts/blob/044a63bfb4448af33d085c69066c80f9505ae7ca/artifacts/registry.py#L183-L198
226,389
ForensicArtifacts/artifacts
artifacts/registry.py
ArtifactDefinitionsRegistry.ReadFromFile
def ReadFromFile(self, artifacts_reader, filename): """Reads artifact definitions into the registry from a file. Args: artifacts_reader (ArtifactsReader): an artifacts reader. filename (str): name of the file to read from. """ for artifact_definition in artifacts_reader.ReadFile(filename): self.RegisterDefinition(artifact_definition)
python
def ReadFromFile(self, artifacts_reader, filename): for artifact_definition in artifacts_reader.ReadFile(filename): self.RegisterDefinition(artifact_definition)
[ "def", "ReadFromFile", "(", "self", ",", "artifacts_reader", ",", "filename", ")", ":", "for", "artifact_definition", "in", "artifacts_reader", ".", "ReadFile", "(", "filename", ")", ":", "self", ".", "RegisterDefinition", "(", "artifact_definition", ")" ]
Reads artifact definitions into the registry from a file. Args: artifacts_reader (ArtifactsReader): an artifacts reader. filename (str): name of the file to read from.
[ "Reads", "artifact", "definitions", "into", "the", "registry", "from", "a", "file", "." ]
044a63bfb4448af33d085c69066c80f9505ae7ca
https://github.com/ForensicArtifacts/artifacts/blob/044a63bfb4448af33d085c69066c80f9505ae7ca/artifacts/registry.py#L200-L208
226,390
ForensicArtifacts/artifacts
artifacts/registry.py
ArtifactDefinitionsRegistry.ReadFileObject
def ReadFileObject(self, artifacts_reader, file_object): """Reads artifact definitions into the registry from a file-like object. Args: artifacts_reader (ArtifactsReader): an artifacts reader. file_object (file): file-like object to read from. """ for artifact_definition in artifacts_reader.ReadFileObject(file_object): self.RegisterDefinition(artifact_definition)
python
def ReadFileObject(self, artifacts_reader, file_object): for artifact_definition in artifacts_reader.ReadFileObject(file_object): self.RegisterDefinition(artifact_definition)
[ "def", "ReadFileObject", "(", "self", ",", "artifacts_reader", ",", "file_object", ")", ":", "for", "artifact_definition", "in", "artifacts_reader", ".", "ReadFileObject", "(", "file_object", ")", ":", "self", ".", "RegisterDefinition", "(", "artifact_definition", "...
Reads artifact definitions into the registry from a file-like object. Args: artifacts_reader (ArtifactsReader): an artifacts reader. file_object (file): file-like object to read from.
[ "Reads", "artifact", "definitions", "into", "the", "registry", "from", "a", "file", "-", "like", "object", "." ]
044a63bfb4448af33d085c69066c80f9505ae7ca
https://github.com/ForensicArtifacts/artifacts/blob/044a63bfb4448af33d085c69066c80f9505ae7ca/artifacts/registry.py#L210-L218
226,391
ForensicArtifacts/artifacts
utils/dependencies.py
DependencyDefinitionReader._GetConfigValue
def _GetConfigValue(self, config_parser, section_name, value_name): """Retrieves a value from the config parser. Args: config_parser (ConfigParser): configuration parser. section_name (str): name of the section that contains the value. value_name (str): name of the value. Returns: object: configuration value or None if the value does not exists. """ try: return config_parser.get(section_name, value_name) except configparser.NoOptionError: return None
python
def _GetConfigValue(self, config_parser, section_name, value_name): try: return config_parser.get(section_name, value_name) except configparser.NoOptionError: return None
[ "def", "_GetConfigValue", "(", "self", ",", "config_parser", ",", "section_name", ",", "value_name", ")", ":", "try", ":", "return", "config_parser", ".", "get", "(", "section_name", ",", "value_name", ")", "except", "configparser", ".", "NoOptionError", ":", ...
Retrieves a value from the config parser. Args: config_parser (ConfigParser): configuration parser. section_name (str): name of the section that contains the value. value_name (str): name of the value. Returns: object: configuration value or None if the value does not exists.
[ "Retrieves", "a", "value", "from", "the", "config", "parser", "." ]
044a63bfb4448af33d085c69066c80f9505ae7ca
https://github.com/ForensicArtifacts/artifacts/blob/044a63bfb4448af33d085c69066c80f9505ae7ca/utils/dependencies.py#L72-L86
226,392
ForensicArtifacts/artifacts
utils/dependencies.py
DependencyDefinitionReader.Read
def Read(self, file_object): """Reads dependency definitions. Args: file_object (file): file-like object to read from. Yields: DependencyDefinition: dependency definition. """ config_parser = configparser.RawConfigParser() # pylint: disable=deprecated-method # TODO: replace readfp by read_file, check if Python 2 compatible config_parser.readfp(file_object) for section_name in config_parser.sections(): dependency_definition = DependencyDefinition(section_name) for value_name in self._VALUE_NAMES: value = self._GetConfigValue(config_parser, section_name, value_name) setattr(dependency_definition, value_name, value) yield dependency_definition
python
def Read(self, file_object): config_parser = configparser.RawConfigParser() # pylint: disable=deprecated-method # TODO: replace readfp by read_file, check if Python 2 compatible config_parser.readfp(file_object) for section_name in config_parser.sections(): dependency_definition = DependencyDefinition(section_name) for value_name in self._VALUE_NAMES: value = self._GetConfigValue(config_parser, section_name, value_name) setattr(dependency_definition, value_name, value) yield dependency_definition
[ "def", "Read", "(", "self", ",", "file_object", ")", ":", "config_parser", "=", "configparser", ".", "RawConfigParser", "(", ")", "# pylint: disable=deprecated-method", "# TODO: replace readfp by read_file, check if Python 2 compatible", "config_parser", ".", "readfp", "(", ...
Reads dependency definitions. Args: file_object (file): file-like object to read from. Yields: DependencyDefinition: dependency definition.
[ "Reads", "dependency", "definitions", "." ]
044a63bfb4448af33d085c69066c80f9505ae7ca
https://github.com/ForensicArtifacts/artifacts/blob/044a63bfb4448af33d085c69066c80f9505ae7ca/utils/dependencies.py#L88-L108
226,393
ForensicArtifacts/artifacts
utils/dependencies.py
DependencyHelper._CheckPythonModuleVersion
def _CheckPythonModuleVersion( self, module_name, module_object, version_property, minimum_version, maximum_version): """Checks the version of a Python module. Args: module_object (module): Python module. module_name (str): name of the Python module. version_property (str): version attribute or function. minimum_version (str): minimum version. maximum_version (str): maximum version. Returns: tuple: consists: bool: True if the Python module is available and conforms to the minimum required version, False otherwise. str: status message. """ module_version = None if not version_property.endswith('()'): module_version = getattr(module_object, version_property, None) else: version_method = getattr( module_object, version_property[:-2], None) if version_method: module_version = version_method() if not module_version: status_message = ( 'unable to determine version information for: {0:s}').format( module_name) return False, status_message # Make sure the module version is a string. module_version = '{0!s}'.format(module_version) # Split the version string and convert every digit into an integer. # A string compare of both version strings will yield an incorrect result. # Strip any semantic suffixes such as a1, b1, pre, post, rc, dev. 
module_version = self._VERSION_NUMBERS_REGEX.findall(module_version)[0] if module_version[-1] == '.': module_version = module_version[:-1] try: module_version_map = list( map(int, self._VERSION_SPLIT_REGEX.split(module_version))) except ValueError: status_message = 'unable to parse module version: {0:s} {1:s}'.format( module_name, module_version) return False, status_message if minimum_version: try: minimum_version_map = list( map(int, self._VERSION_SPLIT_REGEX.split(minimum_version))) except ValueError: status_message = 'unable to parse minimum version: {0:s} {1:s}'.format( module_name, minimum_version) return False, status_message if module_version_map < minimum_version_map: status_message = ( '{0:s} version: {1!s} is too old, {2!s} or later required').format( module_name, module_version, minimum_version) return False, status_message if maximum_version: try: maximum_version_map = list( map(int, self._VERSION_SPLIT_REGEX.split(maximum_version))) except ValueError: status_message = 'unable to parse maximum version: {0:s} {1:s}'.format( module_name, maximum_version) return False, status_message if module_version_map > maximum_version_map: status_message = ( '{0:s} version: {1!s} is too recent, {2!s} or earlier ' 'required').format(module_name, module_version, maximum_version) return False, status_message status_message = '{0:s} version: {1!s}'.format(module_name, module_version) return True, status_message
python
def _CheckPythonModuleVersion( self, module_name, module_object, version_property, minimum_version, maximum_version): module_version = None if not version_property.endswith('()'): module_version = getattr(module_object, version_property, None) else: version_method = getattr( module_object, version_property[:-2], None) if version_method: module_version = version_method() if not module_version: status_message = ( 'unable to determine version information for: {0:s}').format( module_name) return False, status_message # Make sure the module version is a string. module_version = '{0!s}'.format(module_version) # Split the version string and convert every digit into an integer. # A string compare of both version strings will yield an incorrect result. # Strip any semantic suffixes such as a1, b1, pre, post, rc, dev. module_version = self._VERSION_NUMBERS_REGEX.findall(module_version)[0] if module_version[-1] == '.': module_version = module_version[:-1] try: module_version_map = list( map(int, self._VERSION_SPLIT_REGEX.split(module_version))) except ValueError: status_message = 'unable to parse module version: {0:s} {1:s}'.format( module_name, module_version) return False, status_message if minimum_version: try: minimum_version_map = list( map(int, self._VERSION_SPLIT_REGEX.split(minimum_version))) except ValueError: status_message = 'unable to parse minimum version: {0:s} {1:s}'.format( module_name, minimum_version) return False, status_message if module_version_map < minimum_version_map: status_message = ( '{0:s} version: {1!s} is too old, {2!s} or later required').format( module_name, module_version, minimum_version) return False, status_message if maximum_version: try: maximum_version_map = list( map(int, self._VERSION_SPLIT_REGEX.split(maximum_version))) except ValueError: status_message = 'unable to parse maximum version: {0:s} {1:s}'.format( module_name, maximum_version) return False, status_message if module_version_map > maximum_version_map: status_message = ( 
'{0:s} version: {1!s} is too recent, {2!s} or earlier ' 'required').format(module_name, module_version, maximum_version) return False, status_message status_message = '{0:s} version: {1!s}'.format(module_name, module_version) return True, status_message
[ "def", "_CheckPythonModuleVersion", "(", "self", ",", "module_name", ",", "module_object", ",", "version_property", ",", "minimum_version", ",", "maximum_version", ")", ":", "module_version", "=", "None", "if", "not", "version_property", ".", "endswith", "(", "'()'"...
Checks the version of a Python module. Args: module_object (module): Python module. module_name (str): name of the Python module. version_property (str): version attribute or function. minimum_version (str): minimum version. maximum_version (str): maximum version. Returns: tuple: consists: bool: True if the Python module is available and conforms to the minimum required version, False otherwise. str: status message.
[ "Checks", "the", "version", "of", "a", "Python", "module", "." ]
044a63bfb4448af33d085c69066c80f9505ae7ca
https://github.com/ForensicArtifacts/artifacts/blob/044a63bfb4448af33d085c69066c80f9505ae7ca/utils/dependencies.py#L168-L253
226,394
ForensicArtifacts/artifacts
utils/dependencies.py
DependencyHelper._PrintCheckDependencyStatus
def _PrintCheckDependencyStatus( self, dependency, result, status_message, verbose_output=True): """Prints the check dependency status. Args: dependency (DependencyDefinition): dependency definition. result (bool): True if the Python module is available and conforms to the minimum required version, False otherwise. status_message (str): status message. verbose_output (Optional[bool]): True if output should be verbose. """ if not result or dependency.is_optional: if dependency.is_optional: status_indicator = '[OPTIONAL]' else: status_indicator = '[FAILURE]' print('{0:s}\t{1:s}'.format(status_indicator, status_message)) elif verbose_output: print('[OK]\t\t{0:s}'.format(status_message))
python
def _PrintCheckDependencyStatus( self, dependency, result, status_message, verbose_output=True): if not result or dependency.is_optional: if dependency.is_optional: status_indicator = '[OPTIONAL]' else: status_indicator = '[FAILURE]' print('{0:s}\t{1:s}'.format(status_indicator, status_message)) elif verbose_output: print('[OK]\t\t{0:s}'.format(status_message))
[ "def", "_PrintCheckDependencyStatus", "(", "self", ",", "dependency", ",", "result", ",", "status_message", ",", "verbose_output", "=", "True", ")", ":", "if", "not", "result", "or", "dependency", ".", "is_optional", ":", "if", "dependency", ".", "is_optional", ...
Prints the check dependency status. Args: dependency (DependencyDefinition): dependency definition. result (bool): True if the Python module is available and conforms to the minimum required version, False otherwise. status_message (str): status message. verbose_output (Optional[bool]): True if output should be verbose.
[ "Prints", "the", "check", "dependency", "status", "." ]
044a63bfb4448af33d085c69066c80f9505ae7ca
https://github.com/ForensicArtifacts/artifacts/blob/044a63bfb4448af33d085c69066c80f9505ae7ca/utils/dependencies.py#L306-L326
226,395
ForensicArtifacts/artifacts
artifacts/reader.py
ArtifactsReader._ReadLabels
def _ReadLabels(self, artifact_definition_values, artifact_definition, name): """Reads the optional artifact definition labels. Args: artifact_definition_values (dict[str, object]): artifact definition values. artifact_definition (ArtifactDefinition): an artifact definition. name (str): name of the artifact definition. Raises: FormatError: if there are undefined labels. """ labels = artifact_definition_values.get('labels', []) undefined_labels = set(labels).difference(self.labels) if undefined_labels: raise errors.FormatError( 'Artifact definition: {0:s} found undefined labels: {1:s}.'.format( name, ', '.join(undefined_labels))) artifact_definition.labels = labels
python
def _ReadLabels(self, artifact_definition_values, artifact_definition, name): labels = artifact_definition_values.get('labels', []) undefined_labels = set(labels).difference(self.labels) if undefined_labels: raise errors.FormatError( 'Artifact definition: {0:s} found undefined labels: {1:s}.'.format( name, ', '.join(undefined_labels))) artifact_definition.labels = labels
[ "def", "_ReadLabels", "(", "self", ",", "artifact_definition_values", ",", "artifact_definition", ",", "name", ")", ":", "labels", "=", "artifact_definition_values", ".", "get", "(", "'labels'", ",", "[", "]", ")", "undefined_labels", "=", "set", "(", "labels", ...
Reads the optional artifact definition labels. Args: artifact_definition_values (dict[str, object]): artifact definition values. artifact_definition (ArtifactDefinition): an artifact definition. name (str): name of the artifact definition. Raises: FormatError: if there are undefined labels.
[ "Reads", "the", "optional", "artifact", "definition", "labels", "." ]
044a63bfb4448af33d085c69066c80f9505ae7ca
https://github.com/ForensicArtifacts/artifacts/blob/044a63bfb4448af33d085c69066c80f9505ae7ca/artifacts/reader.py#L106-L126
226,396
ForensicArtifacts/artifacts
artifacts/reader.py
ArtifactsReader._ReadSupportedOS
def _ReadSupportedOS(self, definition_values, definition_object, name): """Reads the optional artifact or source type supported OS. Args: definition_values (dict[str, object]): artifact definition values. definition_object (ArtifactDefinition|SourceType): the definition object. name (str): name of the artifact definition. Raises: FormatError: if there are undefined supported operating systems. """ supported_os = definition_values.get('supported_os', []) if not isinstance(supported_os, list): raise errors.FormatError( 'Invalid supported_os type: {0!s}'.format(type(supported_os))) undefined_supported_os = set(supported_os).difference(self.supported_os) if undefined_supported_os: error_string = ( 'Artifact definition: {0:s} undefined supported operating system: ' '{1:s}.').format(name, ', '.join(undefined_supported_os)) raise errors.FormatError(error_string) definition_object.supported_os = supported_os
python
def _ReadSupportedOS(self, definition_values, definition_object, name): supported_os = definition_values.get('supported_os', []) if not isinstance(supported_os, list): raise errors.FormatError( 'Invalid supported_os type: {0!s}'.format(type(supported_os))) undefined_supported_os = set(supported_os).difference(self.supported_os) if undefined_supported_os: error_string = ( 'Artifact definition: {0:s} undefined supported operating system: ' '{1:s}.').format(name, ', '.join(undefined_supported_os)) raise errors.FormatError(error_string) definition_object.supported_os = supported_os
[ "def", "_ReadSupportedOS", "(", "self", ",", "definition_values", ",", "definition_object", ",", "name", ")", ":", "supported_os", "=", "definition_values", ".", "get", "(", "'supported_os'", ",", "[", "]", ")", "if", "not", "isinstance", "(", "supported_os", ...
Reads the optional artifact or source type supported OS. Args: definition_values (dict[str, object]): artifact definition values. definition_object (ArtifactDefinition|SourceType): the definition object. name (str): name of the artifact definition. Raises: FormatError: if there are undefined supported operating systems.
[ "Reads", "the", "optional", "artifact", "or", "source", "type", "supported", "OS", "." ]
044a63bfb4448af33d085c69066c80f9505ae7ca
https://github.com/ForensicArtifacts/artifacts/blob/044a63bfb4448af33d085c69066c80f9505ae7ca/artifacts/reader.py#L131-L154
226,397
ForensicArtifacts/artifacts
artifacts/reader.py
ArtifactsReader._ReadSources
def _ReadSources(self, artifact_definition_values, artifact_definition, name): """Reads the artifact definition sources. Args: artifact_definition_values (dict[str, object]): artifact definition values. artifact_definition (ArtifactDefinition): an artifact definition. name (str): name of the artifact definition. Raises: FormatError: if the type indicator is not set or unsupported, or if required attributes are missing. """ sources = artifact_definition_values.get('sources') if not sources: raise errors.FormatError( 'Invalid artifact definition: {0:s} missing sources.'.format(name)) for source in sources: type_indicator = source.get('type', None) if not type_indicator: raise errors.FormatError( 'Invalid artifact definition: {0:s} source type.'.format(name)) attributes = source.get('attributes', None) try: source_type = artifact_definition.AppendSource( type_indicator, attributes) except errors.FormatError as exception: raise errors.FormatError( 'Invalid artifact definition: {0:s}, with error: {1!s}'.format( name, exception)) # TODO: deprecate these left overs from the collector definition. if source_type: if source.get('returned_types', None): raise errors.FormatError(( 'Invalid artifact definition: {0:s} returned_types no longer ' 'supported.').format(name)) source_type.conditions = source.get('conditions', []) self._ReadSupportedOS(source, source_type, name) if set(source_type.supported_os) - set( artifact_definition.supported_os): raise errors.FormatError(( 'Invalid artifact definition: {0:s} missing ' 'supported_os.').format(name))
python
def _ReadSources(self, artifact_definition_values, artifact_definition, name): sources = artifact_definition_values.get('sources') if not sources: raise errors.FormatError( 'Invalid artifact definition: {0:s} missing sources.'.format(name)) for source in sources: type_indicator = source.get('type', None) if not type_indicator: raise errors.FormatError( 'Invalid artifact definition: {0:s} source type.'.format(name)) attributes = source.get('attributes', None) try: source_type = artifact_definition.AppendSource( type_indicator, attributes) except errors.FormatError as exception: raise errors.FormatError( 'Invalid artifact definition: {0:s}, with error: {1!s}'.format( name, exception)) # TODO: deprecate these left overs from the collector definition. if source_type: if source.get('returned_types', None): raise errors.FormatError(( 'Invalid artifact definition: {0:s} returned_types no longer ' 'supported.').format(name)) source_type.conditions = source.get('conditions', []) self._ReadSupportedOS(source, source_type, name) if set(source_type.supported_os) - set( artifact_definition.supported_os): raise errors.FormatError(( 'Invalid artifact definition: {0:s} missing ' 'supported_os.').format(name))
[ "def", "_ReadSources", "(", "self", ",", "artifact_definition_values", ",", "artifact_definition", ",", "name", ")", ":", "sources", "=", "artifact_definition_values", ".", "get", "(", "'sources'", ")", "if", "not", "sources", ":", "raise", "errors", ".", "Forma...
Reads the artifact definition sources. Args: artifact_definition_values (dict[str, object]): artifact definition values. artifact_definition (ArtifactDefinition): an artifact definition. name (str): name of the artifact definition. Raises: FormatError: if the type indicator is not set or unsupported, or if required attributes are missing.
[ "Reads", "the", "artifact", "definition", "sources", "." ]
044a63bfb4448af33d085c69066c80f9505ae7ca
https://github.com/ForensicArtifacts/artifacts/blob/044a63bfb4448af33d085c69066c80f9505ae7ca/artifacts/reader.py#L156-L203
226,398
ForensicArtifacts/artifacts
artifacts/reader.py
ArtifactsReader.ReadArtifactDefinitionValues
def ReadArtifactDefinitionValues(self, artifact_definition_values): """Reads an artifact definition from a dictionary. Args: artifact_definition_values (dict[str, object]): artifact definition values. Returns: ArtifactDefinition: an artifact definition. Raises: FormatError: if the format of the artifact definition is not set or incorrect. """ if not artifact_definition_values: raise errors.FormatError('Missing artifact definition values.') different_keys = ( set(artifact_definition_values) - definitions.TOP_LEVEL_KEYS) if different_keys: different_keys = ', '.join(different_keys) raise errors.FormatError('Undefined keys: {0:s}'.format(different_keys)) name = artifact_definition_values.get('name', None) if not name: raise errors.FormatError('Invalid artifact definition missing name.') # The description is assumed to be mandatory. description = artifact_definition_values.get('doc', None) if not description: raise errors.FormatError( 'Invalid artifact definition: {0:s} missing description.'.format( name)) artifact_definition = artifact.ArtifactDefinition( name, description=description) if artifact_definition_values.get('collectors', []): raise errors.FormatError( 'Invalid artifact definition: {0:s} still uses collectors.'.format( name)) urls = artifact_definition_values.get('urls', []) if not isinstance(urls, list): raise errors.FormatError( 'Invalid artifact definition: {0:s} urls is not a list.'.format( name)) # TODO: check conditions. artifact_definition.conditions = artifact_definition_values.get( 'conditions', []) artifact_definition.provides = artifact_definition_values.get( 'provides', []) self._ReadLabels(artifact_definition_values, artifact_definition, name) self._ReadSupportedOS(artifact_definition_values, artifact_definition, name) artifact_definition.urls = urls self._ReadSources(artifact_definition_values, artifact_definition, name) return artifact_definition
python
def ReadArtifactDefinitionValues(self, artifact_definition_values): if not artifact_definition_values: raise errors.FormatError('Missing artifact definition values.') different_keys = ( set(artifact_definition_values) - definitions.TOP_LEVEL_KEYS) if different_keys: different_keys = ', '.join(different_keys) raise errors.FormatError('Undefined keys: {0:s}'.format(different_keys)) name = artifact_definition_values.get('name', None) if not name: raise errors.FormatError('Invalid artifact definition missing name.') # The description is assumed to be mandatory. description = artifact_definition_values.get('doc', None) if not description: raise errors.FormatError( 'Invalid artifact definition: {0:s} missing description.'.format( name)) artifact_definition = artifact.ArtifactDefinition( name, description=description) if artifact_definition_values.get('collectors', []): raise errors.FormatError( 'Invalid artifact definition: {0:s} still uses collectors.'.format( name)) urls = artifact_definition_values.get('urls', []) if not isinstance(urls, list): raise errors.FormatError( 'Invalid artifact definition: {0:s} urls is not a list.'.format( name)) # TODO: check conditions. artifact_definition.conditions = artifact_definition_values.get( 'conditions', []) artifact_definition.provides = artifact_definition_values.get( 'provides', []) self._ReadLabels(artifact_definition_values, artifact_definition, name) self._ReadSupportedOS(artifact_definition_values, artifact_definition, name) artifact_definition.urls = urls self._ReadSources(artifact_definition_values, artifact_definition, name) return artifact_definition
[ "def", "ReadArtifactDefinitionValues", "(", "self", ",", "artifact_definition_values", ")", ":", "if", "not", "artifact_definition_values", ":", "raise", "errors", ".", "FormatError", "(", "'Missing artifact definition values.'", ")", "different_keys", "=", "(", "set", ...
Reads an artifact definition from a dictionary. Args: artifact_definition_values (dict[str, object]): artifact definition values. Returns: ArtifactDefinition: an artifact definition. Raises: FormatError: if the format of the artifact definition is not set or incorrect.
[ "Reads", "an", "artifact", "definition", "from", "a", "dictionary", "." ]
044a63bfb4448af33d085c69066c80f9505ae7ca
https://github.com/ForensicArtifacts/artifacts/blob/044a63bfb4448af33d085c69066c80f9505ae7ca/artifacts/reader.py#L205-L263
226,399
ForensicArtifacts/artifacts
artifacts/reader.py
ArtifactsReader.ReadDirectory
def ReadDirectory(self, path, extension='yaml'): """Reads artifact definitions from a directory. This function does not recurse sub directories. Args: path (str): path of the directory to read from. extension (Optional[str]): extension of the filenames to read. Yields: ArtifactDefinition: an artifact definition. """ if extension: glob_spec = os.path.join(path, '*.{0:s}'.format(extension)) else: glob_spec = os.path.join(path, '*') for artifact_file in glob.glob(glob_spec): for artifact_definition in self.ReadFile(artifact_file): yield artifact_definition
python
def ReadDirectory(self, path, extension='yaml'): if extension: glob_spec = os.path.join(path, '*.{0:s}'.format(extension)) else: glob_spec = os.path.join(path, '*') for artifact_file in glob.glob(glob_spec): for artifact_definition in self.ReadFile(artifact_file): yield artifact_definition
[ "def", "ReadDirectory", "(", "self", ",", "path", ",", "extension", "=", "'yaml'", ")", ":", "if", "extension", ":", "glob_spec", "=", "os", ".", "path", ".", "join", "(", "path", ",", "'*.{0:s}'", ".", "format", "(", "extension", ")", ")", "else", "...
Reads artifact definitions from a directory. This function does not recurse sub directories. Args: path (str): path of the directory to read from. extension (Optional[str]): extension of the filenames to read. Yields: ArtifactDefinition: an artifact definition.
[ "Reads", "artifact", "definitions", "from", "a", "directory", "." ]
044a63bfb4448af33d085c69066c80f9505ae7ca
https://github.com/ForensicArtifacts/artifacts/blob/044a63bfb4448af33d085c69066c80f9505ae7ca/artifacts/reader.py#L265-L284