code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def add_edge(self, u, v):
    """
    O(log(n))
    """
    # Nothing to do for an edge we already track.
    if self.graph.has_edge(u, v):
        return
    # Register any brand-new endpoint with the graph and every forest level.
    for endpoint in (u, v):
        if not self.graph.has_node(endpoint):
            self.graph.add_node(endpoint)
            for forest in self.forests:
                forest.add_node(endpoint)
    # Fresh edges always enter at level 0.
    self.level[(u, v)] = 0
    # Record the edge in the adjacency structure.
    self.graph.add_edge(u, v)
    # If u and v live in different trees of F_0, the edge joins the two
    # connected components, so it becomes a forest (tree) edge there.
    root_u = self.forests[0].find_root(u)
    root_v = self.forests[0].find_root(v)
    if root_u is not root_v:
        self.forests[0].add_edge(u, v)
def function[add_edge, parameter[self, u, v]]: constant[ O(log(n)) ] if call[name[self].graph.has_edge, parameter[name[u], name[v]]] begin[:] return[None] for taget[name[node]] in starred[tuple[[<ast.Name object at 0x7da1b25384c0>, <ast.Name object at 0x7da1b2539fc0>]]] begin[:] if <ast.UnaryOp object at 0x7da1b253a4a0> begin[:] call[name[self].graph.add_node, parameter[name[node]]] for taget[name[Fi]] in starred[name[self].forests] begin[:] call[name[Fi].add_node, parameter[name[node]]] call[name[self].level][tuple[[<ast.Name object at 0x7da1b245c190>, <ast.Name object at 0x7da1b253be80>]]] assign[=] constant[0] call[name[self].graph.add_edge, parameter[name[u], name[v]]] variable[ru] assign[=] call[call[name[self].forests][constant[0]].find_root, parameter[name[u]]] variable[rv] assign[=] call[call[name[self].forests][constant[0]].find_root, parameter[name[v]]] if compare[name[ru] is_not name[rv]] begin[:] call[call[name[self].forests][constant[0]].add_edge, parameter[name[u], name[v]]]
keyword[def] identifier[add_edge] ( identifier[self] , identifier[u] , identifier[v] ): literal[string] keyword[if] identifier[self] . identifier[graph] . identifier[has_edge] ( identifier[u] , identifier[v] ): keyword[return] keyword[for] identifier[node] keyword[in] ( identifier[u] , identifier[v] ): keyword[if] keyword[not] identifier[self] . identifier[graph] . identifier[has_node] ( identifier[node] ): identifier[self] . identifier[graph] . identifier[add_node] ( identifier[node] ) keyword[for] identifier[Fi] keyword[in] identifier[self] . identifier[forests] : identifier[Fi] . identifier[add_node] ( identifier[node] ) identifier[self] . identifier[level] [( identifier[u] , identifier[v] )]= literal[int] identifier[self] . identifier[graph] . identifier[add_edge] ( identifier[u] , identifier[v] ) identifier[ru] = identifier[self] . identifier[forests] [ literal[int] ]. identifier[find_root] ( identifier[u] ) identifier[rv] = identifier[self] . identifier[forests] [ literal[int] ]. identifier[find_root] ( identifier[v] ) keyword[if] identifier[ru] keyword[is] keyword[not] identifier[rv] : identifier[self] . identifier[forests] [ literal[int] ]. identifier[add_edge] ( identifier[u] , identifier[v] )
def add_edge(self, u, v): """ O(log(n)) """ # print('add_edge u, v = %r, %r' % (u, v,)) if self.graph.has_edge(u, v): return # depends on [control=['if'], data=[]] for node in (u, v): if not self.graph.has_node(node): self.graph.add_node(node) # depends on [control=['if'], data=[]] for Fi in self.forests: Fi.add_node(node) # depends on [control=['for'], data=['Fi']] # depends on [control=['for'], data=['node']] # First set the level of (u, v) to 0 self.level[u, v] = 0 # update the adjacency lists of u and v self.graph.add_edge(u, v) # If u and v are in separate trees in F_0, add e to F_0 ru = self.forests[0].find_root(u) rv = self.forests[0].find_root(v) if ru is not rv: # If they are in different connected compoments merge compoments self.forests[0].add_edge(u, v) # depends on [control=['if'], data=[]]
def compare_evrs(evr_a, evr_b):
    """Compare two EVR tuples to determine which is newer

    The epoch fields are compared first, then the versions, then the
    releases; the first non-equal field decides the outcome and any
    remaining fields are not examined (e.g. if the epochs differ, the
    versions and releases are never compared).

    :param tuple evr_a: an EVR tuple
    :param tuple evr_b: an EVR tuple
    """
    a_epoch, a_ver, a_rel = evr_a
    b_epoch, b_ver, b_rel = evr_b
    # Epochs dominate: a differing epoch decides the comparison outright.
    if a_epoch != b_epoch:
        return a_newer if a_epoch > b_epoch else b_newer
    # Epochs tie -- fall back to the version field.
    version_result = compare_versions(a_ver, b_ver)
    if version_result != a_eq_b:
        return version_result
    # Versions tie as well -- the release field is the tie-breaker.
    return compare_versions(a_rel, b_rel)
def function[compare_evrs, parameter[evr_a, evr_b]]: constant[Compare two EVR tuples to determine which is newer This method compares the epoch, version, and release of the provided package strings, assuming that epoch is 0 if not provided. Comparison is performed on the epoch, then the version, and then the release. If at any point a non-equality is found, the result is returned without any remaining comparisons being performed (e.g. if the epochs of the packages differ, the versions are releases are not compared). :param tuple evr_a: an EVR tuple :param tuple evr_b: an EVR tuple ] <ast.Tuple object at 0x7da1b23b19c0> assign[=] name[evr_a] <ast.Tuple object at 0x7da1b23b2aa0> assign[=] name[evr_b] if compare[name[a_epoch] not_equal[!=] name[b_epoch]] begin[:] return[<ast.IfExp object at 0x7da1b23b1780>] variable[ver_comp] assign[=] call[name[compare_versions], parameter[name[a_ver], name[b_ver]]] if compare[name[ver_comp] not_equal[!=] name[a_eq_b]] begin[:] return[name[ver_comp]] variable[rel_comp] assign[=] call[name[compare_versions], parameter[name[a_rel], name[b_rel]]] return[name[rel_comp]]
keyword[def] identifier[compare_evrs] ( identifier[evr_a] , identifier[evr_b] ): literal[string] identifier[a_epoch] , identifier[a_ver] , identifier[a_rel] = identifier[evr_a] identifier[b_epoch] , identifier[b_ver] , identifier[b_rel] = identifier[evr_b] keyword[if] identifier[a_epoch] != identifier[b_epoch] : keyword[return] identifier[a_newer] keyword[if] identifier[a_epoch] > identifier[b_epoch] keyword[else] identifier[b_newer] identifier[ver_comp] = identifier[compare_versions] ( identifier[a_ver] , identifier[b_ver] ) keyword[if] identifier[ver_comp] != identifier[a_eq_b] : keyword[return] identifier[ver_comp] identifier[rel_comp] = identifier[compare_versions] ( identifier[a_rel] , identifier[b_rel] ) keyword[return] identifier[rel_comp]
def compare_evrs(evr_a, evr_b): """Compare two EVR tuples to determine which is newer This method compares the epoch, version, and release of the provided package strings, assuming that epoch is 0 if not provided. Comparison is performed on the epoch, then the version, and then the release. If at any point a non-equality is found, the result is returned without any remaining comparisons being performed (e.g. if the epochs of the packages differ, the versions are releases are not compared). :param tuple evr_a: an EVR tuple :param tuple evr_b: an EVR tuple """ (a_epoch, a_ver, a_rel) = evr_a (b_epoch, b_ver, b_rel) = evr_b if a_epoch != b_epoch: return a_newer if a_epoch > b_epoch else b_newer # depends on [control=['if'], data=['a_epoch', 'b_epoch']] ver_comp = compare_versions(a_ver, b_ver) if ver_comp != a_eq_b: return ver_comp # depends on [control=['if'], data=['ver_comp']] rel_comp = compare_versions(a_rel, b_rel) return rel_comp
def etag(self):
    """
    Get the ETag option of the message.

    :rtype: list
    :return: the ETag values or [] if not specified by the request
    """
    # Collect the value of every option registered under the ETAG number.
    etag_number = defines.OptionRegistry.ETAG.number
    return [opt.value for opt in self.options if opt.number == etag_number]
def function[etag, parameter[self]]: constant[ Get the ETag option of the message. :rtype: list :return: the ETag values or [] if not specified by the request ] variable[value] assign[=] list[[]] for taget[name[option]] in starred[name[self].options] begin[:] if compare[name[option].number equal[==] name[defines].OptionRegistry.ETAG.number] begin[:] call[name[value].append, parameter[name[option].value]] return[name[value]]
keyword[def] identifier[etag] ( identifier[self] ): literal[string] identifier[value] =[] keyword[for] identifier[option] keyword[in] identifier[self] . identifier[options] : keyword[if] identifier[option] . identifier[number] == identifier[defines] . identifier[OptionRegistry] . identifier[ETAG] . identifier[number] : identifier[value] . identifier[append] ( identifier[option] . identifier[value] ) keyword[return] identifier[value]
def etag(self): """ Get the ETag option of the message. :rtype: list :return: the ETag values or [] if not specified by the request """ value = [] for option in self.options: if option.number == defines.OptionRegistry.ETAG.number: value.append(option.value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['option']] return value
def on_return(self, node):  # ('value',)
    """Return statement: evaluate the value; map None to the special
    ReturnedNone sentinel so callers can distinguish "returned None"
    from "no return value recorded"."""
    result = self.run(node.value)
    self.retval = ReturnedNone if result is None else result
    return
def function[on_return, parameter[self, node]]: constant[Return statement: look for None, return special sentinal.] name[self].retval assign[=] call[name[self].run, parameter[name[node].value]] if compare[name[self].retval is constant[None]] begin[:] name[self].retval assign[=] name[ReturnedNone] return[None]
keyword[def] identifier[on_return] ( identifier[self] , identifier[node] ): literal[string] identifier[self] . identifier[retval] = identifier[self] . identifier[run] ( identifier[node] . identifier[value] ) keyword[if] identifier[self] . identifier[retval] keyword[is] keyword[None] : identifier[self] . identifier[retval] = identifier[ReturnedNone] keyword[return]
def on_return(self, node): # ('value',) 'Return statement: look for None, return special sentinal.' self.retval = self.run(node.value) if self.retval is None: self.retval = ReturnedNone # depends on [control=['if'], data=[]] return
def line_intersection_2D(abarg, cdarg):
    '''
    line_intersection((a, b), (c, d)) yields the intersection point between
    the lines that pass through the given pairs of points. If any lines are
    parallel, (numpy.nan, numpy.nan) is returned; note that a, b, c, and d
    can all be 2 x n matrices of x and y coordinate row-vectors.
    '''
    ((x1, y1), (x2, y2)) = abarg
    ((x3, y3), (x4, y4)) = cdarg
    dx12 = (x1 - x2)
    dx34 = (x3 - x4)
    dy12 = (y1 - y2)
    dy34 = (y3 - y4)
    denom = dx12*dy34 - dy12*dx34
    # Mask (scalar or array) marking parallel line pairs: denominator ~ 0.
    parallel = np.isclose(denom, 0)
    # BUG FIX: np.isclose returns numpy.bool_ values, and `numpy_bool is True`
    # / `is False` is always False, so the original identity checks were dead
    # code: a parallel scalar pair fell through with the denominator patched
    # to 1 and produced a bogus finite intersection point.  Dispatch on
    # np.ndim / truthiness instead of object identity.
    denom = denom + parallel  # shift zero denominators to 1 so division is safe
    q12 = (x1*y2 - y1*x2) / denom
    q34 = (x3*y4 - y3*x4) / denom
    xi = q12*dx34 - q34*dx12
    yi = q12*dy34 - q34*dy12
    if np.ndim(parallel) == 0:
        # Scalar case: one pair of lines.
        return (np.nan, np.nan) if bool(parallel) else (xi, yi)
    # Vectorized case: NaN out the entries belonging to parallel pairs.
    xi = np.asarray(xi)
    yi = np.asarray(yi)
    xi[parallel] = np.nan
    yi[parallel] = np.nan
    return (xi, yi)
def function[line_intersection_2D, parameter[abarg, cdarg]]: constant[ line_intersection((a, b), (c, d)) yields the intersection point between the lines that pass through the given pairs of points. If any lines are parallel, (numpy.nan, numpy.nan) is returned; note that a, b, c, and d can all be 2 x n matrices of x and y coordinate row-vectors. ] <ast.Tuple object at 0x7da204344e50> assign[=] name[abarg] <ast.Tuple object at 0x7da204344c10> assign[=] name[cdarg] variable[dx12] assign[=] binary_operation[name[x1] - name[x2]] variable[dx34] assign[=] binary_operation[name[x3] - name[x4]] variable[dy12] assign[=] binary_operation[name[y1] - name[y2]] variable[dy34] assign[=] binary_operation[name[y3] - name[y4]] variable[denom] assign[=] binary_operation[binary_operation[name[dx12] * name[dy34]] - binary_operation[name[dy12] * name[dx34]]] variable[unit] assign[=] call[name[np].isclose, parameter[name[denom], constant[0]]] if compare[name[unit] is constant[True]] begin[:] return[tuple[[<ast.Attribute object at 0x7da1b0e38280>, <ast.Attribute object at 0x7da1b0e3b0a0>]]] variable[denom] assign[=] binary_operation[name[unit] + name[denom]] variable[q12] assign[=] binary_operation[binary_operation[binary_operation[name[x1] * name[y2]] - binary_operation[name[y1] * name[x2]]] / name[denom]] variable[q34] assign[=] binary_operation[binary_operation[binary_operation[name[x3] * name[y4]] - binary_operation[name[y3] * name[x4]]] / name[denom]] variable[xi] assign[=] binary_operation[binary_operation[name[q12] * name[dx34]] - binary_operation[name[q34] * name[dx12]]] variable[yi] assign[=] binary_operation[binary_operation[name[q12] * name[dy34]] - binary_operation[name[q34] * name[dy12]]] if compare[name[unit] is constant[False]] begin[:] return[tuple[[<ast.Name object at 0x7da20e9b2980>, <ast.Name object at 0x7da20e9b3850>]]]
keyword[def] identifier[line_intersection_2D] ( identifier[abarg] , identifier[cdarg] ): literal[string] (( identifier[x1] , identifier[y1] ),( identifier[x2] , identifier[y2] ))= identifier[abarg] (( identifier[x3] , identifier[y3] ),( identifier[x4] , identifier[y4] ))= identifier[cdarg] identifier[dx12] =( identifier[x1] - identifier[x2] ) identifier[dx34] =( identifier[x3] - identifier[x4] ) identifier[dy12] =( identifier[y1] - identifier[y2] ) identifier[dy34] =( identifier[y3] - identifier[y4] ) identifier[denom] = identifier[dx12] * identifier[dy34] - identifier[dy12] * identifier[dx34] identifier[unit] = identifier[np] . identifier[isclose] ( identifier[denom] , literal[int] ) keyword[if] identifier[unit] keyword[is] keyword[True] : keyword[return] ( identifier[np] . identifier[nan] , identifier[np] . identifier[nan] ) identifier[denom] = identifier[unit] + identifier[denom] identifier[q12] =( identifier[x1] * identifier[y2] - identifier[y1] * identifier[x2] )/ identifier[denom] identifier[q34] =( identifier[x3] * identifier[y4] - identifier[y3] * identifier[x4] )/ identifier[denom] identifier[xi] = identifier[q12] * identifier[dx34] - identifier[q34] * identifier[dx12] identifier[yi] = identifier[q12] * identifier[dy34] - identifier[q34] * identifier[dy12] keyword[if] identifier[unit] keyword[is] keyword[False] : keyword[return] ( identifier[xi] , identifier[yi] ) keyword[elif] identifier[unit] keyword[is] keyword[True] : keyword[return] ( identifier[np] . identifier[nan] , identifier[np] . identifier[nan] ) keyword[else] : identifier[xi] = identifier[np] . identifier[asarray] ( identifier[xi] ) identifier[yi] = identifier[np] . identifier[asarray] ( identifier[yi] ) identifier[xi] [ identifier[unit] ]= identifier[np] . identifier[nan] identifier[yi] [ identifier[unit] ]= identifier[np] . identifier[nan] keyword[return] ( identifier[xi] , identifier[yi] )
def line_intersection_2D(abarg, cdarg): """ line_intersection((a, b), (c, d)) yields the intersection point between the lines that pass through the given pairs of points. If any lines are parallel, (numpy.nan, numpy.nan) is returned; note that a, b, c, and d can all be 2 x n matrices of x and y coordinate row-vectors. """ ((x1, y1), (x2, y2)) = abarg ((x3, y3), (x4, y4)) = cdarg dx12 = x1 - x2 dx34 = x3 - x4 dy12 = y1 - y2 dy34 = y3 - y4 denom = dx12 * dy34 - dy12 * dx34 unit = np.isclose(denom, 0) if unit is True: return (np.nan, np.nan) # depends on [control=['if'], data=[]] denom = unit + denom q12 = (x1 * y2 - y1 * x2) / denom q34 = (x3 * y4 - y3 * x4) / denom xi = q12 * dx34 - q34 * dx12 yi = q12 * dy34 - q34 * dy12 if unit is False: return (xi, yi) # depends on [control=['if'], data=[]] elif unit is True: return (np.nan, np.nan) # depends on [control=['if'], data=[]] else: xi = np.asarray(xi) yi = np.asarray(yi) xi[unit] = np.nan yi[unit] = np.nan return (xi, yi)
def verify_draft_url(url):
    """
    Return ``True`` if the given URL has a valid draft mode HMAC in its
    querystring.
    """
    parsed = urlparse.urlparse(url)
    # QueryDict requires a bytestring as its first argument
    params = QueryDict(force_bytes(parsed.query))
    # TODO Support legacy 'edit' param name for now
    token = params.get('preview') or params.get('edit')
    if not token:
        return False
    salt, hmac = token.split(':')
    return hmac == get_draft_hmac(salt, parsed.path)
def function[verify_draft_url, parameter[url]]: constant[ Return ``True`` if the given URL has a valid draft mode HMAC in its querystring. ] variable[url] assign[=] call[name[urlparse].urlparse, parameter[name[url]]] variable[query] assign[=] call[name[QueryDict], parameter[call[name[force_bytes], parameter[name[url].query]]]] variable[preview_hmac] assign[=] <ast.BoolOp object at 0x7da204567130> if name[preview_hmac] begin[:] <ast.Tuple object at 0x7da204564940> assign[=] call[name[preview_hmac].split, parameter[constant[:]]] return[compare[name[hmac] equal[==] call[name[get_draft_hmac], parameter[name[salt], name[url].path]]]] return[constant[False]]
keyword[def] identifier[verify_draft_url] ( identifier[url] ): literal[string] identifier[url] = identifier[urlparse] . identifier[urlparse] ( identifier[url] ) identifier[query] = identifier[QueryDict] ( identifier[force_bytes] ( identifier[url] . identifier[query] )) identifier[preview_hmac] = identifier[query] . identifier[get] ( literal[string] ) keyword[or] identifier[query] . identifier[get] ( literal[string] ) keyword[if] identifier[preview_hmac] : identifier[salt] , identifier[hmac] = identifier[preview_hmac] . identifier[split] ( literal[string] ) keyword[return] identifier[hmac] == identifier[get_draft_hmac] ( identifier[salt] , identifier[url] . identifier[path] ) keyword[return] keyword[False]
def verify_draft_url(url): """ Return ``True`` if the given URL has a valid draft mode HMAC in its querystring. """ url = urlparse.urlparse(url) # QueryDict requires a bytestring as its first argument query = QueryDict(force_bytes(url.query)) # TODO Support legacy 'edit' param name for now preview_hmac = query.get('preview') or query.get('edit') if preview_hmac: (salt, hmac) = preview_hmac.split(':') return hmac == get_draft_hmac(salt, url.path) # depends on [control=['if'], data=[]] return False
def _to_stinespring(rep, data, input_dim, output_dim): """Transform a QuantumChannel to the Stinespring representation.""" if rep == 'Stinespring': return data if rep == 'Operator': return _from_operator('Stinespring', data, input_dim, output_dim) # Convert via Superoperator representation if rep != 'Kraus': data = _to_kraus(rep, data, input_dim, output_dim) return _kraus_to_stinespring(data, input_dim, output_dim)
def function[_to_stinespring, parameter[rep, data, input_dim, output_dim]]: constant[Transform a QuantumChannel to the Stinespring representation.] if compare[name[rep] equal[==] constant[Stinespring]] begin[:] return[name[data]] if compare[name[rep] equal[==] constant[Operator]] begin[:] return[call[name[_from_operator], parameter[constant[Stinespring], name[data], name[input_dim], name[output_dim]]]] if compare[name[rep] not_equal[!=] constant[Kraus]] begin[:] variable[data] assign[=] call[name[_to_kraus], parameter[name[rep], name[data], name[input_dim], name[output_dim]]] return[call[name[_kraus_to_stinespring], parameter[name[data], name[input_dim], name[output_dim]]]]
keyword[def] identifier[_to_stinespring] ( identifier[rep] , identifier[data] , identifier[input_dim] , identifier[output_dim] ): literal[string] keyword[if] identifier[rep] == literal[string] : keyword[return] identifier[data] keyword[if] identifier[rep] == literal[string] : keyword[return] identifier[_from_operator] ( literal[string] , identifier[data] , identifier[input_dim] , identifier[output_dim] ) keyword[if] identifier[rep] != literal[string] : identifier[data] = identifier[_to_kraus] ( identifier[rep] , identifier[data] , identifier[input_dim] , identifier[output_dim] ) keyword[return] identifier[_kraus_to_stinespring] ( identifier[data] , identifier[input_dim] , identifier[output_dim] )
def _to_stinespring(rep, data, input_dim, output_dim): """Transform a QuantumChannel to the Stinespring representation.""" if rep == 'Stinespring': return data # depends on [control=['if'], data=[]] if rep == 'Operator': return _from_operator('Stinespring', data, input_dim, output_dim) # depends on [control=['if'], data=[]] # Convert via Superoperator representation if rep != 'Kraus': data = _to_kraus(rep, data, input_dim, output_dim) # depends on [control=['if'], data=['rep']] return _kraus_to_stinespring(data, input_dim, output_dim)
def call_plugins(plugins, method, *arg, **kw):
    """Invoke *method* on every plugin in *plugins* that defines it,
    forwarding the supplied arguments.  The first non-None return value
    is returned immediately; if no plugin produces one, return None.
    """
    for plugin in plugins:
        handler = getattr(plugin, method, None)
        if handler is None:
            # This plugin does not implement the hook; try the next one.
            continue
        outcome = handler(*arg, **kw)
        if outcome is not None:
            return outcome
    return None
def function[call_plugins, parameter[plugins, method]]: constant[Call all method on plugins in list, that define it, with provided arguments. The first response that is not None is returned. ] for taget[name[plug]] in starred[name[plugins]] begin[:] variable[func] assign[=] call[name[getattr], parameter[name[plug], name[method], constant[None]]] if compare[name[func] is constant[None]] begin[:] continue variable[result] assign[=] call[name[func], parameter[<ast.Starred object at 0x7da20c76dde0>]] if compare[name[result] is_not constant[None]] begin[:] return[name[result]] return[constant[None]]
keyword[def] identifier[call_plugins] ( identifier[plugins] , identifier[method] ,* identifier[arg] ,** identifier[kw] ): literal[string] keyword[for] identifier[plug] keyword[in] identifier[plugins] : identifier[func] = identifier[getattr] ( identifier[plug] , identifier[method] , keyword[None] ) keyword[if] identifier[func] keyword[is] keyword[None] : keyword[continue] identifier[result] = identifier[func] (* identifier[arg] ,** identifier[kw] ) keyword[if] identifier[result] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[result] keyword[return] keyword[None]
def call_plugins(plugins, method, *arg, **kw): """Call all method on plugins in list, that define it, with provided arguments. The first response that is not None is returned. """ for plug in plugins: func = getattr(plug, method, None) if func is None: continue # depends on [control=['if'], data=[]] #LOG.debug("call plugin %s: %s", plug.name, method) result = func(*arg, **kw) if result is not None: return result # depends on [control=['if'], data=['result']] # depends on [control=['for'], data=['plug']] return None
def find_one_node(self, *keys, value, decend=True):
    """Find a node on the branch of the instance with a `keys=data` item in
    the `data` dict.

    Nested values are accessed by specifying the keys in sequence.
    e.g. `node.get_data("country", "city")` would access
    `node.data["country"]["city"]`

    :param keys: the `data` dict key(s) referencing the required value.
    :type keys: str
    :param value: the value corresponding to `keys`.  Note that `value`
        is a keyword-only argument.
    :param decend: `decend=True` traverse down the branch sub-tree
        starting from `self`.  `decend=False` traverse up the branch from
        `self` towards root.
    :type decend: bool
    :returns: the first node found with `keys=data` in the `data` dict.
    :rtype: Node or None
    """
    # Choose direction: the sub-tree rooted at self, or the ancestor chain.
    candidates = self if decend else self._ancestors
    for candidate in candidates:
        if candidate.get_data(*keys) == value:
            return candidate
    return None
def function[find_one_node, parameter[self]]: constant[Find a node on the branch of the instance with a `keys=data` item in the `data` dict. Nested values are accessed by specifying the keys in sequence. e.g. `node.get_data("country", "city")` would access `node.data["country"]["city"]` :param keys: the `data` dict key(s) referencing the required value. :type keys: str :param value: the value corresponding to `keys`. Note that `value` is a keyword-only argument. :param decend: `decend=True` traverse down the branch sub-tree starting from `self`. `decend=False` traverse up the branch from `self` towards root. :type decend: bool :returns: the first node found with `keys=data` in the `data` dict. :rtype: Node or None ] if name[decend] begin[:] variable[traversal] assign[=] name[self] for taget[name[_node]] in starred[name[traversal]] begin[:] variable[_val] assign[=] call[name[_node].get_data, parameter[<ast.Starred object at 0x7da1b23452a0>]] if compare[name[_val] equal[==] name[value]] begin[:] return[name[_node]] return[constant[None]]
keyword[def] identifier[find_one_node] ( identifier[self] ,* identifier[keys] , identifier[value] , identifier[decend] = keyword[True] ): literal[string] keyword[if] identifier[decend] : identifier[traversal] = identifier[self] keyword[else] : identifier[traversal] = identifier[self] . identifier[_ancestors] keyword[for] identifier[_node] keyword[in] identifier[traversal] : identifier[_val] = identifier[_node] . identifier[get_data] (* identifier[keys] ) keyword[if] identifier[_val] == identifier[value] : keyword[return] identifier[_node] keyword[return] keyword[None]
def find_one_node(self, *keys, value, decend=True): """Find a node on the branch of the instance with a `keys=data` item in the `data` dict. Nested values are accessed by specifying the keys in sequence. e.g. `node.get_data("country", "city")` would access `node.data["country"]["city"]` :param keys: the `data` dict key(s) referencing the required value. :type keys: str :param value: the value corresponding to `keys`. Note that `value` is a keyword-only argument. :param decend: `decend=True` traverse down the branch sub-tree starting from `self`. `decend=False` traverse up the branch from `self` towards root. :type decend: bool :returns: the first node found with `keys=data` in the `data` dict. :rtype: Node or None """ if decend: traversal = self # depends on [control=['if'], data=[]] else: traversal = self._ancestors for _node in traversal: _val = _node.get_data(*keys) if _val == value: return _node # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['_node']] return None
def create_virtualenv(srcdir, datadir, preload_image, get_container_name):
    """
    Populate venv from preloaded image

    :param srcdir: host directory with the project source (mounted
        read-only while fixing ownership).
    :param datadir: host data directory; ``datadir + '/venv'`` receives the
        virtualenv when not running under boot2docker.
    :param preload_image: docker image that already contains the prepared
        virtualenv under ``/usr/lib/ckan``.
    :param get_container_name: callable mapping a role name (here
        ``'venv'``) to the concrete container name to use.
    """
    try:
        if docker.is_boot2docker():
            # boot2docker cannot bind-mount host paths reliably, so keep the
            # venv in a data-only container instead of under datadir.
            docker.data_only_container(
                get_container_name('venv'),
                ['/usr/lib/ckan'],
            )
            # Move the preloaded venv aside and commit, so the copy below can
            # write into the (empty) volume mounted at /usr/lib/ckan.
            img_id = docker.web_command(
                '/bin/mv /usr/lib/ckan/ /usr/lib/ckan_original',
                image=preload_image,
                commit=True,
            )
            # Copy the venv contents into the data-only container's volume.
            docker.web_command(
                command='/bin/cp -a /usr/lib/ckan_original/. /usr/lib/ckan/.',
                volumes_from=get_container_name('venv'),
                image=img_id,
            )
            # The intermediate committed image is no longer needed.
            docker.remove_image(img_id)
            return
        # Plain docker: copy the preloaded venv straight into the host
        # directory mounted read-write at /usr/lib/ckan_target.
        docker.web_command(
            command='/bin/cp -a /usr/lib/ckan/. /usr/lib/ckan_target/.',
            rw={datadir + '/venv': '/usr/lib/ckan_target'},
            image=preload_image,
        )
    finally:
        # Runs on both paths above (including the early return): pick the
        # mount style matching the platform, then fix ownership so the venv
        # files match the project source's owner.
        rw = {datadir + '/venv': '/usr/lib/ckan'} if not docker.is_boot2docker() else {}
        volumes_from = get_container_name('venv') if docker.is_boot2docker() else None
        # fix venv permissions
        docker.web_command(
            command='/bin/chown -R --reference=/project /usr/lib/ckan',
            rw=rw,
            volumes_from=volumes_from,
            ro={srcdir: '/project'},
        )
def function[create_virtualenv, parameter[srcdir, datadir, preload_image, get_container_name]]: constant[ Populate venv from preloaded image ] <ast.Try object at 0x7da18c4ceda0>
keyword[def] identifier[create_virtualenv] ( identifier[srcdir] , identifier[datadir] , identifier[preload_image] , identifier[get_container_name] ): literal[string] keyword[try] : keyword[if] identifier[docker] . identifier[is_boot2docker] (): identifier[docker] . identifier[data_only_container] ( identifier[get_container_name] ( literal[string] ), [ literal[string] ], ) identifier[img_id] = identifier[docker] . identifier[web_command] ( literal[string] , identifier[image] = identifier[preload_image] , identifier[commit] = keyword[True] , ) identifier[docker] . identifier[web_command] ( identifier[command] = literal[string] , identifier[volumes_from] = identifier[get_container_name] ( literal[string] ), identifier[image] = identifier[img_id] , ) identifier[docker] . identifier[remove_image] ( identifier[img_id] ) keyword[return] identifier[docker] . identifier[web_command] ( identifier[command] = literal[string] , identifier[rw] ={ identifier[datadir] + literal[string] : literal[string] }, identifier[image] = identifier[preload_image] , ) keyword[finally] : identifier[rw] ={ identifier[datadir] + literal[string] : literal[string] } keyword[if] keyword[not] identifier[docker] . identifier[is_boot2docker] () keyword[else] {} identifier[volumes_from] = identifier[get_container_name] ( literal[string] ) keyword[if] identifier[docker] . identifier[is_boot2docker] () keyword[else] keyword[None] identifier[docker] . identifier[web_command] ( identifier[command] = literal[string] , identifier[rw] = identifier[rw] , identifier[volumes_from] = identifier[volumes_from] , identifier[ro] ={ identifier[srcdir] : literal[string] }, )
def create_virtualenv(srcdir, datadir, preload_image, get_container_name): """ Populate venv from preloaded image """ try: if docker.is_boot2docker(): docker.data_only_container(get_container_name('venv'), ['/usr/lib/ckan']) img_id = docker.web_command('/bin/mv /usr/lib/ckan/ /usr/lib/ckan_original', image=preload_image, commit=True) docker.web_command(command='/bin/cp -a /usr/lib/ckan_original/. /usr/lib/ckan/.', volumes_from=get_container_name('venv'), image=img_id) docker.remove_image(img_id) return # depends on [control=['if'], data=[]] docker.web_command(command='/bin/cp -a /usr/lib/ckan/. /usr/lib/ckan_target/.', rw={datadir + '/venv': '/usr/lib/ckan_target'}, image=preload_image) # depends on [control=['try'], data=[]] finally: rw = {datadir + '/venv': '/usr/lib/ckan'} if not docker.is_boot2docker() else {} volumes_from = get_container_name('venv') if docker.is_boot2docker() else None # fix venv permissions docker.web_command(command='/bin/chown -R --reference=/project /usr/lib/ckan', rw=rw, volumes_from=volumes_from, ro={srcdir: '/project'})
def logn2(n, p):
    """Best p-bit lower and upper bounds for log(2)/log(n), as Fractions.

    Returns a ``(lower, upper)`` pair of :class:`fractions.Fraction` values
    bracketing log(2)/log(n), each exactly representable in p bits.

    NOTE(review): ``precision``, ``log2``, ``div``, ``next_up`` and the
    ``RoundToward*`` modes are presumably from a directed-rounding float
    library (e.g. the ``bigfloat`` MPFR wrapper) -- confirm against the
    module's imports.
    """
    # Outer context: all results (lower/upper) are rounded to p bits.
    with precision(p):
        extra = 10
        while True:
            with precision(p+extra):
                # use extra precision for intermediate step
                # Directed rounding gives a certified bracket of log2(n).
                log2upper = log2(n, RoundTowardPositive)
                log2lower = log2(n, RoundTowardNegative)
            # Reciprocals flip the bracket: 1/upper <= 1/log2(n) <= 1/lower.
            # These divisions happen at the outer p-bit precision.
            lower = div(1, log2upper, RoundTowardNegative)
            upper = div(1, log2lower, RoundTowardPositive)
            # if lower and upper are adjacent (or equal) we're done
            if next_up(lower) == upper:
                return (Fraction(*lower.as_integer_ratio()),
                        Fraction(*upper.as_integer_ratio()))
            # otherwise, increase the precision and try again
            extra += 10
def function[logn2, parameter[n, p]]: constant[Best p-bit lower and upper bounds for log(2)/log(n), as Fractions.] with call[name[precision], parameter[name[p]]] begin[:] variable[extra] assign[=] constant[10] while constant[True] begin[:] with call[name[precision], parameter[binary_operation[name[p] + name[extra]]]] begin[:] variable[log2upper] assign[=] call[name[log2], parameter[name[n], name[RoundTowardPositive]]] variable[log2lower] assign[=] call[name[log2], parameter[name[n], name[RoundTowardNegative]]] variable[lower] assign[=] call[name[div], parameter[constant[1], name[log2upper], name[RoundTowardNegative]]] variable[upper] assign[=] call[name[div], parameter[constant[1], name[log2lower], name[RoundTowardPositive]]] if compare[call[name[next_up], parameter[name[lower]]] equal[==] name[upper]] begin[:] return[tuple[[<ast.Call object at 0x7da1b26f2860>, <ast.Call object at 0x7da1b26f31f0>]]] <ast.AugAssign object at 0x7da207f998d0>
keyword[def] identifier[logn2] ( identifier[n] , identifier[p] ): literal[string] keyword[with] identifier[precision] ( identifier[p] ): identifier[extra] = literal[int] keyword[while] keyword[True] : keyword[with] identifier[precision] ( identifier[p] + identifier[extra] ): identifier[log2upper] = identifier[log2] ( identifier[n] , identifier[RoundTowardPositive] ) identifier[log2lower] = identifier[log2] ( identifier[n] , identifier[RoundTowardNegative] ) identifier[lower] = identifier[div] ( literal[int] , identifier[log2upper] , identifier[RoundTowardNegative] ) identifier[upper] = identifier[div] ( literal[int] , identifier[log2lower] , identifier[RoundTowardPositive] ) keyword[if] identifier[next_up] ( identifier[lower] )== identifier[upper] : keyword[return] ( identifier[Fraction] (* identifier[lower] . identifier[as_integer_ratio] ()), identifier[Fraction] (* identifier[upper] . identifier[as_integer_ratio] ())) identifier[extra] += literal[int]
def logn2(n, p): """Best p-bit lower and upper bounds for log(2)/log(n), as Fractions.""" with precision(p): extra = 10 while True: with precision(p + extra): # use extra precision for intermediate step log2upper = log2(n, RoundTowardPositive) log2lower = log2(n, RoundTowardNegative) # depends on [control=['with'], data=[]] lower = div(1, log2upper, RoundTowardNegative) upper = div(1, log2lower, RoundTowardPositive) # if lower and upper are adjacent (or equal) we're done if next_up(lower) == upper: return (Fraction(*lower.as_integer_ratio()), Fraction(*upper.as_integer_ratio())) # depends on [control=['if'], data=['upper']] # otherwise, increase the precision and try again extra += 10 # depends on [control=['while'], data=[]] # depends on [control=['with'], data=['precision']]
def getlist(self, key, default=None):
    """Return the list of values stored under *key*.

    Parameters
    ----------
    key : hashable
        Dictionary key to look up.
    default : list, optional
        Value returned when *key* is absent; a fresh empty list when not
        given.

    Returns
    -------
    list
        The ``value`` attribute of every node in ``self._map[key]`` when
        the key is present, else *default*.

    Notes
    -----
    The original signature used a mutable default (``default=[]``), which
    is created once and shared across all calls -- a caller mutating the
    returned list would silently change the default for every later call.
    The ``None`` sentinel below avoids that pitfall while still returning
    an empty list by default.
    """
    if key in self:
        return [node.value for node in self._map[key]]
    return [] if default is None else default
def function[getlist, parameter[self, key, default]]: constant[ Returns: The list of values for <key> if <key> is in the dictionary, else <default>. If <default> is not provided, an empty list is returned. ] if compare[name[key] in name[self]] begin[:] return[<ast.ListComp object at 0x7da1b1ea0760>] return[name[default]]
keyword[def] identifier[getlist] ( identifier[self] , identifier[key] , identifier[default] =[]): literal[string] keyword[if] identifier[key] keyword[in] identifier[self] : keyword[return] [ identifier[node] . identifier[value] keyword[for] identifier[node] keyword[in] identifier[self] . identifier[_map] [ identifier[key] ]] keyword[return] identifier[default]
def getlist(self, key, default=[]): """ Returns: The list of values for <key> if <key> is in the dictionary, else <default>. If <default> is not provided, an empty list is returned. """ if key in self: return [node.value for node in self._map[key]] # depends on [control=['if'], data=['key', 'self']] return default
def printOut(value, end='\n'):
    """Write *value* followed by *end* to stdout and flush immediately.

    Flushing after every call makes the text visible right away even when
    stdout is block-buffered (e.g. redirected to a file or pipe).
    """
    stream = sys.stdout
    stream.write(value)
    stream.write(end)
    stream.flush()
def function[printOut, parameter[value, end]]: constant[ This function prints the given String immediately and flushes the output. ] call[name[sys].stdout.write, parameter[name[value]]] call[name[sys].stdout.write, parameter[name[end]]] call[name[sys].stdout.flush, parameter[]]
keyword[def] identifier[printOut] ( identifier[value] , identifier[end] = literal[string] ): literal[string] identifier[sys] . identifier[stdout] . identifier[write] ( identifier[value] ) identifier[sys] . identifier[stdout] . identifier[write] ( identifier[end] ) identifier[sys] . identifier[stdout] . identifier[flush] ()
def printOut(value, end='\n'): """ This function prints the given String immediately and flushes the output. """ sys.stdout.write(value) sys.stdout.write(end) sys.stdout.flush()
def PushEvent(self, event):
    """Pushes an event onto the heap.

    Args:
      event (EventObject): event.
    """
    macb_group_identifier, content_identifier = self._GetEventIdentifiers(
        event)
    # A falsy MACB group identifier is normalized to '' so the heap entries
    # always compare cleanly on the first tuple element.
    if not macb_group_identifier:
        macb_group_identifier = ''
    # The timestamp is deliberately absent from the sort key: the psort
    # engine only stores events sharing a single timestamp in this heap.
    heapq.heappush(
        self._heap, (macb_group_identifier, content_identifier, event))
def function[PushEvent, parameter[self, event]]: constant[Pushes an event onto the heap. Args: event (EventObject): event. ] <ast.Tuple object at 0x7da2044c06a0> assign[=] call[name[self]._GetEventIdentifiers, parameter[name[event]]] variable[heap_values] assign[=] tuple[[<ast.BoolOp object at 0x7da2044c1390>, <ast.Name object at 0x7da2044c0580>, <ast.Name object at 0x7da2044c3fa0>]] call[name[heapq].heappush, parameter[name[self]._heap, name[heap_values]]]
keyword[def] identifier[PushEvent] ( identifier[self] , identifier[event] ): literal[string] identifier[macb_group_identifier] , identifier[content_identifier] = identifier[self] . identifier[_GetEventIdentifiers] ( identifier[event] ) identifier[heap_values] =( identifier[macb_group_identifier] keyword[or] literal[string] , identifier[content_identifier] , identifier[event] ) identifier[heapq] . identifier[heappush] ( identifier[self] . identifier[_heap] , identifier[heap_values] )
def PushEvent(self, event): """Pushes an event onto the heap. Args: event (EventObject): event. """ (macb_group_identifier, content_identifier) = self._GetEventIdentifiers(event) # We can ignore the timestamp here because the psort engine only stores # events with the same timestamp in the event heap. heap_values = (macb_group_identifier or '', content_identifier, event) heapq.heappush(self._heap, heap_values)
def process_directories(self):
    """Create the rst files from the input directories in the
    :attr:`in_dir` attribute"""
    # Walk each input directory lazily and pair it with its output target.
    walk_results = map(os.walk, self.in_dir)
    triples = zip(self.in_dir, self.out_dir, walk_results)
    for index, (base_dir, target_dir, paths) in enumerate(triples):
        # Record which input directory is being processed so helpers can
        # refer back to it.
        self._in_dir_count = index
        self.recursive_processing(base_dir, target_dir, paths)
def function[process_directories, parameter[self]]: constant[Create the rst files from the input directories in the :attr:`in_dir` attribute] for taget[tuple[[<ast.Name object at 0x7da204565ed0>, <ast.Tuple object at 0x7da204566b30>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[name[self].in_dir, name[self].out_dir, call[name[map], parameter[name[os].walk, name[self].in_dir]]]]]]] begin[:] name[self]._in_dir_count assign[=] name[i] call[name[self].recursive_processing, parameter[name[base_dir], name[target_dir], name[paths]]]
keyword[def] identifier[process_directories] ( identifier[self] ): literal[string] keyword[for] identifier[i] ,( identifier[base_dir] , identifier[target_dir] , identifier[paths] ) keyword[in] identifier[enumerate] ( identifier[zip] ( identifier[self] . identifier[in_dir] , identifier[self] . identifier[out_dir] , identifier[map] ( identifier[os] . identifier[walk] , identifier[self] . identifier[in_dir] ))): identifier[self] . identifier[_in_dir_count] = identifier[i] identifier[self] . identifier[recursive_processing] ( identifier[base_dir] , identifier[target_dir] , identifier[paths] )
def process_directories(self): """Create the rst files from the input directories in the :attr:`in_dir` attribute""" for (i, (base_dir, target_dir, paths)) in enumerate(zip(self.in_dir, self.out_dir, map(os.walk, self.in_dir))): self._in_dir_count = i self.recursive_processing(base_dir, target_dir, paths) # depends on [control=['for'], data=[]]
def interp(x, dx, dy, left=None, right=None):
    '''One-dimensional linear interpolation routine inspired/
    reimplemented from NumPy for extra speed for scalar values
    (and also numpy).

    Returns the one-dimensional piecewise linear interpolant to a function
    with a given value at discrete data-points.

    Parameters
    ----------
    x : float
        X-coordinate of the interpolated values, [-]
    dx : list[float]
        X-coordinates of the data points, must be increasing, [-]
    dy : list[float]
        Y-coordinates of the data points; same length as `dx`, [-]
    left : float, optional
        Value to return for `x < dx[0]`, default is `dy[0]`, [-]
    right : float, optional
        Value to return for `x > dx[-1]`, default is `dy[-1]`, [-]

    Returns
    -------
    y : float
        The interpolated value, [-]

    Notes
    -----
    This function is "unsafe" in that it assumes the x-coordinates are
    increasing. It also does not check for nan's, that `dx` and `dy` are
    the same length, and that `x` is scalar.

    Performance is 40-50% of that of NumPy under CPython.

    Examples
    --------
    >>> interp(2.5, [1, 2, 3], [3, 2, 0])
    1.0
    '''
    lendx = len(dx)
    # NOTE(review): the branches below encode binary_search's contract --
    # presumably it returns the index j of the interval such that
    # dx[j] <= x < dx[j+1], with -1 meaning x is below dx[0] and lendx
    # meaning x is above dx[-1].  Confirm against binary_search itself.
    j = binary_search(x, dx, lendx)
    if (j == -1):
        # x below the first data point: clamp to `left` (or dy[0]).
        if left is not None:
            return left
        else:
            return dy[0]
    elif (j == lendx - 1):
        # x falls exactly at/past the last interval start: return the
        # last tabulated y value without extrapolating.
        return dy[j]
    elif (j == lendx):
        # x above the last data point: clamp to `right` (or dy[-1]).
        if right is not None:
            return right
        else:
            return dy[-1]
    else:
        # Standard two-point linear interpolation on interval [dx[j], dx[j+1]].
        return (dy[j + 1] - dy[j])/(dx[j + 1] - dx[j])*(x - dx[j]) + dy[j]
def function[interp, parameter[x, dx, dy, left, right]]: constant[One-dimensional linear interpolation routine inspired/ reimplemented from NumPy for extra speed for scalar values (and also numpy). Returns the one-dimensional piecewise linear interpolant to a function with a given value at discrete data-points. Parameters ---------- x : float X-coordinate of the interpolated values, [-] dx : list[float] X-coordinates of the data points, must be increasing, [-] dy : list[float] Y-coordinates of the data points; same length as `dx`, [-] left : float, optional Value to return for `x < dx[0]`, default is `dy[0]`, [-] right : float, optional Value to return for `x > dx[-1]`, default is `dy[-1]`, [-] Returns ------- y : float The interpolated value, [-] Notes ----- This function is "unsafe" in that it assumes the x-coordinates are increasing. It also does not check for nan's, that `dx` and `dy` are the same length, and that `x` is scalar. Performance is 40-50% of that of NumPy under CPython. Examples -------- >>> interp(2.5, [1, 2, 3], [3, 2, 0]) 1.0 ] variable[lendx] assign[=] call[name[len], parameter[name[dx]]] variable[j] assign[=] call[name[binary_search], parameter[name[x], name[dx], name[lendx]]] if compare[name[j] equal[==] <ast.UnaryOp object at 0x7da1b12c8940>] begin[:] if compare[name[left] is_not constant[None]] begin[:] return[name[left]]
keyword[def] identifier[interp] ( identifier[x] , identifier[dx] , identifier[dy] , identifier[left] = keyword[None] , identifier[right] = keyword[None] ): literal[string] identifier[lendx] = identifier[len] ( identifier[dx] ) identifier[j] = identifier[binary_search] ( identifier[x] , identifier[dx] , identifier[lendx] ) keyword[if] ( identifier[j] ==- literal[int] ): keyword[if] identifier[left] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[left] keyword[else] : keyword[return] identifier[dy] [ literal[int] ] keyword[elif] ( identifier[j] == identifier[lendx] - literal[int] ): keyword[return] identifier[dy] [ identifier[j] ] keyword[elif] ( identifier[j] == identifier[lendx] ): keyword[if] identifier[right] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[right] keyword[else] : keyword[return] identifier[dy] [- literal[int] ] keyword[else] : keyword[return] ( identifier[dy] [ identifier[j] + literal[int] ]- identifier[dy] [ identifier[j] ])/( identifier[dx] [ identifier[j] + literal[int] ]- identifier[dx] [ identifier[j] ])*( identifier[x] - identifier[dx] [ identifier[j] ])+ identifier[dy] [ identifier[j] ]
def interp(x, dx, dy, left=None, right=None): """One-dimensional linear interpolation routine inspired/ reimplemented from NumPy for extra speed for scalar values (and also numpy). Returns the one-dimensional piecewise linear interpolant to a function with a given value at discrete data-points. Parameters ---------- x : float X-coordinate of the interpolated values, [-] dx : list[float] X-coordinates of the data points, must be increasing, [-] dy : list[float] Y-coordinates of the data points; same length as `dx`, [-] left : float, optional Value to return for `x < dx[0]`, default is `dy[0]`, [-] right : float, optional Value to return for `x > dx[-1]`, default is `dy[-1]`, [-] Returns ------- y : float The interpolated value, [-] Notes ----- This function is "unsafe" in that it assumes the x-coordinates are increasing. It also does not check for nan's, that `dx` and `dy` are the same length, and that `x` is scalar. Performance is 40-50% of that of NumPy under CPython. Examples -------- >>> interp(2.5, [1, 2, 3], [3, 2, 0]) 1.0 """ lendx = len(dx) j = binary_search(x, dx, lendx) if j == -1: if left is not None: return left # depends on [control=['if'], data=['left']] else: return dy[0] # depends on [control=['if'], data=[]] elif j == lendx - 1: return dy[j] # depends on [control=['if'], data=['j']] elif j == lendx: if right is not None: return right # depends on [control=['if'], data=['right']] else: return dy[-1] # depends on [control=['if'], data=[]] else: return (dy[j + 1] - dy[j]) / (dx[j + 1] - dx[j]) * (x - dx[j]) + dy[j]
def parse(self, rec):
    """Retrieve row data from files associated with the ISATabRecord.

    For each study in ``rec`` the study file is parsed into sample/source
    nodes, each of its assay files is parsed into an ISATabAssayRecord,
    and process nodes are attached to both.  ``rec.studies`` is replaced
    by the rebuilt list and the (mutated) record is returned.

    NOTE(review): studies whose study file yields no source data are
    silently dropped from ``rec.studies`` (the append happens inside the
    ``if source_data`` branch) -- confirm this filtering is intentional.
    """
    final_studies = []
    for study in rec.studies:
        # Parse the per-study sample table into node objects keyed by
        # these column headers.
        source_data = self._parse_study(study.metadata["Study File Name"],
                                        ["Source Name", "Sample Name", "Comment[ENA_SAMPLE]"])
        if source_data:
            study.nodes = source_data
            final_assays = []
            for assay in study.assays:
                # Wrap each raw assay dict in a richer record before
                # attaching its parsed nodes.
                cur_assay = ISATabAssayRecord(assay)
                assay_data = self._parse_study(assay["Study Assay File Name"],
                                               ["Sample Name","Extract Name","Raw Data File","Derived Data File",
                                                "Image File", "Acquisition Parameter Data File", "Free Induction Decay Data File"])
                cur_assay.nodes = assay_data
                self._get_process_nodes(assay["Study Assay File Name"], cur_assay)
                final_assays.append(cur_assay)
            study.assays = final_assays
            #get process nodes
            self._get_process_nodes(study.metadata["Study File Name"], study)
            final_studies.append(study)
    rec.studies = final_studies
    return rec
def function[parse, parameter[self, rec]]: constant[Retrieve row data from files associated with the ISATabRecord. ] variable[final_studies] assign[=] list[[]] for taget[name[study]] in starred[name[rec].studies] begin[:] variable[source_data] assign[=] call[name[self]._parse_study, parameter[call[name[study].metadata][constant[Study File Name]], list[[<ast.Constant object at 0x7da20c992aa0>, <ast.Constant object at 0x7da20c992410>, <ast.Constant object at 0x7da20c9920b0>]]]] if name[source_data] begin[:] name[study].nodes assign[=] name[source_data] variable[final_assays] assign[=] list[[]] for taget[name[assay]] in starred[name[study].assays] begin[:] variable[cur_assay] assign[=] call[name[ISATabAssayRecord], parameter[name[assay]]] variable[assay_data] assign[=] call[name[self]._parse_study, parameter[call[name[assay]][constant[Study Assay File Name]], list[[<ast.Constant object at 0x7da20c991360>, <ast.Constant object at 0x7da20c993eb0>, <ast.Constant object at 0x7da20c991b10>, <ast.Constant object at 0x7da20c992860>, <ast.Constant object at 0x7da20c991d80>, <ast.Constant object at 0x7da20c992c80>, <ast.Constant object at 0x7da20c992890>]]]] name[cur_assay].nodes assign[=] name[assay_data] call[name[self]._get_process_nodes, parameter[call[name[assay]][constant[Study Assay File Name]], name[cur_assay]]] call[name[final_assays].append, parameter[name[cur_assay]]] name[study].assays assign[=] name[final_assays] call[name[self]._get_process_nodes, parameter[call[name[study].metadata][constant[Study File Name]], name[study]]] call[name[final_studies].append, parameter[name[study]]] name[rec].studies assign[=] name[final_studies] return[name[rec]]
keyword[def] identifier[parse] ( identifier[self] , identifier[rec] ): literal[string] identifier[final_studies] =[] keyword[for] identifier[study] keyword[in] identifier[rec] . identifier[studies] : identifier[source_data] = identifier[self] . identifier[_parse_study] ( identifier[study] . identifier[metadata] [ literal[string] ], [ literal[string] , literal[string] , literal[string] ]) keyword[if] identifier[source_data] : identifier[study] . identifier[nodes] = identifier[source_data] identifier[final_assays] =[] keyword[for] identifier[assay] keyword[in] identifier[study] . identifier[assays] : identifier[cur_assay] = identifier[ISATabAssayRecord] ( identifier[assay] ) identifier[assay_data] = identifier[self] . identifier[_parse_study] ( identifier[assay] [ literal[string] ], [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]) identifier[cur_assay] . identifier[nodes] = identifier[assay_data] identifier[self] . identifier[_get_process_nodes] ( identifier[assay] [ literal[string] ], identifier[cur_assay] ) identifier[final_assays] . identifier[append] ( identifier[cur_assay] ) identifier[study] . identifier[assays] = identifier[final_assays] identifier[self] . identifier[_get_process_nodes] ( identifier[study] . identifier[metadata] [ literal[string] ], identifier[study] ) identifier[final_studies] . identifier[append] ( identifier[study] ) identifier[rec] . identifier[studies] = identifier[final_studies] keyword[return] identifier[rec]
def parse(self, rec): """Retrieve row data from files associated with the ISATabRecord. """ final_studies = [] for study in rec.studies: source_data = self._parse_study(study.metadata['Study File Name'], ['Source Name', 'Sample Name', 'Comment[ENA_SAMPLE]']) if source_data: study.nodes = source_data final_assays = [] for assay in study.assays: cur_assay = ISATabAssayRecord(assay) assay_data = self._parse_study(assay['Study Assay File Name'], ['Sample Name', 'Extract Name', 'Raw Data File', 'Derived Data File', 'Image File', 'Acquisition Parameter Data File', 'Free Induction Decay Data File']) cur_assay.nodes = assay_data self._get_process_nodes(assay['Study Assay File Name'], cur_assay) final_assays.append(cur_assay) # depends on [control=['for'], data=['assay']] study.assays = final_assays #get process nodes self._get_process_nodes(study.metadata['Study File Name'], study) final_studies.append(study) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['study']] rec.studies = final_studies return rec
def naturalsize(value, binary=False, gnu=False, format='%.1f'):
    """Format a number of byteslike a human readable filesize (eg. 10 kB).

    By default, decimal suffixes (kB, MB) are used.  Passing binary=true
    will use binary suffixes (KiB, MiB) are used and the base will be
    2**10 instead of 10**3.  If ``gnu`` is True, the binary argument is
    ignored and GNU-style (ls -sh style) prefixes are used (K, M) with
    the 2**10 definition.  Non-gnu modes are compatible with jinja2's
    ``filesizeformat`` filter."""
    # Pick the suffix table; gnu wins over binary, both use base 1024.
    if gnu:
        flavor = 'gnu'
    elif binary:
        flavor = 'binary'
    else:
        flavor = 'decimal'
    suffix = suffixes[flavor]
    base = 1000 if flavor == 'decimal' else 1024

    amount = float(value)

    # Values below one whole unit get a plain "Bytes" rendering (or a bare
    # "B" in gnu mode, which also skips the singular special case).
    if not gnu:
        if amount == 1:
            return '1 Byte'
        if amount < base:
            return '%d Bytes' % amount
    elif amount < base:
        return '%dB' % amount

    # gnu style glues the suffix directly onto the number.
    template = format + ('%s' if gnu else ' %s')
    for power, label in enumerate(suffix):
        unit = base ** (power + 2)
        if amount < unit:
            return template % (base * amount / unit, label)
    # Larger than the biggest suffix: fall back to the last unit/label.
    return template % (base * amount / unit, label)
def function[naturalsize, parameter[value, binary, gnu, format]]: constant[Format a number of byteslike a human readable filesize (eg. 10 kB). By default, decimal suffixes (kB, MB) are used. Passing binary=true will use binary suffixes (KiB, MiB) are used and the base will be 2**10 instead of 10**3. If ``gnu`` is True, the binary argument is ignored and GNU-style (ls -sh style) prefixes are used (K, M) with the 2**10 definition. Non-gnu modes are compatible with jinja2's ``filesizeformat`` filter.] if name[gnu] begin[:] variable[suffix] assign[=] call[name[suffixes]][constant[gnu]] variable[base] assign[=] <ast.IfExp object at 0x7da18dc04d00> variable[bytes] assign[=] call[name[float], parameter[name[value]]] if <ast.BoolOp object at 0x7da18dc049a0> begin[:] return[constant[1 Byte]] for taget[tuple[[<ast.Name object at 0x7da20c7c8160>, <ast.Name object at 0x7da20c7cb490>]]] in starred[call[name[enumerate], parameter[name[suffix]]]] begin[:] variable[unit] assign[=] binary_operation[name[base] ** binary_operation[name[i] + constant[2]]] if <ast.BoolOp object at 0x7da20c7cb070> begin[:] return[binary_operation[binary_operation[name[format] + constant[ %s]] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BinOp object at 0x7da20c7cbbb0>, <ast.Name object at 0x7da20c7c8790>]]]] if name[gnu] begin[:] return[binary_operation[binary_operation[name[format] + constant[%s]] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BinOp object at 0x7da2044c3580>, <ast.Name object at 0x7da1b1df89a0>]]]] return[binary_operation[binary_operation[name[format] + constant[ %s]] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BinOp object at 0x7da1b1df8b20>, <ast.Name object at 0x7da1b1df86a0>]]]]
keyword[def] identifier[naturalsize] ( identifier[value] , identifier[binary] = keyword[False] , identifier[gnu] = keyword[False] , identifier[format] = literal[string] ): literal[string] keyword[if] identifier[gnu] : identifier[suffix] = identifier[suffixes] [ literal[string] ] keyword[elif] identifier[binary] : identifier[suffix] = identifier[suffixes] [ literal[string] ] keyword[else] : identifier[suffix] = identifier[suffixes] [ literal[string] ] identifier[base] = literal[int] keyword[if] ( identifier[gnu] keyword[or] identifier[binary] ) keyword[else] literal[int] identifier[bytes] = identifier[float] ( identifier[value] ) keyword[if] identifier[bytes] == literal[int] keyword[and] keyword[not] identifier[gnu] : keyword[return] literal[string] keyword[elif] identifier[bytes] < identifier[base] keyword[and] keyword[not] identifier[gnu] : keyword[return] literal[string] % identifier[bytes] keyword[elif] identifier[bytes] < identifier[base] keyword[and] identifier[gnu] : keyword[return] literal[string] % identifier[bytes] keyword[for] identifier[i] , identifier[s] keyword[in] identifier[enumerate] ( identifier[suffix] ): identifier[unit] = identifier[base] **( identifier[i] + literal[int] ) keyword[if] identifier[bytes] < identifier[unit] keyword[and] keyword[not] identifier[gnu] : keyword[return] ( identifier[format] + literal[string] )%(( identifier[base] * identifier[bytes] / identifier[unit] ), identifier[s] ) keyword[elif] identifier[bytes] < identifier[unit] keyword[and] identifier[gnu] : keyword[return] ( identifier[format] + literal[string] )%(( identifier[base] * identifier[bytes] / identifier[unit] ), identifier[s] ) keyword[if] identifier[gnu] : keyword[return] ( identifier[format] + literal[string] )%(( identifier[base] * identifier[bytes] / identifier[unit] ), identifier[s] ) keyword[return] ( identifier[format] + literal[string] )%(( identifier[base] * identifier[bytes] / identifier[unit] ), identifier[s] )
def naturalsize(value, binary=False, gnu=False, format='%.1f'): """Format a number of byteslike a human readable filesize (eg. 10 kB). By default, decimal suffixes (kB, MB) are used. Passing binary=true will use binary suffixes (KiB, MiB) are used and the base will be 2**10 instead of 10**3. If ``gnu`` is True, the binary argument is ignored and GNU-style (ls -sh style) prefixes are used (K, M) with the 2**10 definition. Non-gnu modes are compatible with jinja2's ``filesizeformat`` filter.""" if gnu: suffix = suffixes['gnu'] # depends on [control=['if'], data=[]] elif binary: suffix = suffixes['binary'] # depends on [control=['if'], data=[]] else: suffix = suffixes['decimal'] base = 1024 if gnu or binary else 1000 bytes = float(value) if bytes == 1 and (not gnu): return '1 Byte' # depends on [control=['if'], data=[]] elif bytes < base and (not gnu): return '%d Bytes' % bytes # depends on [control=['if'], data=[]] elif bytes < base and gnu: return '%dB' % bytes # depends on [control=['if'], data=[]] for (i, s) in enumerate(suffix): unit = base ** (i + 2) if bytes < unit and (not gnu): return (format + ' %s') % (base * bytes / unit, s) # depends on [control=['if'], data=[]] elif bytes < unit and gnu: return (format + '%s') % (base * bytes / unit, s) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] if gnu: return (format + '%s') % (base * bytes / unit, s) # depends on [control=['if'], data=[]] return (format + ' %s') % (base * bytes / unit, s)
def state_max_repeat(self, value):
    """Parse repeatable parts."""
    min_, max_, inner = value
    # Materialize the traversal once so it can be scanned and repeated.
    items = list(Traverser(inner, self.groups))
    # An unbounded-minimum repeat that contains a required element must
    # occur at least once.
    if not min_ and max_:
        if any(isinstance(item, required) for item in items):
            min_ = 1
    for item in items * min_:
        yield item
def function[state_max_repeat, parameter[self, value]]: constant[Parse repeatable parts.] <ast.Tuple object at 0x7da18ede6da0> assign[=] name[value] variable[value] assign[=] <ast.ListComp object at 0x7da18ede40d0> if <ast.BoolOp object at 0x7da18ede7430> begin[:] for taget[name[val]] in starred[name[value]] begin[:] if call[name[isinstance], parameter[name[val], name[required]]] begin[:] variable[min_] assign[=] constant[1] break for taget[name[val]] in starred[binary_operation[name[value] * name[min_]]] begin[:] <ast.Yield object at 0x7da18ede5210>
keyword[def] identifier[state_max_repeat] ( identifier[self] , identifier[value] ): literal[string] identifier[min_] , identifier[max_] , identifier[value] = identifier[value] identifier[value] =[ identifier[val] keyword[for] identifier[val] keyword[in] identifier[Traverser] ( identifier[value] , identifier[self] . identifier[groups] )] keyword[if] keyword[not] identifier[min_] keyword[and] identifier[max_] : keyword[for] identifier[val] keyword[in] identifier[value] : keyword[if] identifier[isinstance] ( identifier[val] , identifier[required] ): identifier[min_] = literal[int] keyword[break] keyword[for] identifier[val] keyword[in] identifier[value] * identifier[min_] : keyword[yield] identifier[val]
def state_max_repeat(self, value): """Parse repeatable parts.""" (min_, max_, value) = value value = [val for val in Traverser(value, self.groups)] if not min_ and max_: for val in value: if isinstance(val, required): min_ = 1 break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['val']] # depends on [control=['if'], data=[]] for val in value * min_: yield val # depends on [control=['for'], data=['val']]
def reordd(iorder, ndim, array):
    """
    Re-order the elements of a double precision array according to
    a given order vector.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/reordd_c.html

    :param iorder: Order vector to be used to re-order array.
    :type iorder: Array of ints
    :param ndim: Dimension of array.
    :type ndim: int
    :param array: Array to be re-ordered.
    :type array: Array of floats
    :return: Re-ordered Array.
    :rtype: Array of floats
    """
    # Marshal the Python inputs into the ctypes forms CSPICE expects.
    order_vector = stypes.toIntVector(iorder)
    dimension = ctypes.c_int(ndim)
    values = stypes.toDoubleVector(array)
    # reordd_c permutes `values` in place.
    libspice.reordd_c(order_vector, dimension, values)
    return stypes.cVectorToPython(values)
def function[reordd, parameter[iorder, ndim, array]]: constant[ Re-order the elements of a double precision array according to a given order vector. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/reordd_c.html :param iorder: Order vector to be used to re-order array. :type iorder: Array of ints :param ndim: Dimension of array. :type ndim: int :param array: Array to be re-ordered. :type array: Array of floats :return: Re-ordered Array. :rtype: Array of floats ] variable[iorder] assign[=] call[name[stypes].toIntVector, parameter[name[iorder]]] variable[ndim] assign[=] call[name[ctypes].c_int, parameter[name[ndim]]] variable[array] assign[=] call[name[stypes].toDoubleVector, parameter[name[array]]] call[name[libspice].reordd_c, parameter[name[iorder], name[ndim], name[array]]] return[call[name[stypes].cVectorToPython, parameter[name[array]]]]
keyword[def] identifier[reordd] ( identifier[iorder] , identifier[ndim] , identifier[array] ): literal[string] identifier[iorder] = identifier[stypes] . identifier[toIntVector] ( identifier[iorder] ) identifier[ndim] = identifier[ctypes] . identifier[c_int] ( identifier[ndim] ) identifier[array] = identifier[stypes] . identifier[toDoubleVector] ( identifier[array] ) identifier[libspice] . identifier[reordd_c] ( identifier[iorder] , identifier[ndim] , identifier[array] ) keyword[return] identifier[stypes] . identifier[cVectorToPython] ( identifier[array] )
def reordd(iorder, ndim, array): """ Re-order the elements of a double precision array according to a given order vector. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/reordd_c.html :param iorder: Order vector to be used to re-order array. :type iorder: Array of ints :param ndim: Dimension of array. :type ndim: int :param array: Array to be re-ordered. :type array: Array of floats :return: Re-ordered Array. :rtype: Array of floats """ iorder = stypes.toIntVector(iorder) ndim = ctypes.c_int(ndim) array = stypes.toDoubleVector(array) libspice.reordd_c(iorder, ndim, array) return stypes.cVectorToPython(array)
def create_manifest_from_space(self):
    """
    Populate a manifest file generated from details from the
    cloud foundry space environment.

    Walks every service instance in the current Cloud Foundry space
    summary and adds each one to this manifest via its service-specific
    helper; unsupported service types are logged as warnings and skipped.
    """
    space = predix.admin.cf.spaces.Space()
    summary = space.get_space_summary()
    for instance in summary['services']:
        service_type = instance['service_plan']['service']['label']
        name = instance['name']
        if service_type in self.supported:
            # Known types map to an admin helper class that knows how to
            # serialize itself into the manifest.
            service = self.supported[service_type](name=name)
            service.add_to_manifest(self)
        elif service_type == 'us-weather-forecast':
            # Weather has no entry in self.supported, so it is special-cased.
            weather = predix.admin.weather.WeatherForecast(name=name)
            weather.add_to_manifest(self)
        else:
            # Lazy %-style arguments: the message is only formatted when
            # the warning is actually emitted (and keeps structured args
            # available to logging handlers).
            logging.warning("Unsupported service type: %s", service_type)
def function[create_manifest_from_space, parameter[self]]: constant[ Populate a manifest file generated from details from the cloud foundry space environment. ] variable[space] assign[=] call[name[predix].admin.cf.spaces.Space, parameter[]] variable[summary] assign[=] call[name[space].get_space_summary, parameter[]] for taget[name[instance]] in starred[call[name[summary]][constant[services]]] begin[:] variable[service_type] assign[=] call[call[call[name[instance]][constant[service_plan]]][constant[service]]][constant[label]] variable[name] assign[=] call[name[instance]][constant[name]] if compare[name[service_type] in name[self].supported] begin[:] variable[service] assign[=] call[call[name[self].supported][name[service_type]], parameter[]] call[name[service].add_to_manifest, parameter[name[self]]]
keyword[def] identifier[create_manifest_from_space] ( identifier[self] ): literal[string] identifier[space] = identifier[predix] . identifier[admin] . identifier[cf] . identifier[spaces] . identifier[Space] () identifier[summary] = identifier[space] . identifier[get_space_summary] () keyword[for] identifier[instance] keyword[in] identifier[summary] [ literal[string] ]: identifier[service_type] = identifier[instance] [ literal[string] ][ literal[string] ][ literal[string] ] identifier[name] = identifier[instance] [ literal[string] ] keyword[if] identifier[service_type] keyword[in] identifier[self] . identifier[supported] : identifier[service] = identifier[self] . identifier[supported] [ identifier[service_type] ]( identifier[name] = identifier[name] ) identifier[service] . identifier[add_to_manifest] ( identifier[self] ) keyword[elif] identifier[service_type] == literal[string] : identifier[weather] = identifier[predix] . identifier[admin] . identifier[weather] . identifier[WeatherForecast] ( identifier[name] = identifier[name] ) identifier[weather] . identifier[add_to_manifest] ( identifier[self] ) keyword[else] : identifier[logging] . identifier[warning] ( literal[string] % identifier[service_type] )
def create_manifest_from_space(self): """ Populate a manifest file generated from details from the cloud foundry space environment. """ space = predix.admin.cf.spaces.Space() summary = space.get_space_summary() for instance in summary['services']: service_type = instance['service_plan']['service']['label'] name = instance['name'] if service_type in self.supported: service = self.supported[service_type](name=name) service.add_to_manifest(self) # depends on [control=['if'], data=['service_type']] elif service_type == 'us-weather-forecast': weather = predix.admin.weather.WeatherForecast(name=name) weather.add_to_manifest(self) # depends on [control=['if'], data=[]] else: logging.warning('Unsupported service type: %s' % service_type) # depends on [control=['for'], data=['instance']]
def handle_stdin_request(self, timeout=0.1):
    """
    Method to capture raw_input

    Waits up to *timeout* seconds for a stdin request from the kernel,
    prompts the local user with ``raw_input``, and sends the reply back
    over the stdin channel -- unless another request or an execution
    result arrived while the user was typing, in which case the reply is
    dropped.  During the prompt, SIGINT is wrapped so Ctrl-C both
    forwards to the kernel and aborts the local read.
    """
    msg_rep = self.km.stdin_channel.get_msg(timeout=timeout)
    # in case any iopub came while we were waiting:
    self.handle_iopub()
    # Only answer requests that belong to this client's session.
    if self.session_id == msg_rep["parent_header"].get("session"):
        # wrap SIGINT handler
        real_handler = signal.getsignal(signal.SIGINT)
        def double_int(sig,frame):
            # call real handler (forwards sigint to kernel),
            # then raise local interrupt, stopping local raw_input
            real_handler(sig,frame)
            raise KeyboardInterrupt
        signal.signal(signal.SIGINT, double_int)
        try:
            raw_data = raw_input(msg_rep["content"]["prompt"])
        except EOFError:
            # turn EOFError into EOF character
            raw_data = '\x04'
        except KeyboardInterrupt:
            # User hit Ctrl-C: abandon the read; the finally block still
            # restores the original handler before we return.
            sys.stdout.write('\n')
            return
        finally:
            # restore SIGINT handler
            signal.signal(signal.SIGINT, real_handler)
        # only send stdin reply if there *was not* another request
        # or execution finished while we were reading.
        if not (self.km.stdin_channel.msg_ready() or self.km.shell_channel.msg_ready()):
            self.km.stdin_channel.input(raw_data)
def function[handle_stdin_request, parameter[self, timeout]]: constant[ Method to capture raw_input ] variable[msg_rep] assign[=] call[name[self].km.stdin_channel.get_msg, parameter[]] call[name[self].handle_iopub, parameter[]] if compare[name[self].session_id equal[==] call[call[name[msg_rep]][constant[parent_header]].get, parameter[constant[session]]]] begin[:] variable[real_handler] assign[=] call[name[signal].getsignal, parameter[name[signal].SIGINT]] def function[double_int, parameter[sig, frame]]: call[name[real_handler], parameter[name[sig], name[frame]]] <ast.Raise object at 0x7da20e961c30> call[name[signal].signal, parameter[name[signal].SIGINT, name[double_int]]] <ast.Try object at 0x7da20e9625c0> if <ast.UnaryOp object at 0x7da1b26aee60> begin[:] call[name[self].km.stdin_channel.input, parameter[name[raw_data]]]
keyword[def] identifier[handle_stdin_request] ( identifier[self] , identifier[timeout] = literal[int] ): literal[string] identifier[msg_rep] = identifier[self] . identifier[km] . identifier[stdin_channel] . identifier[get_msg] ( identifier[timeout] = identifier[timeout] ) identifier[self] . identifier[handle_iopub] () keyword[if] identifier[self] . identifier[session_id] == identifier[msg_rep] [ literal[string] ]. identifier[get] ( literal[string] ): identifier[real_handler] = identifier[signal] . identifier[getsignal] ( identifier[signal] . identifier[SIGINT] ) keyword[def] identifier[double_int] ( identifier[sig] , identifier[frame] ): identifier[real_handler] ( identifier[sig] , identifier[frame] ) keyword[raise] identifier[KeyboardInterrupt] identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGINT] , identifier[double_int] ) keyword[try] : identifier[raw_data] = identifier[raw_input] ( identifier[msg_rep] [ literal[string] ][ literal[string] ]) keyword[except] identifier[EOFError] : identifier[raw_data] = literal[string] keyword[except] identifier[KeyboardInterrupt] : identifier[sys] . identifier[stdout] . identifier[write] ( literal[string] ) keyword[return] keyword[finally] : identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGINT] , identifier[real_handler] ) keyword[if] keyword[not] ( identifier[self] . identifier[km] . identifier[stdin_channel] . identifier[msg_ready] () keyword[or] identifier[self] . identifier[km] . identifier[shell_channel] . identifier[msg_ready] ()): identifier[self] . identifier[km] . identifier[stdin_channel] . identifier[input] ( identifier[raw_data] )
def handle_stdin_request(self, timeout=0.1): """ Method to capture raw_input """ msg_rep = self.km.stdin_channel.get_msg(timeout=timeout) # in case any iopub came while we were waiting: self.handle_iopub() if self.session_id == msg_rep['parent_header'].get('session'): # wrap SIGINT handler real_handler = signal.getsignal(signal.SIGINT) def double_int(sig, frame): # call real handler (forwards sigint to kernel), # then raise local interrupt, stopping local raw_input real_handler(sig, frame) raise KeyboardInterrupt signal.signal(signal.SIGINT, double_int) try: raw_data = raw_input(msg_rep['content']['prompt']) # depends on [control=['try'], data=[]] except EOFError: # turn EOFError into EOF character raw_data = '\x04' # depends on [control=['except'], data=[]] except KeyboardInterrupt: sys.stdout.write('\n') return # depends on [control=['except'], data=[]] finally: # restore SIGINT handler signal.signal(signal.SIGINT, real_handler) # only send stdin reply if there *was not* another request # or execution finished while we were reading. if not (self.km.stdin_channel.msg_ready() or self.km.shell_channel.msg_ready()): self.km.stdin_channel.input(raw_data) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def _safe_attr(attr, camel_killer=False, replacement_char='x'): """Convert a key into something that is accessible as an attribute""" allowed = string.ascii_letters + string.digits + '_' attr = _safe_key(attr) if camel_killer: attr = _camel_killer(attr) attr = attr.replace(' ', '_') out = '' for character in attr: out += character if character in allowed else "_" out = out.strip("_") try: int(out[0]) except (ValueError, IndexError): pass else: out = '{0}{1}'.format(replacement_char, out) if out in kwlist: out = '{0}{1}'.format(replacement_char, out) return re.sub('_+', '_', out)
def function[_safe_attr, parameter[attr, camel_killer, replacement_char]]: constant[Convert a key into something that is accessible as an attribute] variable[allowed] assign[=] binary_operation[binary_operation[name[string].ascii_letters + name[string].digits] + constant[_]] variable[attr] assign[=] call[name[_safe_key], parameter[name[attr]]] if name[camel_killer] begin[:] variable[attr] assign[=] call[name[_camel_killer], parameter[name[attr]]] variable[attr] assign[=] call[name[attr].replace, parameter[constant[ ], constant[_]]] variable[out] assign[=] constant[] for taget[name[character]] in starred[name[attr]] begin[:] <ast.AugAssign object at 0x7da20e963880> variable[out] assign[=] call[name[out].strip, parameter[constant[_]]] <ast.Try object at 0x7da20e960b50> if compare[name[out] in name[kwlist]] begin[:] variable[out] assign[=] call[constant[{0}{1}].format, parameter[name[replacement_char], name[out]]] return[call[name[re].sub, parameter[constant[_+], constant[_], name[out]]]]
keyword[def] identifier[_safe_attr] ( identifier[attr] , identifier[camel_killer] = keyword[False] , identifier[replacement_char] = literal[string] ): literal[string] identifier[allowed] = identifier[string] . identifier[ascii_letters] + identifier[string] . identifier[digits] + literal[string] identifier[attr] = identifier[_safe_key] ( identifier[attr] ) keyword[if] identifier[camel_killer] : identifier[attr] = identifier[_camel_killer] ( identifier[attr] ) identifier[attr] = identifier[attr] . identifier[replace] ( literal[string] , literal[string] ) identifier[out] = literal[string] keyword[for] identifier[character] keyword[in] identifier[attr] : identifier[out] += identifier[character] keyword[if] identifier[character] keyword[in] identifier[allowed] keyword[else] literal[string] identifier[out] = identifier[out] . identifier[strip] ( literal[string] ) keyword[try] : identifier[int] ( identifier[out] [ literal[int] ]) keyword[except] ( identifier[ValueError] , identifier[IndexError] ): keyword[pass] keyword[else] : identifier[out] = literal[string] . identifier[format] ( identifier[replacement_char] , identifier[out] ) keyword[if] identifier[out] keyword[in] identifier[kwlist] : identifier[out] = literal[string] . identifier[format] ( identifier[replacement_char] , identifier[out] ) keyword[return] identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[out] )
def _safe_attr(attr, camel_killer=False, replacement_char='x'): """Convert a key into something that is accessible as an attribute""" allowed = string.ascii_letters + string.digits + '_' attr = _safe_key(attr) if camel_killer: attr = _camel_killer(attr) # depends on [control=['if'], data=[]] attr = attr.replace(' ', '_') out = '' for character in attr: out += character if character in allowed else '_' # depends on [control=['for'], data=['character']] out = out.strip('_') try: int(out[0]) # depends on [control=['try'], data=[]] except (ValueError, IndexError): pass # depends on [control=['except'], data=[]] else: out = '{0}{1}'.format(replacement_char, out) if out in kwlist: out = '{0}{1}'.format(replacement_char, out) # depends on [control=['if'], data=['out']] return re.sub('_+', '_', out)
def bounding_box(self, factor=10.0): """Tuple defining the default ``bounding_box`` limits, ``(x_low, x_high)``. .. math:: x_{\\textnormal{low}} = 0 x_{\\textnormal{high}} = \\log(\\lambda_{\\textnormal{max}} \\;\ (1 + \\textnormal{factor})) Parameters ---------- factor : float Used to calculate ``x_high``. """ w0 = self.lambda_max return (w0 * 0, np.log10(w0 + factor * w0))
def function[bounding_box, parameter[self, factor]]: constant[Tuple defining the default ``bounding_box`` limits, ``(x_low, x_high)``. .. math:: x_{\textnormal{low}} = 0 x_{\textnormal{high}} = \log(\lambda_{\textnormal{max}} \; (1 + \textnormal{factor})) Parameters ---------- factor : float Used to calculate ``x_high``. ] variable[w0] assign[=] name[self].lambda_max return[tuple[[<ast.BinOp object at 0x7da20c6c4eb0>, <ast.Call object at 0x7da20c6c65f0>]]]
keyword[def] identifier[bounding_box] ( identifier[self] , identifier[factor] = literal[int] ): literal[string] identifier[w0] = identifier[self] . identifier[lambda_max] keyword[return] ( identifier[w0] * literal[int] , identifier[np] . identifier[log10] ( identifier[w0] + identifier[factor] * identifier[w0] ))
def bounding_box(self, factor=10.0): """Tuple defining the default ``bounding_box`` limits, ``(x_low, x_high)``. .. math:: x_{\\textnormal{low}} = 0 x_{\\textnormal{high}} = \\log(\\lambda_{\\textnormal{max}} \\; (1 + \\textnormal{factor})) Parameters ---------- factor : float Used to calculate ``x_high``. """ w0 = self.lambda_max return (w0 * 0, np.log10(w0 + factor * w0))
def ask(self, answers=None): """ Ask the question, then ask any sub-questions. This returns a dict with the {value: answer} pairs for the current question plus all descendant questions. """ if answers is None: answers = {} _answers = {} if self.multiple: print((bold('Multiple answers are supported for this question. ' + 'Please enter a "." character to finish.'))) _answers[self.value] = [] answer = self._ask(answers) while answer is not None: _answers[self.value].append(answer) answer = self._ask(answers) else: _answers[self.value] = self._ask(answers) if isinstance(self.validator, list): for v in self.validator: _answers = dict(_answers, **v.hints()) else: _answers = dict(_answers, **self.validator.hints()) for q in self._questions: answers = dict(answers, **_answers) _answers = dict(_answers, **q.ask(answers)) return _answers
def function[ask, parameter[self, answers]]: constant[ Ask the question, then ask any sub-questions. This returns a dict with the {value: answer} pairs for the current question plus all descendant questions. ] if compare[name[answers] is constant[None]] begin[:] variable[answers] assign[=] dictionary[[], []] variable[_answers] assign[=] dictionary[[], []] if name[self].multiple begin[:] call[name[print], parameter[call[name[bold], parameter[binary_operation[constant[Multiple answers are supported for this question. ] + constant[Please enter a "." character to finish.]]]]]] call[name[_answers]][name[self].value] assign[=] list[[]] variable[answer] assign[=] call[name[self]._ask, parameter[name[answers]]] while compare[name[answer] is_not constant[None]] begin[:] call[call[name[_answers]][name[self].value].append, parameter[name[answer]]] variable[answer] assign[=] call[name[self]._ask, parameter[name[answers]]] if call[name[isinstance], parameter[name[self].validator, name[list]]] begin[:] for taget[name[v]] in starred[name[self].validator] begin[:] variable[_answers] assign[=] call[name[dict], parameter[name[_answers]]] for taget[name[q]] in starred[name[self]._questions] begin[:] variable[answers] assign[=] call[name[dict], parameter[name[answers]]] variable[_answers] assign[=] call[name[dict], parameter[name[_answers]]] return[name[_answers]]
keyword[def] identifier[ask] ( identifier[self] , identifier[answers] = keyword[None] ): literal[string] keyword[if] identifier[answers] keyword[is] keyword[None] : identifier[answers] ={} identifier[_answers] ={} keyword[if] identifier[self] . identifier[multiple] : identifier[print] (( identifier[bold] ( literal[string] + literal[string] ))) identifier[_answers] [ identifier[self] . identifier[value] ]=[] identifier[answer] = identifier[self] . identifier[_ask] ( identifier[answers] ) keyword[while] identifier[answer] keyword[is] keyword[not] keyword[None] : identifier[_answers] [ identifier[self] . identifier[value] ]. identifier[append] ( identifier[answer] ) identifier[answer] = identifier[self] . identifier[_ask] ( identifier[answers] ) keyword[else] : identifier[_answers] [ identifier[self] . identifier[value] ]= identifier[self] . identifier[_ask] ( identifier[answers] ) keyword[if] identifier[isinstance] ( identifier[self] . identifier[validator] , identifier[list] ): keyword[for] identifier[v] keyword[in] identifier[self] . identifier[validator] : identifier[_answers] = identifier[dict] ( identifier[_answers] ,** identifier[v] . identifier[hints] ()) keyword[else] : identifier[_answers] = identifier[dict] ( identifier[_answers] ,** identifier[self] . identifier[validator] . identifier[hints] ()) keyword[for] identifier[q] keyword[in] identifier[self] . identifier[_questions] : identifier[answers] = identifier[dict] ( identifier[answers] ,** identifier[_answers] ) identifier[_answers] = identifier[dict] ( identifier[_answers] ,** identifier[q] . identifier[ask] ( identifier[answers] )) keyword[return] identifier[_answers]
def ask(self, answers=None): """ Ask the question, then ask any sub-questions. This returns a dict with the {value: answer} pairs for the current question plus all descendant questions. """ if answers is None: answers = {} # depends on [control=['if'], data=['answers']] _answers = {} if self.multiple: print(bold('Multiple answers are supported for this question. ' + 'Please enter a "." character to finish.')) _answers[self.value] = [] answer = self._ask(answers) while answer is not None: _answers[self.value].append(answer) answer = self._ask(answers) # depends on [control=['while'], data=['answer']] # depends on [control=['if'], data=[]] else: _answers[self.value] = self._ask(answers) if isinstance(self.validator, list): for v in self.validator: _answers = dict(_answers, **v.hints()) # depends on [control=['for'], data=['v']] # depends on [control=['if'], data=[]] else: _answers = dict(_answers, **self.validator.hints()) for q in self._questions: answers = dict(answers, **_answers) _answers = dict(_answers, **q.ask(answers)) # depends on [control=['for'], data=['q']] return _answers
def user_courses(self): ''' Returns the list of courses this user is subscribed for, or owning, or tutoring. This leads to the fact that tutors and owners don't need course membership. ''' registered = self.courses.filter(active__exact=True).distinct() return (self.tutor_courses() | registered).distinct()
def function[user_courses, parameter[self]]: constant[ Returns the list of courses this user is subscribed for, or owning, or tutoring. This leads to the fact that tutors and owners don't need course membership. ] variable[registered] assign[=] call[call[name[self].courses.filter, parameter[]].distinct, parameter[]] return[call[binary_operation[call[name[self].tutor_courses, parameter[]] <ast.BitOr object at 0x7da2590d6aa0> name[registered]].distinct, parameter[]]]
keyword[def] identifier[user_courses] ( identifier[self] ): literal[string] identifier[registered] = identifier[self] . identifier[courses] . identifier[filter] ( identifier[active__exact] = keyword[True] ). identifier[distinct] () keyword[return] ( identifier[self] . identifier[tutor_courses] ()| identifier[registered] ). identifier[distinct] ()
def user_courses(self): """ Returns the list of courses this user is subscribed for, or owning, or tutoring. This leads to the fact that tutors and owners don't need course membership. """ registered = self.courses.filter(active__exact=True).distinct() return (self.tutor_courses() | registered).distinct()
def get_options(self, gradebook_id): """Get options for gradebook. Get options dictionary for a gradebook. Options include gradebook attributes. Args: gradebook_id (str): unique identifier for gradebook, i.e. ``2314`` Returns: An example return value is: .. code-block:: python { u'data': { u'accessLevel': u'class', u'archived': False, u'calc_on_approved_only': False, u'configured': None, u'courseName': u'', u'courseNumber': u'mitxdemosite', u'deriveOverallGrades': False, u'gradebookEwsEnabled': False, u'gradebookId': 1293808, u'gradebookName': u'Gradebook for mitxdemosite', u'gradebookReadOnly': False, u'gradebookVisibleToAdvisors': False, u'graders_change_approved': False, u'hideExcuseButtonInUI': False, u'homeworkBetaEnabled': False, u'membershipQualifier': u'/project/mitxdemosite', u'membershipSource': u'stellar', u'student_sees_actual_grades': True, u'student_sees_category_info': True, u'student_sees_comments': True, u'student_sees_cumulative_score': True, u'student_sees_histograms': True, u'student_sees_submissions': False, u'ta_approves': False, u'ta_change_approved': False, u'ta_configures': False, u'ta_edits': False, u'use_grade_weighting': False, u'usingAttendance': False, u'versionCompatible': 4, u'versionCompatibleString': u'General Availability' }, } """ end_point = 'gradebook/options/{gradebookId}'.format( gradebookId=gradebook_id or self.gradebook_id) options = self.get(end_point) return options['data']
def function[get_options, parameter[self, gradebook_id]]: constant[Get options for gradebook. Get options dictionary for a gradebook. Options include gradebook attributes. Args: gradebook_id (str): unique identifier for gradebook, i.e. ``2314`` Returns: An example return value is: .. code-block:: python { u'data': { u'accessLevel': u'class', u'archived': False, u'calc_on_approved_only': False, u'configured': None, u'courseName': u'', u'courseNumber': u'mitxdemosite', u'deriveOverallGrades': False, u'gradebookEwsEnabled': False, u'gradebookId': 1293808, u'gradebookName': u'Gradebook for mitxdemosite', u'gradebookReadOnly': False, u'gradebookVisibleToAdvisors': False, u'graders_change_approved': False, u'hideExcuseButtonInUI': False, u'homeworkBetaEnabled': False, u'membershipQualifier': u'/project/mitxdemosite', u'membershipSource': u'stellar', u'student_sees_actual_grades': True, u'student_sees_category_info': True, u'student_sees_comments': True, u'student_sees_cumulative_score': True, u'student_sees_histograms': True, u'student_sees_submissions': False, u'ta_approves': False, u'ta_change_approved': False, u'ta_configures': False, u'ta_edits': False, u'use_grade_weighting': False, u'usingAttendance': False, u'versionCompatible': 4, u'versionCompatibleString': u'General Availability' }, } ] variable[end_point] assign[=] call[constant[gradebook/options/{gradebookId}].format, parameter[]] variable[options] assign[=] call[name[self].get, parameter[name[end_point]]] return[call[name[options]][constant[data]]]
keyword[def] identifier[get_options] ( identifier[self] , identifier[gradebook_id] ): literal[string] identifier[end_point] = literal[string] . identifier[format] ( identifier[gradebookId] = identifier[gradebook_id] keyword[or] identifier[self] . identifier[gradebook_id] ) identifier[options] = identifier[self] . identifier[get] ( identifier[end_point] ) keyword[return] identifier[options] [ literal[string] ]
def get_options(self, gradebook_id): """Get options for gradebook. Get options dictionary for a gradebook. Options include gradebook attributes. Args: gradebook_id (str): unique identifier for gradebook, i.e. ``2314`` Returns: An example return value is: .. code-block:: python { u'data': { u'accessLevel': u'class', u'archived': False, u'calc_on_approved_only': False, u'configured': None, u'courseName': u'', u'courseNumber': u'mitxdemosite', u'deriveOverallGrades': False, u'gradebookEwsEnabled': False, u'gradebookId': 1293808, u'gradebookName': u'Gradebook for mitxdemosite', u'gradebookReadOnly': False, u'gradebookVisibleToAdvisors': False, u'graders_change_approved': False, u'hideExcuseButtonInUI': False, u'homeworkBetaEnabled': False, u'membershipQualifier': u'/project/mitxdemosite', u'membershipSource': u'stellar', u'student_sees_actual_grades': True, u'student_sees_category_info': True, u'student_sees_comments': True, u'student_sees_cumulative_score': True, u'student_sees_histograms': True, u'student_sees_submissions': False, u'ta_approves': False, u'ta_change_approved': False, u'ta_configures': False, u'ta_edits': False, u'use_grade_weighting': False, u'usingAttendance': False, u'versionCompatible': 4, u'versionCompatibleString': u'General Availability' }, } """ end_point = 'gradebook/options/{gradebookId}'.format(gradebookId=gradebook_id or self.gradebook_id) options = self.get(end_point) return options['data']
def systemInformationType8(): """SYSTEM INFORMATION TYPE 8 Section 9.1.42""" a = L2PseudoLength(l2pLength=0x01) b = TpPd(pd=0x6) c = MessageType(mesType=0x18) # 00011000 d = Si8RestOctets() packet = a / b / c / d return packet
def function[systemInformationType8, parameter[]]: constant[SYSTEM INFORMATION TYPE 8 Section 9.1.42] variable[a] assign[=] call[name[L2PseudoLength], parameter[]] variable[b] assign[=] call[name[TpPd], parameter[]] variable[c] assign[=] call[name[MessageType], parameter[]] variable[d] assign[=] call[name[Si8RestOctets], parameter[]] variable[packet] assign[=] binary_operation[binary_operation[binary_operation[name[a] / name[b]] / name[c]] / name[d]] return[name[packet]]
keyword[def] identifier[systemInformationType8] (): literal[string] identifier[a] = identifier[L2PseudoLength] ( identifier[l2pLength] = literal[int] ) identifier[b] = identifier[TpPd] ( identifier[pd] = literal[int] ) identifier[c] = identifier[MessageType] ( identifier[mesType] = literal[int] ) identifier[d] = identifier[Si8RestOctets] () identifier[packet] = identifier[a] / identifier[b] / identifier[c] / identifier[d] keyword[return] identifier[packet]
def systemInformationType8(): """SYSTEM INFORMATION TYPE 8 Section 9.1.42""" a = L2PseudoLength(l2pLength=1) b = TpPd(pd=6) c = MessageType(mesType=24) # 00011000 d = Si8RestOctets() packet = a / b / c / d return packet
def get_handler(): """Return the handler as a named tuple. The named tuple attributes are 'host', 'port', 'signum'. Return None when no handler has been registered. """ host, port, signum = _pdbhandler._registered() if signum: return Handler(host if host else DFLT_ADDRESS[0].encode(), port if port else DFLT_ADDRESS[1], signum)
def function[get_handler, parameter[]]: constant[Return the handler as a named tuple. The named tuple attributes are 'host', 'port', 'signum'. Return None when no handler has been registered. ] <ast.Tuple object at 0x7da1b0e4c220> assign[=] call[name[_pdbhandler]._registered, parameter[]] if name[signum] begin[:] return[call[name[Handler], parameter[<ast.IfExp object at 0x7da1b0e4fc10>, <ast.IfExp object at 0x7da1b0e4e440>, name[signum]]]]
keyword[def] identifier[get_handler] (): literal[string] identifier[host] , identifier[port] , identifier[signum] = identifier[_pdbhandler] . identifier[_registered] () keyword[if] identifier[signum] : keyword[return] identifier[Handler] ( identifier[host] keyword[if] identifier[host] keyword[else] identifier[DFLT_ADDRESS] [ literal[int] ]. identifier[encode] (), identifier[port] keyword[if] identifier[port] keyword[else] identifier[DFLT_ADDRESS] [ literal[int] ], identifier[signum] )
def get_handler(): """Return the handler as a named tuple. The named tuple attributes are 'host', 'port', 'signum'. Return None when no handler has been registered. """ (host, port, signum) = _pdbhandler._registered() if signum: return Handler(host if host else DFLT_ADDRESS[0].encode(), port if port else DFLT_ADDRESS[1], signum) # depends on [control=['if'], data=[]]
def fetch(self): """ Fetch a ExecutionContextInstance :returns: Fetched ExecutionContextInstance :rtype: twilio.rest.studio.v1.flow.execution.execution_context.ExecutionContextInstance """ params = values.of({}) payload = self._version.fetch( 'GET', self._uri, params=params, ) return ExecutionContextInstance( self._version, payload, flow_sid=self._solution['flow_sid'], execution_sid=self._solution['execution_sid'], )
def function[fetch, parameter[self]]: constant[ Fetch a ExecutionContextInstance :returns: Fetched ExecutionContextInstance :rtype: twilio.rest.studio.v1.flow.execution.execution_context.ExecutionContextInstance ] variable[params] assign[=] call[name[values].of, parameter[dictionary[[], []]]] variable[payload] assign[=] call[name[self]._version.fetch, parameter[constant[GET], name[self]._uri]] return[call[name[ExecutionContextInstance], parameter[name[self]._version, name[payload]]]]
keyword[def] identifier[fetch] ( identifier[self] ): literal[string] identifier[params] = identifier[values] . identifier[of] ({}) identifier[payload] = identifier[self] . identifier[_version] . identifier[fetch] ( literal[string] , identifier[self] . identifier[_uri] , identifier[params] = identifier[params] , ) keyword[return] identifier[ExecutionContextInstance] ( identifier[self] . identifier[_version] , identifier[payload] , identifier[flow_sid] = identifier[self] . identifier[_solution] [ literal[string] ], identifier[execution_sid] = identifier[self] . identifier[_solution] [ literal[string] ], )
def fetch(self): """ Fetch a ExecutionContextInstance :returns: Fetched ExecutionContextInstance :rtype: twilio.rest.studio.v1.flow.execution.execution_context.ExecutionContextInstance """ params = values.of({}) payload = self._version.fetch('GET', self._uri, params=params) return ExecutionContextInstance(self._version, payload, flow_sid=self._solution['flow_sid'], execution_sid=self._solution['execution_sid'])
def get_mysql_update_depth_in_branch(self, path): """ :returns: The sql needed to update the depth of all the nodes in a branch. """ vendor = self.node_cls.get_database_vendor('write') sql = ("UPDATE %s SET depth=" + sql_length("path", vendor=vendor) + "/%%s WHERE path LIKE %%s") % ( connection.ops.quote_name( get_result_class(self.node_cls)._meta.db_table), ) vals = [self.node_cls.steplen, path + '%'] return sql, vals
def function[get_mysql_update_depth_in_branch, parameter[self, path]]: constant[ :returns: The sql needed to update the depth of all the nodes in a branch. ] variable[vendor] assign[=] call[name[self].node_cls.get_database_vendor, parameter[constant[write]]] variable[sql] assign[=] binary_operation[binary_operation[binary_operation[constant[UPDATE %s SET depth=] + call[name[sql_length], parameter[constant[path]]]] + constant[/%%s WHERE path LIKE %%s]] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da204962f80>]]] variable[vals] assign[=] list[[<ast.Attribute object at 0x7da204960310>, <ast.BinOp object at 0x7da204961cf0>]] return[tuple[[<ast.Name object at 0x7da204961d20>, <ast.Name object at 0x7da2049614b0>]]]
keyword[def] identifier[get_mysql_update_depth_in_branch] ( identifier[self] , identifier[path] ): literal[string] identifier[vendor] = identifier[self] . identifier[node_cls] . identifier[get_database_vendor] ( literal[string] ) identifier[sql] =( literal[string] + identifier[sql_length] ( literal[string] , identifier[vendor] = identifier[vendor] )+ literal[string] )%( identifier[connection] . identifier[ops] . identifier[quote_name] ( identifier[get_result_class] ( identifier[self] . identifier[node_cls] ). identifier[_meta] . identifier[db_table] ),) identifier[vals] =[ identifier[self] . identifier[node_cls] . identifier[steplen] , identifier[path] + literal[string] ] keyword[return] identifier[sql] , identifier[vals]
def get_mysql_update_depth_in_branch(self, path): """ :returns: The sql needed to update the depth of all the nodes in a branch. """ vendor = self.node_cls.get_database_vendor('write') sql = ('UPDATE %s SET depth=' + sql_length('path', vendor=vendor) + '/%%s WHERE path LIKE %%s') % (connection.ops.quote_name(get_result_class(self.node_cls)._meta.db_table),) vals = [self.node_cls.steplen, path + '%'] return (sql, vals)
def allow_headers(self, domain, headers, secure=True): """ Allows ``domain`` to push data via the HTTP headers named in ``headers``. As with ``allow_domain``, ``domain`` may be either a full domain name or a wildcard. Again, use of wildcards is discouraged for security reasons. The value for ``headers`` should be a list of header names. To disable Flash's requirement of security matching (e.g., retrieving a policy via HTTPS will require that SWFs also be retrieved via HTTPS), pass ``secure=False``. Due to security concerns, it is strongly recommended that you not disable this. """ if self.site_control == SITE_CONTROL_NONE: raise TypeError( METAPOLICY_ERROR.format("allow headers from a domain") ) self.header_domains[domain] = {'headers': headers, 'secure': secure}
def function[allow_headers, parameter[self, domain, headers, secure]]: constant[ Allows ``domain`` to push data via the HTTP headers named in ``headers``. As with ``allow_domain``, ``domain`` may be either a full domain name or a wildcard. Again, use of wildcards is discouraged for security reasons. The value for ``headers`` should be a list of header names. To disable Flash's requirement of security matching (e.g., retrieving a policy via HTTPS will require that SWFs also be retrieved via HTTPS), pass ``secure=False``. Due to security concerns, it is strongly recommended that you not disable this. ] if compare[name[self].site_control equal[==] name[SITE_CONTROL_NONE]] begin[:] <ast.Raise object at 0x7da18c4cf340> call[name[self].header_domains][name[domain]] assign[=] dictionary[[<ast.Constant object at 0x7da18fe921a0>, <ast.Constant object at 0x7da18fe93580>], [<ast.Name object at 0x7da18fe924d0>, <ast.Name object at 0x7da18fe93c40>]]
keyword[def] identifier[allow_headers] ( identifier[self] , identifier[domain] , identifier[headers] , identifier[secure] = keyword[True] ): literal[string] keyword[if] identifier[self] . identifier[site_control] == identifier[SITE_CONTROL_NONE] : keyword[raise] identifier[TypeError] ( identifier[METAPOLICY_ERROR] . identifier[format] ( literal[string] ) ) identifier[self] . identifier[header_domains] [ identifier[domain] ]={ literal[string] : identifier[headers] , literal[string] : identifier[secure] }
def allow_headers(self, domain, headers, secure=True): """ Allows ``domain`` to push data via the HTTP headers named in ``headers``. As with ``allow_domain``, ``domain`` may be either a full domain name or a wildcard. Again, use of wildcards is discouraged for security reasons. The value for ``headers`` should be a list of header names. To disable Flash's requirement of security matching (e.g., retrieving a policy via HTTPS will require that SWFs also be retrieved via HTTPS), pass ``secure=False``. Due to security concerns, it is strongly recommended that you not disable this. """ if self.site_control == SITE_CONTROL_NONE: raise TypeError(METAPOLICY_ERROR.format('allow headers from a domain')) # depends on [control=['if'], data=[]] self.header_domains[domain] = {'headers': headers, 'secure': secure}
def index(self): """main page rendering """ self._check_auth(must_admin=False) is_admin = self._check_admin() sess = cherrypy.session user = sess.get(SESSION_KEY, None) if self.auth_mode == 'none': user_attrs = None else: user_attrs = self._get_user(user) attrs_list = self.attributes.get_search_attributes() return self.temp['index.tmpl'].render( is_admin=is_admin, attrs_list=attrs_list, searchresult=user_attrs, notifications=self._empty_notification(), )
def function[index, parameter[self]]: constant[main page rendering ] call[name[self]._check_auth, parameter[]] variable[is_admin] assign[=] call[name[self]._check_admin, parameter[]] variable[sess] assign[=] name[cherrypy].session variable[user] assign[=] call[name[sess].get, parameter[name[SESSION_KEY], constant[None]]] if compare[name[self].auth_mode equal[==] constant[none]] begin[:] variable[user_attrs] assign[=] constant[None] variable[attrs_list] assign[=] call[name[self].attributes.get_search_attributes, parameter[]] return[call[call[name[self].temp][constant[index.tmpl]].render, parameter[]]]
keyword[def] identifier[index] ( identifier[self] ): literal[string] identifier[self] . identifier[_check_auth] ( identifier[must_admin] = keyword[False] ) identifier[is_admin] = identifier[self] . identifier[_check_admin] () identifier[sess] = identifier[cherrypy] . identifier[session] identifier[user] = identifier[sess] . identifier[get] ( identifier[SESSION_KEY] , keyword[None] ) keyword[if] identifier[self] . identifier[auth_mode] == literal[string] : identifier[user_attrs] = keyword[None] keyword[else] : identifier[user_attrs] = identifier[self] . identifier[_get_user] ( identifier[user] ) identifier[attrs_list] = identifier[self] . identifier[attributes] . identifier[get_search_attributes] () keyword[return] identifier[self] . identifier[temp] [ literal[string] ]. identifier[render] ( identifier[is_admin] = identifier[is_admin] , identifier[attrs_list] = identifier[attrs_list] , identifier[searchresult] = identifier[user_attrs] , identifier[notifications] = identifier[self] . identifier[_empty_notification] (), )
def index(self): """main page rendering """ self._check_auth(must_admin=False) is_admin = self._check_admin() sess = cherrypy.session user = sess.get(SESSION_KEY, None) if self.auth_mode == 'none': user_attrs = None # depends on [control=['if'], data=[]] else: user_attrs = self._get_user(user) attrs_list = self.attributes.get_search_attributes() return self.temp['index.tmpl'].render(is_admin=is_admin, attrs_list=attrs_list, searchresult=user_attrs, notifications=self._empty_notification())
def nightmode(self, group=None):
    """Put the given group (or every group) into dim night-light mode.

    The lights are switched off first, then a single nightmode command is
    sent. The command is deliberately not repeated: retransmitting would
    make the bulbs blink, and there is no way to detect whether the
    transmission succeeded. Not supported by the wifi gateway v3. Works
    for RGBW bulbs, contrary to the limitlessled documentation.
    """
    self.off(group)
    if group is None or group == 0:
        # Broadcast to all bulbs of each supported type.
        if self.has_rgbw:
            self._send_command(self.RGBW_COMMANDS["all_nightmode"])
        if self.has_white:
            self._send_command(self.WHITE_COMMANDS["all_nightmode"])
        return
    # Single retry only, and no "on" prefix, to avoid blinking.
    self._send_to_group(group,
                        per_group=True,
                        rgbw_cmd=self.RGBW_GROUP_X_NIGHTMODE,
                        white_cmd=self.WHITE_GROUP_X_NIGHTMODE,
                        send_on=False,
                        retries=1)
def function[nightmode, parameter[self, group]]: constant[ Enable nightmode (very dim white light). The command is sent only once, as multiple commands would blink lights rapidly. There is no way to automatically detect whether transmitting the command succeeded or not. This does not work with wifi gateway v3. Contrary to limitlessled documentation, this works with RGBW bulbs. ] call[name[self].off, parameter[name[group]]] if <ast.BoolOp object at 0x7da204621780> begin[:] if name[self].has_rgbw begin[:] call[name[self]._send_command, parameter[call[name[self].RGBW_COMMANDS][constant[all_nightmode]]]] if name[self].has_white begin[:] call[name[self]._send_command, parameter[call[name[self].WHITE_COMMANDS][constant[all_nightmode]]]]
keyword[def] identifier[nightmode] ( identifier[self] , identifier[group] = keyword[None] ): literal[string] identifier[self] . identifier[off] ( identifier[group] ) keyword[if] identifier[group] keyword[is] keyword[None] keyword[or] identifier[group] == literal[int] : keyword[if] identifier[self] . identifier[has_rgbw] : identifier[self] . identifier[_send_command] ( identifier[self] . identifier[RGBW_COMMANDS] [ literal[string] ]) keyword[if] identifier[self] . identifier[has_white] : identifier[self] . identifier[_send_command] ( identifier[self] . identifier[WHITE_COMMANDS] [ literal[string] ]) keyword[else] : identifier[self] . identifier[_send_to_group] ( identifier[group] , identifier[per_group] = keyword[True] , identifier[rgbw_cmd] = identifier[self] . identifier[RGBW_GROUP_X_NIGHTMODE] , identifier[white_cmd] = identifier[self] . identifier[WHITE_GROUP_X_NIGHTMODE] , identifier[send_on] = keyword[False] , identifier[retries] = literal[int] )
def nightmode(self, group=None): """ Enable nightmode (very dim white light). The command is sent only once, as multiple commands would blink lights rapidly. There is no way to automatically detect whether transmitting the command succeeded or not. This does not work with wifi gateway v3. Contrary to limitlessled documentation, this works with RGBW bulbs. """ self.off(group) if group is None or group == 0: if self.has_rgbw: self._send_command(self.RGBW_COMMANDS['all_nightmode']) # depends on [control=['if'], data=[]] if self.has_white: self._send_command(self.WHITE_COMMANDS['all_nightmode']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: self._send_to_group(group, per_group=True, rgbw_cmd=self.RGBW_GROUP_X_NIGHTMODE, white_cmd=self.WHITE_GROUP_X_NIGHTMODE, send_on=False, retries=1)
async def prefetch(self, query, *subqueries):
    """Asynchronous version of peewee's ``prefetch()``.

    Re-binds the query and every subquery to this manager's database
    before delegating, so the returned query has the subquery results
    already cached.
    """
    bound_query = self._swap_database(query)
    bound_subqueries = [self._swap_database(sq) for sq in subqueries]
    return await prefetch(bound_query, *bound_subqueries)
<ast.AsyncFunctionDef object at 0x7da20c6aa170>
keyword[async] keyword[def] identifier[prefetch] ( identifier[self] , identifier[query] ,* identifier[subqueries] ): literal[string] identifier[query] = identifier[self] . identifier[_swap_database] ( identifier[query] ) identifier[subqueries] = identifier[map] ( identifier[self] . identifier[_swap_database] , identifier[subqueries] ) keyword[return] ( keyword[await] identifier[prefetch] ( identifier[query] ,* identifier[subqueries] ))
async def prefetch(self, query, *subqueries): """Asynchronous version of the `prefetch()` from peewee. :return: Query that has already cached data for subqueries """ query = self._swap_database(query) subqueries = map(self._swap_database, subqueries) return await prefetch(query, *subqueries)
def _auto_commit(self, by_count=False): """Check if we should start a new commit operation and commit""" # Check if we are even supposed to do any auto-committing if (self._stopping or self._shuttingdown or (not self._start_d) or (self._last_processed_offset is None) or (not self.consumer_group) or (by_count and not self.auto_commit_every_n)): return # If we're auto_committing because the timer expired, or by count and # we don't have a record of our last_committed_offset, or we've # processed enough messages since our last commit, then try to commit if (not by_count or self._last_committed_offset is None or (self._last_processed_offset - self._last_committed_offset ) >= self.auto_commit_every_n): if not self._commit_ds: commit_d = self.commit() commit_d.addErrback(self._handle_auto_commit_error) else: # We're waiting on the last commit to complete, so add a # callback to be called when the current request completes d = Deferred() d.addCallback(self._retry_auto_commit, by_count) self._commit_ds.append(d)
def function[_auto_commit, parameter[self, by_count]]: constant[Check if we should start a new commit operation and commit] if <ast.BoolOp object at 0x7da1b04dbbb0> begin[:] return[None] if <ast.BoolOp object at 0x7da1b04d0940> begin[:] if <ast.UnaryOp object at 0x7da1b04d3160> begin[:] variable[commit_d] assign[=] call[name[self].commit, parameter[]] call[name[commit_d].addErrback, parameter[name[self]._handle_auto_commit_error]]
keyword[def] identifier[_auto_commit] ( identifier[self] , identifier[by_count] = keyword[False] ): literal[string] keyword[if] ( identifier[self] . identifier[_stopping] keyword[or] identifier[self] . identifier[_shuttingdown] keyword[or] ( keyword[not] identifier[self] . identifier[_start_d] ) keyword[or] ( identifier[self] . identifier[_last_processed_offset] keyword[is] keyword[None] ) keyword[or] ( keyword[not] identifier[self] . identifier[consumer_group] ) keyword[or] ( identifier[by_count] keyword[and] keyword[not] identifier[self] . identifier[auto_commit_every_n] )): keyword[return] keyword[if] ( keyword[not] identifier[by_count] keyword[or] identifier[self] . identifier[_last_committed_offset] keyword[is] keyword[None] keyword[or] ( identifier[self] . identifier[_last_processed_offset] - identifier[self] . identifier[_last_committed_offset] )>= identifier[self] . identifier[auto_commit_every_n] ): keyword[if] keyword[not] identifier[self] . identifier[_commit_ds] : identifier[commit_d] = identifier[self] . identifier[commit] () identifier[commit_d] . identifier[addErrback] ( identifier[self] . identifier[_handle_auto_commit_error] ) keyword[else] : identifier[d] = identifier[Deferred] () identifier[d] . identifier[addCallback] ( identifier[self] . identifier[_retry_auto_commit] , identifier[by_count] ) identifier[self] . identifier[_commit_ds] . identifier[append] ( identifier[d] )
def _auto_commit(self, by_count=False): """Check if we should start a new commit operation and commit""" # Check if we are even supposed to do any auto-committing if self._stopping or self._shuttingdown or (not self._start_d) or (self._last_processed_offset is None) or (not self.consumer_group) or (by_count and (not self.auto_commit_every_n)): return # depends on [control=['if'], data=[]] # If we're auto_committing because the timer expired, or by count and # we don't have a record of our last_committed_offset, or we've # processed enough messages since our last commit, then try to commit if not by_count or self._last_committed_offset is None or self._last_processed_offset - self._last_committed_offset >= self.auto_commit_every_n: if not self._commit_ds: commit_d = self.commit() commit_d.addErrback(self._handle_auto_commit_error) # depends on [control=['if'], data=[]] else: # We're waiting on the last commit to complete, so add a # callback to be called when the current request completes d = Deferred() d.addCallback(self._retry_auto_commit, by_count) self._commit_ds.append(d) # depends on [control=['if'], data=[]]
def output_json(self, file_name='/tmp/ND.json'):
    """Serialize the dataset description to *file_name* as JSON.

    Builds the ND JSON document from this object's dataset, project,
    channels and metadata, verifies it slice-by-slice, and writes it out.

    Arguments:
        file_name(str : '/tmp/ND.json'): The file name to store the json to

    Returns:
        None
    """
    complete_example = (
        self.dataset, self.project, self.channels, self.metadata)
    data = json.loads(self.nd_json(*complete_example))
    self.verify_path(data, VERIFY_BY_SLICE)
    # Fix: the previous implementation wrote str(data) -- the Python repr
    # of the dict (single quotes) -- which is not valid JSON despite the
    # function's name, and it leaked the file handle if the write raised.
    # Serialize properly inside a context manager instead.
    with open(file_name, 'w') as f:
        json.dump(data, f)
def function[output_json, parameter[self, file_name]]: constant[ Arguments: file_name(str : '/tmp/ND.json'): The file name to store the json to Returns: None ] variable[complete_example] assign[=] tuple[[<ast.Attribute object at 0x7da1b01d7e50>, <ast.Attribute object at 0x7da1b01d59f0>, <ast.Attribute object at 0x7da1b01d59c0>, <ast.Attribute object at 0x7da1b01d4d90>]] variable[data] assign[=] call[name[json].loads, parameter[call[name[self].nd_json, parameter[<ast.Starred object at 0x7da1b01d6f20>]]]] call[name[self].verify_path, parameter[name[data], name[VERIFY_BY_SLICE]]] variable[f] assign[=] call[name[open], parameter[name[file_name], constant[w]]] call[name[f].write, parameter[call[name[str], parameter[name[data]]]]] call[name[f].close, parameter[]]
keyword[def] identifier[output_json] ( identifier[self] , identifier[file_name] = literal[string] ): literal[string] identifier[complete_example] =( identifier[self] . identifier[dataset] , identifier[self] . identifier[project] , identifier[self] . identifier[channels] , identifier[self] . identifier[metadata] ) identifier[data] = identifier[json] . identifier[loads] ( identifier[self] . identifier[nd_json] (* identifier[complete_example] )) identifier[self] . identifier[verify_path] ( identifier[data] , identifier[VERIFY_BY_SLICE] ) identifier[f] = identifier[open] ( identifier[file_name] , literal[string] ) identifier[f] . identifier[write] ( identifier[str] ( identifier[data] )) identifier[f] . identifier[close] ()
def output_json(self, file_name='/tmp/ND.json'): """ Arguments: file_name(str : '/tmp/ND.json'): The file name to store the json to Returns: None """ complete_example = (self.dataset, self.project, self.channels, self.metadata) data = json.loads(self.nd_json(*complete_example)) # self.verify_json(data) self.verify_path(data, VERIFY_BY_SLICE) f = open(file_name, 'w') f.write(str(data)) f.close()
def play_env_problem_randomly(env_problem, num_steps):
    """Drive every environment in the batch with random actions.

    Resets all environments once up front, then for each of `num_steps`
    steps samples one random action per environment, applies the stacked
    batch of actions, and resets only the environments that reported done.
    """
    env_problem.reset()
    for _ in range(num_steps):
        # One random action per environment in the batch.
        sampled_actions = np.stack(
            [env_problem.action_space.sample()
             for _ in range(env_problem.batch_size)])
        # Observations are stored inside `env_problem`; we only need dones.
        _, _, dones, _ = env_problem.step(sampled_actions)
        env_problem.reset(indices=done_indices(dones))
def function[play_env_problem_randomly, parameter[env_problem, num_steps]]: constant[Plays the env problem by randomly sampling actions for `num_steps`.] call[name[env_problem].reset, parameter[]] for taget[name[_]] in starred[call[name[range], parameter[name[num_steps]]]] begin[:] variable[actions] assign[=] call[name[np].stack, parameter[<ast.ListComp object at 0x7da1b1e16650>]] <ast.Tuple object at 0x7da1b1e17b20> assign[=] call[name[env_problem].step, parameter[name[actions]]] call[name[env_problem].reset, parameter[]]
keyword[def] identifier[play_env_problem_randomly] ( identifier[env_problem] , identifier[num_steps] ): literal[string] identifier[env_problem] . identifier[reset] () keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[num_steps] ): identifier[actions] = identifier[np] . identifier[stack] ([ identifier[env_problem] . identifier[action_space] . identifier[sample] () keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[env_problem] . identifier[batch_size] )]) identifier[_] , identifier[_] , identifier[dones] , identifier[_] = identifier[env_problem] . identifier[step] ( identifier[actions] ) identifier[env_problem] . identifier[reset] ( identifier[indices] = identifier[done_indices] ( identifier[dones] ))
def play_env_problem_randomly(env_problem, num_steps): """Plays the env problem by randomly sampling actions for `num_steps`.""" # Reset all environments. env_problem.reset() # Play all environments, sampling random actions each time. for _ in range(num_steps): # Sample batch_size actions from the action space and stack them. actions = np.stack([env_problem.action_space.sample() for _ in range(env_problem.batch_size)]) # Execute actions, observations are stored in `env_problem`. (_, _, dones, _) = env_problem.step(actions) # Get the indices where we are done and reset those. env_problem.reset(indices=done_indices(dones)) # depends on [control=['for'], data=['_']]
def _get_refreshed_check_result(self, check_id): """ Given the ``check_id``, return the dict of Trusted Advisor check results. This handles refreshing the Trusted Advisor check, if desired, according to ``self.refresh_mode`` and ``self.refresh_timeout``. :param check_id: the Trusted Advisor check ID :type check_id: str :returns: dict check result. The return value of :py:meth:`Support.Client.describe_trusted_advisor_check_result` :rtype: dict """ # handle a refresh_mode of None right off the bat if self.refresh_mode is None: logger.info("Not refreshing Trusted Advisor check (refresh mode " "is None)") return self._get_check_result(check_id)[0] logger.debug("Handling refresh of check: %s", check_id) # if we want to refresh, step 1 is to see if we can yet... if not self._can_refresh_check(check_id): return self._get_check_result(check_id)[0] # either it's not too soon to refresh, or we have no idea... if isinstance(self.refresh_mode, type(1)): # mode is an int, check the last refresh time and compare checks, check_datetime = self._get_check_result(check_id) logger.debug('ta_refresh_mode older; check last refresh: %s; ' 'threshold=%d seconds', check_datetime, self.refresh_mode) if check_datetime >= datetime.now(utc) - timedelta( seconds=self.refresh_mode): logger.warning('Trusted Advisor check %s last refresh time ' 'of %s is newer than refresh threshold of %d ' 'seconds.', check_id, check_datetime, self.refresh_mode) return self._get_check_result(check_id)[0] # do the refresh logger.info("Refreshing Trusted Advisor check: %s", check_id) self.conn.refresh_trusted_advisor_check(checkId=check_id) # if mode isn't trigger, wait for refresh up to timeout if self.refresh_mode == 'trigger': result = self._get_check_result(check_id)[0] else: result = self._poll_for_refresh(check_id) return result
def function[_get_refreshed_check_result, parameter[self, check_id]]: constant[ Given the ``check_id``, return the dict of Trusted Advisor check results. This handles refreshing the Trusted Advisor check, if desired, according to ``self.refresh_mode`` and ``self.refresh_timeout``. :param check_id: the Trusted Advisor check ID :type check_id: str :returns: dict check result. The return value of :py:meth:`Support.Client.describe_trusted_advisor_check_result` :rtype: dict ] if compare[name[self].refresh_mode is constant[None]] begin[:] call[name[logger].info, parameter[constant[Not refreshing Trusted Advisor check (refresh mode is None)]]] return[call[call[name[self]._get_check_result, parameter[name[check_id]]]][constant[0]]] call[name[logger].debug, parameter[constant[Handling refresh of check: %s], name[check_id]]] if <ast.UnaryOp object at 0x7da18f811450> begin[:] return[call[call[name[self]._get_check_result, parameter[name[check_id]]]][constant[0]]] if call[name[isinstance], parameter[name[self].refresh_mode, call[name[type], parameter[constant[1]]]]] begin[:] <ast.Tuple object at 0x7da18f810a60> assign[=] call[name[self]._get_check_result, parameter[name[check_id]]] call[name[logger].debug, parameter[constant[ta_refresh_mode older; check last refresh: %s; threshold=%d seconds], name[check_datetime], name[self].refresh_mode]] if compare[name[check_datetime] greater_or_equal[>=] binary_operation[call[name[datetime].now, parameter[name[utc]]] - call[name[timedelta], parameter[]]]] begin[:] call[name[logger].warning, parameter[constant[Trusted Advisor check %s last refresh time of %s is newer than refresh threshold of %d seconds.], name[check_id], name[check_datetime], name[self].refresh_mode]] return[call[call[name[self]._get_check_result, parameter[name[check_id]]]][constant[0]]] call[name[logger].info, parameter[constant[Refreshing Trusted Advisor check: %s], name[check_id]]] call[name[self].conn.refresh_trusted_advisor_check, parameter[]] if 
compare[name[self].refresh_mode equal[==] constant[trigger]] begin[:] variable[result] assign[=] call[call[name[self]._get_check_result, parameter[name[check_id]]]][constant[0]] return[name[result]]
keyword[def] identifier[_get_refreshed_check_result] ( identifier[self] , identifier[check_id] ): literal[string] keyword[if] identifier[self] . identifier[refresh_mode] keyword[is] keyword[None] : identifier[logger] . identifier[info] ( literal[string] literal[string] ) keyword[return] identifier[self] . identifier[_get_check_result] ( identifier[check_id] )[ literal[int] ] identifier[logger] . identifier[debug] ( literal[string] , identifier[check_id] ) keyword[if] keyword[not] identifier[self] . identifier[_can_refresh_check] ( identifier[check_id] ): keyword[return] identifier[self] . identifier[_get_check_result] ( identifier[check_id] )[ literal[int] ] keyword[if] identifier[isinstance] ( identifier[self] . identifier[refresh_mode] , identifier[type] ( literal[int] )): identifier[checks] , identifier[check_datetime] = identifier[self] . identifier[_get_check_result] ( identifier[check_id] ) identifier[logger] . identifier[debug] ( literal[string] literal[string] , identifier[check_datetime] , identifier[self] . identifier[refresh_mode] ) keyword[if] identifier[check_datetime] >= identifier[datetime] . identifier[now] ( identifier[utc] )- identifier[timedelta] ( identifier[seconds] = identifier[self] . identifier[refresh_mode] ): identifier[logger] . identifier[warning] ( literal[string] literal[string] literal[string] , identifier[check_id] , identifier[check_datetime] , identifier[self] . identifier[refresh_mode] ) keyword[return] identifier[self] . identifier[_get_check_result] ( identifier[check_id] )[ literal[int] ] identifier[logger] . identifier[info] ( literal[string] , identifier[check_id] ) identifier[self] . identifier[conn] . identifier[refresh_trusted_advisor_check] ( identifier[checkId] = identifier[check_id] ) keyword[if] identifier[self] . identifier[refresh_mode] == literal[string] : identifier[result] = identifier[self] . 
identifier[_get_check_result] ( identifier[check_id] )[ literal[int] ] keyword[else] : identifier[result] = identifier[self] . identifier[_poll_for_refresh] ( identifier[check_id] ) keyword[return] identifier[result]
def _get_refreshed_check_result(self, check_id): """ Given the ``check_id``, return the dict of Trusted Advisor check results. This handles refreshing the Trusted Advisor check, if desired, according to ``self.refresh_mode`` and ``self.refresh_timeout``. :param check_id: the Trusted Advisor check ID :type check_id: str :returns: dict check result. The return value of :py:meth:`Support.Client.describe_trusted_advisor_check_result` :rtype: dict """ # handle a refresh_mode of None right off the bat if self.refresh_mode is None: logger.info('Not refreshing Trusted Advisor check (refresh mode is None)') return self._get_check_result(check_id)[0] # depends on [control=['if'], data=[]] logger.debug('Handling refresh of check: %s', check_id) # if we want to refresh, step 1 is to see if we can yet... if not self._can_refresh_check(check_id): return self._get_check_result(check_id)[0] # depends on [control=['if'], data=[]] # either it's not too soon to refresh, or we have no idea... if isinstance(self.refresh_mode, type(1)): # mode is an int, check the last refresh time and compare (checks, check_datetime) = self._get_check_result(check_id) logger.debug('ta_refresh_mode older; check last refresh: %s; threshold=%d seconds', check_datetime, self.refresh_mode) if check_datetime >= datetime.now(utc) - timedelta(seconds=self.refresh_mode): logger.warning('Trusted Advisor check %s last refresh time of %s is newer than refresh threshold of %d seconds.', check_id, check_datetime, self.refresh_mode) return self._get_check_result(check_id)[0] # depends on [control=['if'], data=['check_datetime']] # depends on [control=['if'], data=[]] # do the refresh logger.info('Refreshing Trusted Advisor check: %s', check_id) self.conn.refresh_trusted_advisor_check(checkId=check_id) # if mode isn't trigger, wait for refresh up to timeout if self.refresh_mode == 'trigger': result = self._get_check_result(check_id)[0] # depends on [control=['if'], data=[]] else: result = 
self._poll_for_refresh(check_id) return result
def calcHairpin(seq, mv_conc=50.0, dv_conc=0.0, dntp_conc=0.8, dna_conc=50.0, temp_c=37, max_loop=30): ''' Calculate the hairpin formation thermodynamics of a DNA sequence. **Note that the maximum length of `seq` is 60 bp.** This is a cap suggested by the Primer3 team as the longest reasonable sequence length for which a two-state NN model produces reliable results (see primer3/src/libnano/thal.h:50). Args: seq (str): DNA sequence to analyze for hairpin formation mv_conc (float/int, optional): Monovalent cation conc. (mM) dv_conc (float/int, optional): Divalent cation conc. (mM) dntp_conc (float/int, optional): dNTP conc. (mM) dna_conc (float/int, optional): DNA conc. (nM) temp_c (int, optional): Simulation temperature for dG (Celsius) max_loop(int, optional): Maximum size of loops in the structure Returns: A `ThermoResult` object with thermodynamic characteristics of the hairpin formation. Raises: ``RuntimeError`` ''' # NOTE: _setThermoArgs consumes **locals(), i.e. every parameter above by # name. Do NOT introduce additional local variables before this call, or # they would be forwarded as unexpected keyword arguments. _setThermoArgs(**locals()) # Delegate to the shared thermo-analysis object; checkExc() raises # RuntimeError if the underlying calculation reported an error. return _THERMO_ANALYSIS.calcHairpin(seq).checkExc()
def function[calcHairpin, parameter[seq, mv_conc, dv_conc, dntp_conc, dna_conc, temp_c, max_loop]]: constant[ Calculate the hairpin formation thermodynamics of a DNA sequence. **Note that the maximum length of `seq` is 60 bp.** This is a cap suggested by the Primer3 team as the longest reasonable sequence length for which a two-state NN model produces reliable results (see primer3/src/libnano/thal.h:50). Args: seq (str): DNA sequence to analyze for hairpin formation mv_conc (float/int, optional): Monovalent cation conc. (mM) dv_conc (float/int, optional): Divalent cation conc. (mM) dntp_conc (float/int, optional): dNTP conc. (mM) dna_conc (float/int, optional): DNA conc. (nM) temp_c (int, optional): Simulation temperature for dG (Celsius) max_loop(int, optional): Maximum size of loops in the structure Returns: A `ThermoResult` object with thermodynamic characteristics of the hairpin formation. Raises: ``RuntimeError`` ] call[name[_setThermoArgs], parameter[]] return[call[call[name[_THERMO_ANALYSIS].calcHairpin, parameter[name[seq]]].checkExc, parameter[]]]
keyword[def] identifier[calcHairpin] ( identifier[seq] , identifier[mv_conc] = literal[int] , identifier[dv_conc] = literal[int] , identifier[dntp_conc] = literal[int] , identifier[dna_conc] = literal[int] , identifier[temp_c] = literal[int] , identifier[max_loop] = literal[int] ): literal[string] identifier[_setThermoArgs] (** identifier[locals] ()) keyword[return] identifier[_THERMO_ANALYSIS] . identifier[calcHairpin] ( identifier[seq] ). identifier[checkExc] ()
def calcHairpin(seq, mv_conc=50.0, dv_conc=0.0, dntp_conc=0.8, dna_conc=50.0, temp_c=37, max_loop=30): """ Calculate the hairpin formation thermodynamics of a DNA sequence. **Note that the maximum length of `seq` is 60 bp.** This is a cap suggested by the Primer3 team as the longest reasonable sequence length for which a two-state NN model produces reliable results (see primer3/src/libnano/thal.h:50). Args: seq (str): DNA sequence to analyze for hairpin formation mv_conc (float/int, optional): Monovalent cation conc. (mM) dv_conc (float/int, optional): Divalent cation conc. (mM) dntp_conc (float/int, optional): dNTP conc. (mM) dna_conc (float/int, optional): DNA conc. (nM) temp_c (int, optional): Simulation temperature for dG (Celsius) max_loop(int, optional): Maximum size of loops in the structure Returns: A `ThermoResult` object with thermodynamic characteristics of the hairpin formation. Raises: ``RuntimeError`` """ _setThermoArgs(**locals()) return _THERMO_ANALYSIS.calcHairpin(seq).checkExc()
def iter_doc_filepaths(self, **kwargs):
    """Yield ``(doc_id, filepath)`` pairs for every detected document.

    Iterates shard by shard; ordering within a shard is arbitrary.
    @TEMP not locked to prevent doc creation/deletion
    """
    for current_shard in self._shards:
        for identifier, path in current_shard.iter_doc_filepaths(**kwargs):
            yield identifier, path
def function[iter_doc_filepaths, parameter[self]]: constant[Generator that iterates over all detected documents. and returns the filesystem path to each doc. Order is by shard, but arbitrary within shards. @TEMP not locked to prevent doc creation/deletion ] for taget[name[shard]] in starred[name[self]._shards] begin[:] for taget[tuple[[<ast.Name object at 0x7da20c993ac0>, <ast.Name object at 0x7da20c990370>]]] in starred[call[name[shard].iter_doc_filepaths, parameter[]]] begin[:] <ast.Yield object at 0x7da20c990ca0>
keyword[def] identifier[iter_doc_filepaths] ( identifier[self] ,** identifier[kwargs] ): literal[string] keyword[for] identifier[shard] keyword[in] identifier[self] . identifier[_shards] : keyword[for] identifier[doc_id] , identifier[blob] keyword[in] identifier[shard] . identifier[iter_doc_filepaths] (** identifier[kwargs] ): keyword[yield] identifier[doc_id] , identifier[blob]
def iter_doc_filepaths(self, **kwargs): """Generator that iterates over all detected documents. and returns the filesystem path to each doc. Order is by shard, but arbitrary within shards. @TEMP not locked to prevent doc creation/deletion """ for shard in self._shards: for (doc_id, blob) in shard.iter_doc_filepaths(**kwargs): yield (doc_id, blob) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['shard']]
def closes(self, assets, dt):
    """
    The close field's aggregation returns the latest close at the given
    dt.
    If the close for the given dt is `nan`, the most recent non-nan
    `close` is used.
    If there has been no data on or before the `dt` the close is `nan`.

    Returns
    -------
    np.array with dtype=float64, in order of assets parameter.
    """
    # `entries` maps asset -> (last_visited_dt, close); presumably it is a
    # cache that persists across calls via _prelude -- TODO confirm.
    market_open, prev_dt, dt_value, entries = self._prelude(dt, 'close')

    closes = []
    session_label = self._trading_calendar.minute_to_session_label(dt)

    def _get_filled_close(asset):
        """
        Returns the most recent non-nan close for the asset in this
        session. If there has been no data in this session on or before the
        `dt`, returns `nan`
        """
        # Read every close from the session open through `dt` and take the
        # last non-nan value; IndexError means the filtered window is empty.
        window = self._minute_reader.load_raw_arrays(
            ['close'],
            market_open,
            dt,
            [asset],
        )[0]
        try:
            return window[~np.isnan(window)][-1]
        except IndexError:
            return np.NaN

    for asset in assets:
        # Assets not alive this session always report nan.
        if not asset.is_alive_for_session(session_label):
            closes.append(np.NaN)
            continue

        if prev_dt is None:
            # No previous minute to carry a cached value from: read fresh.
            val = self._minute_reader.get_value(asset, dt, 'close')
            entries[asset] = (dt_value, val)
            closes.append(val)
            continue
        else:
            try:
                last_visited_dt, last_close = entries[asset]
                if last_visited_dt == dt_value:
                    # Cache hit for this exact minute.
                    closes.append(last_close)
                    continue
                elif last_visited_dt == prev_dt:
                    # Cache is from the immediately preceding minute: a nan
                    # close at `dt` forward-fills from the cached close.
                    val = self._minute_reader.get_value(
                        asset, dt, 'close')
                    if pd.isnull(val):
                        val = last_close
                    entries[asset] = (dt_value, val)
                    closes.append(val)
                    continue
                else:
                    # Stale cache: on nan, fall back to scanning the whole
                    # session for the most recent non-nan close.
                    val = self._minute_reader.get_value(
                        asset, dt, 'close')
                    if pd.isnull(val):
                        val = _get_filled_close(asset)
                    entries[asset] = (dt_value, val)
                    closes.append(val)
                    continue
            except KeyError:
                # No cache entry for this asset yet; same fallback as the
                # stale-cache branch.
                val = self._minute_reader.get_value(
                    asset, dt, 'close')
                if pd.isnull(val):
                    val = _get_filled_close(asset)
                entries[asset] = (dt_value, val)
                closes.append(val)
                continue
    return np.array(closes)
def function[closes, parameter[self, assets, dt]]: constant[ The close field's aggregation returns the latest close at the given dt. If the close for the given dt is `nan`, the most recent non-nan `close` is used. If there has been no data on or before the `dt` the close is `nan`. Returns ------- np.array with dtype=float64, in order of assets parameter. ] <ast.Tuple object at 0x7da1b1ea3fd0> assign[=] call[name[self]._prelude, parameter[name[dt], constant[close]]] variable[closes] assign[=] list[[]] variable[session_label] assign[=] call[name[self]._trading_calendar.minute_to_session_label, parameter[name[dt]]] def function[_get_filled_close, parameter[asset]]: constant[ Returns the most recent non-nan close for the asset in this session. If there has been no data in this session on or before the `dt`, returns `nan` ] variable[window] assign[=] call[call[name[self]._minute_reader.load_raw_arrays, parameter[list[[<ast.Constant object at 0x7da1b20119c0>]], name[market_open], name[dt], list[[<ast.Name object at 0x7da1b2012ec0>]]]]][constant[0]] <ast.Try object at 0x7da1b2011450> for taget[name[asset]] in starred[name[assets]] begin[:] if <ast.UnaryOp object at 0x7da1b20128f0> begin[:] call[name[closes].append, parameter[name[np].NaN]] continue if compare[name[prev_dt] is constant[None]] begin[:] variable[val] assign[=] call[name[self]._minute_reader.get_value, parameter[name[asset], name[dt], constant[close]]] call[name[entries]][name[asset]] assign[=] tuple[[<ast.Name object at 0x7da1b2011ab0>, <ast.Name object at 0x7da1b2011810>]] call[name[closes].append, parameter[name[val]]] continue return[call[name[np].array, parameter[name[closes]]]]
keyword[def] identifier[closes] ( identifier[self] , identifier[assets] , identifier[dt] ): literal[string] identifier[market_open] , identifier[prev_dt] , identifier[dt_value] , identifier[entries] = identifier[self] . identifier[_prelude] ( identifier[dt] , literal[string] ) identifier[closes] =[] identifier[session_label] = identifier[self] . identifier[_trading_calendar] . identifier[minute_to_session_label] ( identifier[dt] ) keyword[def] identifier[_get_filled_close] ( identifier[asset] ): literal[string] identifier[window] = identifier[self] . identifier[_minute_reader] . identifier[load_raw_arrays] ( [ literal[string] ], identifier[market_open] , identifier[dt] , [ identifier[asset] ], )[ literal[int] ] keyword[try] : keyword[return] identifier[window] [~ identifier[np] . identifier[isnan] ( identifier[window] )][- literal[int] ] keyword[except] identifier[IndexError] : keyword[return] identifier[np] . identifier[NaN] keyword[for] identifier[asset] keyword[in] identifier[assets] : keyword[if] keyword[not] identifier[asset] . identifier[is_alive_for_session] ( identifier[session_label] ): identifier[closes] . identifier[append] ( identifier[np] . identifier[NaN] ) keyword[continue] keyword[if] identifier[prev_dt] keyword[is] keyword[None] : identifier[val] = identifier[self] . identifier[_minute_reader] . identifier[get_value] ( identifier[asset] , identifier[dt] , literal[string] ) identifier[entries] [ identifier[asset] ]=( identifier[dt_value] , identifier[val] ) identifier[closes] . identifier[append] ( identifier[val] ) keyword[continue] keyword[else] : keyword[try] : identifier[last_visited_dt] , identifier[last_close] = identifier[entries] [ identifier[asset] ] keyword[if] identifier[last_visited_dt] == identifier[dt_value] : identifier[closes] . identifier[append] ( identifier[last_close] ) keyword[continue] keyword[elif] identifier[last_visited_dt] == identifier[prev_dt] : identifier[val] = identifier[self] . identifier[_minute_reader] . 
identifier[get_value] ( identifier[asset] , identifier[dt] , literal[string] ) keyword[if] identifier[pd] . identifier[isnull] ( identifier[val] ): identifier[val] = identifier[last_close] identifier[entries] [ identifier[asset] ]=( identifier[dt_value] , identifier[val] ) identifier[closes] . identifier[append] ( identifier[val] ) keyword[continue] keyword[else] : identifier[val] = identifier[self] . identifier[_minute_reader] . identifier[get_value] ( identifier[asset] , identifier[dt] , literal[string] ) keyword[if] identifier[pd] . identifier[isnull] ( identifier[val] ): identifier[val] = identifier[_get_filled_close] ( identifier[asset] ) identifier[entries] [ identifier[asset] ]=( identifier[dt_value] , identifier[val] ) identifier[closes] . identifier[append] ( identifier[val] ) keyword[continue] keyword[except] identifier[KeyError] : identifier[val] = identifier[self] . identifier[_minute_reader] . identifier[get_value] ( identifier[asset] , identifier[dt] , literal[string] ) keyword[if] identifier[pd] . identifier[isnull] ( identifier[val] ): identifier[val] = identifier[_get_filled_close] ( identifier[asset] ) identifier[entries] [ identifier[asset] ]=( identifier[dt_value] , identifier[val] ) identifier[closes] . identifier[append] ( identifier[val] ) keyword[continue] keyword[return] identifier[np] . identifier[array] ( identifier[closes] )
def closes(self, assets, dt): """ The close field's aggregation returns the latest close at the given dt. If the close for the given dt is `nan`, the most recent non-nan `close` is used. If there has been no data on or before the `dt` the close is `nan`. Returns ------- np.array with dtype=float64, in order of assets parameter. """ (market_open, prev_dt, dt_value, entries) = self._prelude(dt, 'close') closes = [] session_label = self._trading_calendar.minute_to_session_label(dt) def _get_filled_close(asset): """ Returns the most recent non-nan close for the asset in this session. If there has been no data in this session on or before the `dt`, returns `nan` """ window = self._minute_reader.load_raw_arrays(['close'], market_open, dt, [asset])[0] try: return window[~np.isnan(window)][-1] # depends on [control=['try'], data=[]] except IndexError: return np.NaN # depends on [control=['except'], data=[]] for asset in assets: if not asset.is_alive_for_session(session_label): closes.append(np.NaN) continue # depends on [control=['if'], data=[]] if prev_dt is None: val = self._minute_reader.get_value(asset, dt, 'close') entries[asset] = (dt_value, val) closes.append(val) continue # depends on [control=['if'], data=[]] else: try: (last_visited_dt, last_close) = entries[asset] if last_visited_dt == dt_value: closes.append(last_close) continue # depends on [control=['if'], data=[]] elif last_visited_dt == prev_dt: val = self._minute_reader.get_value(asset, dt, 'close') if pd.isnull(val): val = last_close # depends on [control=['if'], data=[]] entries[asset] = (dt_value, val) closes.append(val) continue # depends on [control=['if'], data=[]] else: val = self._minute_reader.get_value(asset, dt, 'close') if pd.isnull(val): val = _get_filled_close(asset) # depends on [control=['if'], data=[]] entries[asset] = (dt_value, val) closes.append(val) continue # depends on [control=['try'], data=[]] except KeyError: val = self._minute_reader.get_value(asset, dt, 'close') if 
pd.isnull(val): val = _get_filled_close(asset) # depends on [control=['if'], data=[]] entries[asset] = (dt_value, val) closes.append(val) continue # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['asset']] return np.array(closes)
def randomCharField(self, model_class, field_name):
    """
    Return a random value suitable for `field_name` on `model_class`.

    If the field declares choices, a random choice key is returned.
    Result of: `available_choices`
    [
        ('project', 'I wanna to talk about project'),
        ('feedback', 'I want to report a bugs or give feedback'),
        ('hello', 'I just want to say hello')
    ]

    If the field has no choices accessor (an AttributeError is raised while
    looking it up), a random placeholder sentence is returned instead.
    """
    try:
        # [1:] skips the leading blank choice entry.
        available_choices = model_class._meta.get_field(field_name).get_choices()[1:]
        return self.randomize([ci[0] for ci in available_choices])
    except AttributeError:
        lst = [
            "Enthusiastically whiteboard synergistic methods",
            "Authoritatively scale progressive meta-services through",
            "Objectively implement client-centered supply chains via stand-alone",
            "Phosfluorescently productize accurate products after cooperative results",
            "Appropriately drive cutting-edge systems before optimal scenarios",
            # BUG FIX: a missing comma previously concatenated the next two
            # literals into a single 7th list element.
            "Uniquely productize viral ROI for competitive e-markets",
            "Uniquely repurpose high-quality models vis-a-vis",
            "Django is Fucking Awesome? Yes"
        ]
        return self.randomize(lst)
def function[randomCharField, parameter[self, model_class, field_name]]: constant[ Checking if `field_name` has choices. Then, returning random value from it. Result of: `available_choices` [ ('project', 'I wanna to talk about project'), ('feedback', 'I want to report a bugs or give feedback'), ('hello', 'I just want to say hello') ] ] <ast.Try object at 0x7da1b0ca66e0>
keyword[def] identifier[randomCharField] ( identifier[self] , identifier[model_class] , identifier[field_name] ): literal[string] keyword[try] : identifier[available_choices] = identifier[model_class] . identifier[_meta] . identifier[get_field] ( identifier[field_name] ). identifier[get_choices] ()[ literal[int] :] keyword[return] identifier[self] . identifier[randomize] ([ identifier[ci] [ literal[int] ] keyword[for] identifier[ci] keyword[in] identifier[available_choices] ]) keyword[except] identifier[AttributeError] : identifier[lst] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] literal[string] , literal[string] ] keyword[return] identifier[self] . identifier[randomize] ( identifier[lst] )
def randomCharField(self, model_class, field_name): """ Checking if `field_name` has choices. Then, returning random value from it. Result of: `available_choices` [ ('project', 'I wanna to talk about project'), ('feedback', 'I want to report a bugs or give feedback'), ('hello', 'I just want to say hello') ] """ try: available_choices = model_class._meta.get_field(field_name).get_choices()[1:] return self.randomize([ci[0] for ci in available_choices]) # depends on [control=['try'], data=[]] except AttributeError: lst = ['Enthusiastically whiteboard synergistic methods', 'Authoritatively scale progressive meta-services through', 'Objectively implement client-centered supply chains via stand-alone', 'Phosfluorescently productize accurate products after cooperative results', 'Appropriately drive cutting-edge systems before optimal scenarios', 'Uniquely productize viral ROI for competitive e-marketsUniquely repurpose high-quality models vis-a-vis', 'Django is Fucking Awesome? Yes'] return self.randomize(lst) # depends on [control=['except'], data=[]]
def expand_attribute_strings(
        attribute_strings, quote_char='\"', missing_value="", usecols=None):
    """
    Parse the GTF attribute column, which contains a variable number of
    key/value pairs of the form: "key1 value1; key2 value2;"

    Parameters
    ----------
    attribute_strings : list of str

    quote_char : str
        Quote character to strip from values.

    missing_value : any
        Value assigned to a column for rows where the attribute is absent.

    usecols : list of str or None
        If not None, only expand attributes named in this collection;
        otherwise expand every attribute encountered.

    Returns OrderedDict mapping each attribute name to a list of values (one
    per input row), in the order the attribute names first appeared.
    """
    num_rows = len(attribute_strings)
    columns = {}
    ordered_names = []

    # Millions of repeated strings (e.g. "gene_id", "TP53") flow through this
    # parser, so strings are interned to keep a single object per distinct
    # string. Py2 refuses to intern unicode objects, hence intern(str(...)).
    # Checking our own local dicts first is also faster than relying on the
    # interpreter's interning alone.
    interned_names = {}
    interned_values = {}

    for row_index, attributes in enumerate(attribute_strings):
        for pair in attributes.split(";"):
            # Keep only the first two whitespace-separated fields: Ensembl
            # release 79 introduced values that themselves contain spaces,
            # e.g. transcript_support_level "1 (assigned to previous version 5)";
            fields = pair.strip().split(" ", 2)[:2]
            if len(fields) != 2:
                continue
            name, value = fields

            try:
                name = interned_names[name]
            except KeyError:
                name = intern(str(name))
                interned_names[name] = name

            if usecols is not None and name not in usecols:
                continue

            try:
                values = columns[name]
            except KeyError:
                values = [missing_value] * num_rows
                columns[name] = values
                ordered_names.append(name)

            if value.startswith(quote_char):
                value = value.replace(quote_char, "")

            try:
                value = interned_values[value]
            except KeyError:
                value = intern(str(value))
                interned_values[value] = value

            # An attribute repeated within one row accumulates into a
            # comma-separated string.
            previous = values[row_index]
            if previous is missing_value:
                values[row_index] = value
            else:
                values[row_index] = "%s,%s" % (previous, value)

    logging.info("Extracted GTF attributes: %s" % ordered_names)
    return OrderedDict(
        (name, columns[name]) for name in ordered_names)
def function[expand_attribute_strings, parameter[attribute_strings, quote_char, missing_value, usecols]]: constant[ The last column of GTF has a variable number of key value pairs of the format: "key1 value1; key2 value2;" Parse these into a dictionary mapping each key onto a list of values, where the value is None for any row where the key was missing. Parameters ---------- attribute_strings : list of str quote_char : str Quote character to remove from values missing_value : any If an attribute is missing from a row, give it this value. usecols : list of str or None If not None, then only expand columns included in this set, otherwise use all columns. Returns OrderedDict of column->value list mappings, in the order they appeared in the attribute strings. ] variable[n] assign[=] call[name[len], parameter[name[attribute_strings]]] variable[extra_columns] assign[=] dictionary[[], []] variable[column_order] assign[=] list[[]] variable[column_interned_strings] assign[=] dictionary[[], []] variable[value_interned_strings] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da1b2345ed0>, <ast.Name object at 0x7da1b2344250>]]] in starred[call[name[enumerate], parameter[name[attribute_strings]]]] begin[:] for taget[name[kv]] in starred[call[name[attribute_string].split, parameter[constant[;]]]] begin[:] variable[parts] assign[=] call[call[call[name[kv].strip, parameter[]].split, parameter[constant[ ], constant[2]]]][<ast.Slice object at 0x7da2047e9780>] if compare[call[name[len], parameter[name[parts]]] not_equal[!=] constant[2]] begin[:] continue <ast.Tuple object at 0x7da1b2346350> assign[=] name[parts] <ast.Try object at 0x7da1b2347e50> if <ast.BoolOp object at 0x7da1b2347550> begin[:] continue <ast.Try object at 0x7da1b23461a0> variable[value] assign[=] <ast.IfExp object at 0x7da1b2344100> <ast.Try object at 0x7da20c76d870> variable[old_value] assign[=] call[name[column]][name[i]] if compare[name[old_value] is name[missing_value]] begin[:] 
call[name[column]][name[i]] assign[=] name[value] call[name[logging].info, parameter[binary_operation[constant[Extracted GTF attributes: %s] <ast.Mod object at 0x7da2590d6920> name[column_order]]]] return[call[name[OrderedDict], parameter[<ast.GeneratorExp object at 0x7da20c76ec80>]]]
keyword[def] identifier[expand_attribute_strings] ( identifier[attribute_strings] , identifier[quote_char] = literal[string] , identifier[missing_value] = literal[string] , identifier[usecols] = keyword[None] ): literal[string] identifier[n] = identifier[len] ( identifier[attribute_strings] ) identifier[extra_columns] ={} identifier[column_order] =[] identifier[column_interned_strings] ={} identifier[value_interned_strings] ={} keyword[for] ( identifier[i] , identifier[attribute_string] ) keyword[in] identifier[enumerate] ( identifier[attribute_strings] ): keyword[for] identifier[kv] keyword[in] identifier[attribute_string] . identifier[split] ( literal[string] ): identifier[parts] = identifier[kv] . identifier[strip] (). identifier[split] ( literal[string] , literal[int] )[: literal[int] ] keyword[if] identifier[len] ( identifier[parts] )!= literal[int] : keyword[continue] identifier[column_name] , identifier[value] = identifier[parts] keyword[try] : identifier[column_name] = identifier[column_interned_strings] [ identifier[column_name] ] keyword[except] identifier[KeyError] : identifier[column_name] = identifier[intern] ( identifier[str] ( identifier[column_name] )) identifier[column_interned_strings] [ identifier[column_name] ]= identifier[column_name] keyword[if] identifier[usecols] keyword[is] keyword[not] keyword[None] keyword[and] identifier[column_name] keyword[not] keyword[in] identifier[usecols] : keyword[continue] keyword[try] : identifier[column] = identifier[extra_columns] [ identifier[column_name] ] keyword[except] identifier[KeyError] : identifier[column] =[ identifier[missing_value] ]* identifier[n] identifier[extra_columns] [ identifier[column_name] ]= identifier[column] identifier[column_order] . identifier[append] ( identifier[column_name] ) identifier[value] = identifier[value] . identifier[replace] ( identifier[quote_char] , literal[string] ) keyword[if] identifier[value] . 
identifier[startswith] ( identifier[quote_char] ) keyword[else] identifier[value] keyword[try] : identifier[value] = identifier[value_interned_strings] [ identifier[value] ] keyword[except] identifier[KeyError] : identifier[value] = identifier[intern] ( identifier[str] ( identifier[value] )) identifier[value_interned_strings] [ identifier[value] ]= identifier[value] identifier[old_value] = identifier[column] [ identifier[i] ] keyword[if] identifier[old_value] keyword[is] identifier[missing_value] : identifier[column] [ identifier[i] ]= identifier[value] keyword[else] : identifier[column] [ identifier[i] ]= literal[string] %( identifier[old_value] , identifier[value] ) identifier[logging] . identifier[info] ( literal[string] % identifier[column_order] ) keyword[return] identifier[OrderedDict] ( ( identifier[column_name] , identifier[extra_columns] [ identifier[column_name] ]) keyword[for] identifier[column_name] keyword[in] identifier[column_order] )
def expand_attribute_strings(attribute_strings, quote_char='"', missing_value='', usecols=None): """ The last column of GTF has a variable number of key value pairs of the format: "key1 value1; key2 value2;" Parse these into a dictionary mapping each key onto a list of values, where the value is None for any row where the key was missing. Parameters ---------- attribute_strings : list of str quote_char : str Quote character to remove from values missing_value : any If an attribute is missing from a row, give it this value. usecols : list of str or None If not None, then only expand columns included in this set, otherwise use all columns. Returns OrderedDict of column->value list mappings, in the order they appeared in the attribute strings. """ n = len(attribute_strings) extra_columns = {} column_order = [] # # SOME NOTES ABOUT THE BIZARRE STRING INTERNING GOING ON BELOW # # While parsing millions of repeated strings (e.g. "gene_id" and "TP53"), # we can save a lot of memory by making sure there's only one string # object per unique string. The canonical way to do this is using # the 'intern' function. One problem is that Py2 won't let you intern # unicode objects, so to get around this we call intern(str(...)). # # It also turns out to be faster to check interned strings ourselves # using a local dictionary, hence the two dictionaries below # and pair of try/except blocks in the loop. column_interned_strings = {} value_interned_strings = {} for (i, attribute_string) in enumerate(attribute_strings): for kv in attribute_string.split(';'): # We're slicing the first two elements out of split() because # Ensembl release 79 added values like: # transcript_support_level "1 (assigned to previous version 5)"; # ...which gets mangled by splitting on spaces. 
parts = kv.strip().split(' ', 2)[:2] if len(parts) != 2: continue # depends on [control=['if'], data=[]] (column_name, value) = parts try: column_name = column_interned_strings[column_name] # depends on [control=['try'], data=[]] except KeyError: column_name = intern(str(column_name)) column_interned_strings[column_name] = column_name # depends on [control=['except'], data=[]] if usecols is not None and column_name not in usecols: continue # depends on [control=['if'], data=[]] try: column = extra_columns[column_name] # depends on [control=['try'], data=[]] except KeyError: column = [missing_value] * n extra_columns[column_name] = column column_order.append(column_name) # depends on [control=['except'], data=[]] value = value.replace(quote_char, '') if value.startswith(quote_char) else value try: value = value_interned_strings[value] # depends on [control=['try'], data=[]] except KeyError: value = intern(str(value)) value_interned_strings[value] = value # depends on [control=['except'], data=[]] # if an attribute is used repeatedly then # keep track of all its values in a list old_value = column[i] if old_value is missing_value: column[i] = value # depends on [control=['if'], data=[]] else: column[i] = '%s,%s' % (old_value, value) # depends on [control=['for'], data=['kv']] # depends on [control=['for'], data=[]] logging.info('Extracted GTF attributes: %s' % column_order) return OrderedDict(((column_name, extra_columns[column_name]) for column_name in column_order))
def find(self, **filter_args):
    """
    Find exactly one resource in scope of this manager, by matching resource
    properties against the specified filter arguments, and return its Python
    resource object (e.g. for a CPC, a :class:`~zhmcclient.Cpc` object is
    returned).

    Any resource property may be specified in a filter argument; see
    :ref:`Filtering` for details. Properties that can be filtered on the HMC
    are filtered there (this varies by resource type); the remaining
    properties are filtered on the client side. If "name" is the only filter
    argument, an optimized lookup through this manager's name-to-URI cache is
    performed, using exact matching (the value is not interpreted as a
    regular expression).

    Authorization requirements:

    * see the `list()` method in the derived classes.

    Parameters:

      \\**filter_args:
        All keyword arguments are used as filter arguments. Specifying no
        keyword arguments causes no filtering to happen.

    Returns:

      Resource object in scope of this manager object that matches the
      filter arguments. This resource object has a minimal set of
      properties.

    Raises:

      :exc:`~zhmcclient.NotFound`: No matching resource found.

      :exc:`~zhmcclient.NoUniqueMatch`: More than one matching resource
        found.

      : Exceptions raised by the `list()` methods in derived resource
        manager classes (see :ref:`Resources`).
    """
    matches = self.findall(**filter_args)
    num_matches = len(matches)
    if num_matches == 1:
        return matches[0]
    if num_matches == 0:
        raise NotFound(filter_args, self)
    raise NoUniqueMatch(filter_args, self, matches)
def function[find, parameter[self]]: constant[ Find exactly one resource in scope of this manager, by matching resource properties against the specified filter arguments, and return its Python resource object (e.g. for a CPC, a :class:`~zhmcclient.Cpc` object is returned). Any resource property may be specified in a filter argument. For details about filter arguments, see :ref:`Filtering`. The zhmcclient implementation handles the specified properties in an optimized way: Properties that can be filtered on the HMC are actually filtered there (this varies by resource type), and the remaining properties are filtered on the client side. If the "name" property is specified as the only filter argument, an optimized lookup is performed that uses a name-to-URI cache in this manager object. This this optimized lookup uses the specified match value for exact matching and is not interpreted as a regular expression. Authorization requirements: * see the `list()` method in the derived classes. Parameters: \**filter_args: All keyword arguments are used as filter arguments. Specifying no keyword arguments causes no filtering to happen. See the examples for usage details. Returns: Resource object in scope of this manager object that matches the filter arguments. This resource object has a minimal set of properties. Raises: :exc:`~zhmcclient.NotFound`: No matching resource found. :exc:`~zhmcclient.NoUniqueMatch`: More than one matching resource found. : Exceptions raised by the `list()` methods in derived resource manager classes (see :ref:`Resources`). Examples: * The following example finds a CPC by its name. Because the 'name' resource property is also a valid Python variable name, there are two ways for the caller to specify the filter arguments for this method: As named parameters:: cpc = client.cpcs.find(name='CPC001') As a parameter dictionary:: filter_args = {'name': 'CPC0001'} cpc = client.cpcs.find(**filter_args) * The following example finds a CPC by its object ID. 
Because the 'object-id' resource property is not a valid Python variable name, the caller can specify the filter argument only as a parameter dictionary:: filter_args = {'object-id': '12345-abc...de-12345'} cpc = client.cpcs.find(**filter_args) ] variable[obj_list] assign[=] call[name[self].findall, parameter[]] variable[num_objs] assign[=] call[name[len], parameter[name[obj_list]]] if compare[name[num_objs] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da18f58f8e0>
keyword[def] identifier[find] ( identifier[self] ,** identifier[filter_args] ): literal[string] identifier[obj_list] = identifier[self] . identifier[findall] (** identifier[filter_args] ) identifier[num_objs] = identifier[len] ( identifier[obj_list] ) keyword[if] identifier[num_objs] == literal[int] : keyword[raise] identifier[NotFound] ( identifier[filter_args] , identifier[self] ) keyword[elif] identifier[num_objs] > literal[int] : keyword[raise] identifier[NoUniqueMatch] ( identifier[filter_args] , identifier[self] , identifier[obj_list] ) keyword[else] : keyword[return] identifier[obj_list] [ literal[int] ]
def find(self, **filter_args): """ Find exactly one resource in scope of this manager, by matching resource properties against the specified filter arguments, and return its Python resource object (e.g. for a CPC, a :class:`~zhmcclient.Cpc` object is returned). Any resource property may be specified in a filter argument. For details about filter arguments, see :ref:`Filtering`. The zhmcclient implementation handles the specified properties in an optimized way: Properties that can be filtered on the HMC are actually filtered there (this varies by resource type), and the remaining properties are filtered on the client side. If the "name" property is specified as the only filter argument, an optimized lookup is performed that uses a name-to-URI cache in this manager object. This this optimized lookup uses the specified match value for exact matching and is not interpreted as a regular expression. Authorization requirements: * see the `list()` method in the derived classes. Parameters: \\**filter_args: All keyword arguments are used as filter arguments. Specifying no keyword arguments causes no filtering to happen. See the examples for usage details. Returns: Resource object in scope of this manager object that matches the filter arguments. This resource object has a minimal set of properties. Raises: :exc:`~zhmcclient.NotFound`: No matching resource found. :exc:`~zhmcclient.NoUniqueMatch`: More than one matching resource found. : Exceptions raised by the `list()` methods in derived resource manager classes (see :ref:`Resources`). Examples: * The following example finds a CPC by its name. Because the 'name' resource property is also a valid Python variable name, there are two ways for the caller to specify the filter arguments for this method: As named parameters:: cpc = client.cpcs.find(name='CPC001') As a parameter dictionary:: filter_args = {'name': 'CPC0001'} cpc = client.cpcs.find(**filter_args) * The following example finds a CPC by its object ID. 
Because the 'object-id' resource property is not a valid Python variable name, the caller can specify the filter argument only as a parameter dictionary:: filter_args = {'object-id': '12345-abc...de-12345'} cpc = client.cpcs.find(**filter_args) """ obj_list = self.findall(**filter_args) num_objs = len(obj_list) if num_objs == 0: raise NotFound(filter_args, self) # depends on [control=['if'], data=[]] elif num_objs > 1: raise NoUniqueMatch(filter_args, self, obj_list) # depends on [control=['if'], data=[]] else: return obj_list[0]
def publish(context):
    """Record local changes, commit them, and push the branch to GitHub."""
    header('Recording changes...')
    run('git add -A')

    header('Displaying changes...')
    run('git -c color.status=always status')

    if not click.confirm('\nContinue publishing'):
        run('git reset HEAD --')
        abort(context)

    header('Saving changes...')
    commit_message = 'Publishing {}'.format(choose_commit_emoji())
    try:
        run('git commit -m "{message}"'.format(message=commit_message),
            capture=True)
    except subprocess.CalledProcessError as e:
        # Having nothing staged is not an error for this command.
        if 'nothing to commit' not in e.stdout:
            raise
        click.echo('Nothing to commit.')

    header('Pushing to GitHub...')
    branch = get_branch()
    run('git push origin {branch}:{branch}'.format(branch=branch))

    pr_link = get_pr_link(branch)
    if pr_link:
        click.launch(pr_link)
def function[publish, parameter[context]]: constant[Saves changes and sends them to GitHub] call[name[header], parameter[constant[Recording changes...]]] call[name[run], parameter[constant[git add -A]]] call[name[header], parameter[constant[Displaying changes...]]] call[name[run], parameter[constant[git -c color.status=always status]]] if <ast.UnaryOp object at 0x7da204567340> begin[:] call[name[run], parameter[constant[git reset HEAD --]]] call[name[abort], parameter[name[context]]] call[name[header], parameter[constant[Saving changes...]]] <ast.Try object at 0x7da204564be0> call[name[header], parameter[constant[Pushing to GitHub...]]] variable[branch] assign[=] call[name[get_branch], parameter[]] call[name[run], parameter[call[constant[git push origin {branch}:{branch}].format, parameter[]]]] variable[pr_link] assign[=] call[name[get_pr_link], parameter[name[branch]]] if name[pr_link] begin[:] call[name[click].launch, parameter[name[pr_link]]]
keyword[def] identifier[publish] ( identifier[context] ): literal[string] identifier[header] ( literal[string] ) identifier[run] ( literal[string] ) identifier[header] ( literal[string] ) identifier[run] ( literal[string] ) keyword[if] keyword[not] identifier[click] . identifier[confirm] ( literal[string] ): identifier[run] ( literal[string] ) identifier[abort] ( identifier[context] ) identifier[header] ( literal[string] ) keyword[try] : identifier[run] ( literal[string] . identifier[format] ( identifier[message] = literal[string] . identifier[format] ( identifier[choose_commit_emoji] ()) ), identifier[capture] = keyword[True] ) keyword[except] identifier[subprocess] . identifier[CalledProcessError] keyword[as] identifier[e] : keyword[if] literal[string] keyword[not] keyword[in] identifier[e] . identifier[stdout] : keyword[raise] keyword[else] : identifier[click] . identifier[echo] ( literal[string] ) identifier[header] ( literal[string] ) identifier[branch] = identifier[get_branch] () identifier[run] ( literal[string] . identifier[format] ( identifier[branch] = identifier[branch] )) identifier[pr_link] = identifier[get_pr_link] ( identifier[branch] ) keyword[if] identifier[pr_link] : identifier[click] . identifier[launch] ( identifier[pr_link] )
def publish(context): """Saves changes and sends them to GitHub""" header('Recording changes...') run('git add -A') header('Displaying changes...') run('git -c color.status=always status') if not click.confirm('\nContinue publishing'): run('git reset HEAD --') abort(context) # depends on [control=['if'], data=[]] header('Saving changes...') try: run('git commit -m "{message}"'.format(message='Publishing {}'.format(choose_commit_emoji())), capture=True) # depends on [control=['try'], data=[]] except subprocess.CalledProcessError as e: if 'nothing to commit' not in e.stdout: raise # depends on [control=['if'], data=[]] else: click.echo('Nothing to commit.') # depends on [control=['except'], data=['e']] header('Pushing to GitHub...') branch = get_branch() run('git push origin {branch}:{branch}'.format(branch=branch)) pr_link = get_pr_link(branch) if pr_link: click.launch(pr_link) # depends on [control=['if'], data=[]]
async def set_room_temperatures(self, room_id, sleep_temp=None,
                                comfort_temp=None, away_temp=None):
    """Set room temps.

    Updates only the temperatures that are passed (not None); the room's
    other temperatures are kept. Sends the updated values to the service via
    the "changeRoomModeTempInfo" request and re-caches the room locally.

    :param room_id: id of the room to update (must exist in self.rooms,
        otherwise an error is logged and nothing happens)
    :param sleep_temp: new sleep temperature, or None to keep the current one
    :param comfort_temp: new comfort temperature, or None to keep it
    :param away_temp: new away temperature, or None to keep it
    """
    if sleep_temp is None and comfort_temp is None and away_temp is None:
        # Nothing to change.
        return
    room = self.rooms.get(room_id)
    if room is None:
        _LOGGER.error("No such device")
        return
    # BUG FIX: compare against None rather than truthiness, so a valid
    # temperature of 0 is applied instead of being silently ignored.
    if sleep_temp is not None:
        room.sleep_temp = sleep_temp
    if away_temp is not None:
        room.away_temp = away_temp
    if comfort_temp is not None:
        room.comfort_temp = comfort_temp
    payload = {"roomId": room_id,
               "sleepTemp": room.sleep_temp,
               "comfortTemp": room.comfort_temp,
               "awayTemp": room.away_temp,
               "homeType": 0}
    await self.request("changeRoomModeTempInfo", payload)
    self.rooms[room_id] = room
<ast.AsyncFunctionDef object at 0x7da20c76c5e0>
keyword[async] keyword[def] identifier[set_room_temperatures] ( identifier[self] , identifier[room_id] , identifier[sleep_temp] = keyword[None] , identifier[comfort_temp] = keyword[None] , identifier[away_temp] = keyword[None] ): literal[string] keyword[if] identifier[sleep_temp] keyword[is] keyword[None] keyword[and] identifier[comfort_temp] keyword[is] keyword[None] keyword[and] identifier[away_temp] keyword[is] keyword[None] : keyword[return] identifier[room] = identifier[self] . identifier[rooms] . identifier[get] ( identifier[room_id] ) keyword[if] identifier[room] keyword[is] keyword[None] : identifier[_LOGGER] . identifier[error] ( literal[string] ) keyword[return] identifier[room] . identifier[sleep_temp] = identifier[sleep_temp] keyword[if] identifier[sleep_temp] keyword[else] identifier[room] . identifier[sleep_temp] identifier[room] . identifier[away_temp] = identifier[away_temp] keyword[if] identifier[away_temp] keyword[else] identifier[room] . identifier[away_temp] identifier[room] . identifier[comfort_temp] = identifier[comfort_temp] keyword[if] identifier[comfort_temp] keyword[else] identifier[room] . identifier[comfort_temp] identifier[payload] ={ literal[string] : identifier[room_id] , literal[string] : identifier[room] . identifier[sleep_temp] , literal[string] : identifier[room] . identifier[comfort_temp] , literal[string] : identifier[room] . identifier[away_temp] , literal[string] : literal[int] } keyword[await] identifier[self] . identifier[request] ( literal[string] , identifier[payload] ) identifier[self] . identifier[rooms] [ identifier[room_id] ]= identifier[room]
async def set_room_temperatures(self, room_id, sleep_temp=None, comfort_temp=None, away_temp=None): """Set room temps.""" if sleep_temp is None and comfort_temp is None and (away_temp is None): return # depends on [control=['if'], data=[]] room = self.rooms.get(room_id) if room is None: _LOGGER.error('No such device') return # depends on [control=['if'], data=[]] room.sleep_temp = sleep_temp if sleep_temp else room.sleep_temp room.away_temp = away_temp if away_temp else room.away_temp room.comfort_temp = comfort_temp if comfort_temp else room.comfort_temp payload = {'roomId': room_id, 'sleepTemp': room.sleep_temp, 'comfortTemp': room.comfort_temp, 'awayTemp': room.away_temp, 'homeType': 0} await self.request('changeRoomModeTempInfo', payload) self.rooms[room_id] = room
def close_channel(self):
    """
    Close the channel with RabbitMQ cleanly by sending the Channel.Close
    RPC command, if a channel is currently set.
    """
    self._logger.info('Closing the channel')
    channel = self._channel
    if channel:
        channel.close()
def function[close_channel, parameter[self]]: constant[ Invoke this command to close the channel with RabbitMQ by sending the Channel.Close RPC command. ] call[name[self]._logger.info, parameter[constant[Closing the channel]]] if name[self]._channel begin[:] call[name[self]._channel.close, parameter[]]
keyword[def] identifier[close_channel] ( identifier[self] ): literal[string] identifier[self] . identifier[_logger] . identifier[info] ( literal[string] ) keyword[if] identifier[self] . identifier[_channel] : identifier[self] . identifier[_channel] . identifier[close] ()
def close_channel(self): """ Invoke this command to close the channel with RabbitMQ by sending the Channel.Close RPC command. """ self._logger.info('Closing the channel') if self._channel: self._channel.close() # depends on [control=['if'], data=[]]
def forever(self, key, value): """ Store an item in the cache indefinitely. :param key: The cache key :type key: str :param value: The value :type value: mixed """ self._store.forever(self.tagged_item_key(key), value)
def function[forever, parameter[self, key, value]]: constant[ Store an item in the cache indefinitely. :param key: The cache key :type key: str :param value: The value :type value: mixed ] call[name[self]._store.forever, parameter[call[name[self].tagged_item_key, parameter[name[key]]], name[value]]]
keyword[def] identifier[forever] ( identifier[self] , identifier[key] , identifier[value] ): literal[string] identifier[self] . identifier[_store] . identifier[forever] ( identifier[self] . identifier[tagged_item_key] ( identifier[key] ), identifier[value] )
def forever(self, key, value): """ Store an item in the cache indefinitely. :param key: The cache key :type key: str :param value: The value :type value: mixed """ self._store.forever(self.tagged_item_key(key), value)
def _initialize_variables(self): """Initializing the variables of the IP.""" # Initialize global variables. self._global_vars = {} # Indexed by (MTF dimension, mesh dimension) for mtf_dimension_name in ( self._layout_validator.splittable_mtf_dimension_names): for mesh_dimension_name in ( self._layout_validator.mesh_dimension_name_to_size): name = _global_var_name(mtf_dimension_name, mesh_dimension_name) self._global_vars[(mtf_dimension_name, mesh_dimension_name)] = ( self._model.NewBoolVar(name)) # Initialize local variables. self._local_vars = {} # Indexed by (tensorflow dimension set), then name of # assignment. for mtf_dimension_set in self._mtf_dimension_sets: self._local_vars[mtf_dimension_set] = {} for assignment in self._assignments[mtf_dimension_set]: # TODO(joshuawang): Avoid hash collision no matter what dimension names # are; don't hash by this local var name, swap to using a tuple encoding # of the full assignment instead. name = _local_var_name(mtf_dimension_set, assignment) self._local_vars[mtf_dimension_set][name] = ( self._model.NewBoolVar(name)) # Initialize memory variable. We need a crude upper bound on memory, so we # use the total size of all tensors under the empty assignment. # NOTE(joshuawang): This bound could be improved by factoring in the # schedule. memory_upper_bound = 0 for tensor_name in self._graph.get_all_tensor_names(): if self._graph.is_tensor_on_canonical_device(tensor_name): memory_upper_bound += int(self._graph.get_tensor_size(tensor_name)) self._memory_var = self._model.NewIntVar(0, memory_upper_bound, "z")
def function[_initialize_variables, parameter[self]]: constant[Initializing the variables of the IP.] name[self]._global_vars assign[=] dictionary[[], []] for taget[name[mtf_dimension_name]] in starred[name[self]._layout_validator.splittable_mtf_dimension_names] begin[:] for taget[name[mesh_dimension_name]] in starred[name[self]._layout_validator.mesh_dimension_name_to_size] begin[:] variable[name] assign[=] call[name[_global_var_name], parameter[name[mtf_dimension_name], name[mesh_dimension_name]]] call[name[self]._global_vars][tuple[[<ast.Name object at 0x7da207f011e0>, <ast.Name object at 0x7da207f00610>]]] assign[=] call[name[self]._model.NewBoolVar, parameter[name[name]]] name[self]._local_vars assign[=] dictionary[[], []] for taget[name[mtf_dimension_set]] in starred[name[self]._mtf_dimension_sets] begin[:] call[name[self]._local_vars][name[mtf_dimension_set]] assign[=] dictionary[[], []] for taget[name[assignment]] in starred[call[name[self]._assignments][name[mtf_dimension_set]]] begin[:] variable[name] assign[=] call[name[_local_var_name], parameter[name[mtf_dimension_set], name[assignment]]] call[call[name[self]._local_vars][name[mtf_dimension_set]]][name[name]] assign[=] call[name[self]._model.NewBoolVar, parameter[name[name]]] variable[memory_upper_bound] assign[=] constant[0] for taget[name[tensor_name]] in starred[call[name[self]._graph.get_all_tensor_names, parameter[]]] begin[:] if call[name[self]._graph.is_tensor_on_canonical_device, parameter[name[tensor_name]]] begin[:] <ast.AugAssign object at 0x7da207f014e0> name[self]._memory_var assign[=] call[name[self]._model.NewIntVar, parameter[constant[0], name[memory_upper_bound], constant[z]]]
keyword[def] identifier[_initialize_variables] ( identifier[self] ): literal[string] identifier[self] . identifier[_global_vars] ={} keyword[for] identifier[mtf_dimension_name] keyword[in] ( identifier[self] . identifier[_layout_validator] . identifier[splittable_mtf_dimension_names] ): keyword[for] identifier[mesh_dimension_name] keyword[in] ( identifier[self] . identifier[_layout_validator] . identifier[mesh_dimension_name_to_size] ): identifier[name] = identifier[_global_var_name] ( identifier[mtf_dimension_name] , identifier[mesh_dimension_name] ) identifier[self] . identifier[_global_vars] [( identifier[mtf_dimension_name] , identifier[mesh_dimension_name] )]=( identifier[self] . identifier[_model] . identifier[NewBoolVar] ( identifier[name] )) identifier[self] . identifier[_local_vars] ={} keyword[for] identifier[mtf_dimension_set] keyword[in] identifier[self] . identifier[_mtf_dimension_sets] : identifier[self] . identifier[_local_vars] [ identifier[mtf_dimension_set] ]={} keyword[for] identifier[assignment] keyword[in] identifier[self] . identifier[_assignments] [ identifier[mtf_dimension_set] ]: identifier[name] = identifier[_local_var_name] ( identifier[mtf_dimension_set] , identifier[assignment] ) identifier[self] . identifier[_local_vars] [ identifier[mtf_dimension_set] ][ identifier[name] ]=( identifier[self] . identifier[_model] . identifier[NewBoolVar] ( identifier[name] )) identifier[memory_upper_bound] = literal[int] keyword[for] identifier[tensor_name] keyword[in] identifier[self] . identifier[_graph] . identifier[get_all_tensor_names] (): keyword[if] identifier[self] . identifier[_graph] . identifier[is_tensor_on_canonical_device] ( identifier[tensor_name] ): identifier[memory_upper_bound] += identifier[int] ( identifier[self] . identifier[_graph] . identifier[get_tensor_size] ( identifier[tensor_name] )) identifier[self] . identifier[_memory_var] = identifier[self] . identifier[_model] . 
identifier[NewIntVar] ( literal[int] , identifier[memory_upper_bound] , literal[string] )
def _initialize_variables(self): """Initializing the variables of the IP.""" # Initialize global variables. self._global_vars = {} # Indexed by (MTF dimension, mesh dimension) for mtf_dimension_name in self._layout_validator.splittable_mtf_dimension_names: for mesh_dimension_name in self._layout_validator.mesh_dimension_name_to_size: name = _global_var_name(mtf_dimension_name, mesh_dimension_name) self._global_vars[mtf_dimension_name, mesh_dimension_name] = self._model.NewBoolVar(name) # depends on [control=['for'], data=['mesh_dimension_name']] # depends on [control=['for'], data=['mtf_dimension_name']] # Initialize local variables. self._local_vars = {} # Indexed by (tensorflow dimension set), then name of # assignment. for mtf_dimension_set in self._mtf_dimension_sets: self._local_vars[mtf_dimension_set] = {} for assignment in self._assignments[mtf_dimension_set]: # TODO(joshuawang): Avoid hash collision no matter what dimension names # are; don't hash by this local var name, swap to using a tuple encoding # of the full assignment instead. name = _local_var_name(mtf_dimension_set, assignment) self._local_vars[mtf_dimension_set][name] = self._model.NewBoolVar(name) # depends on [control=['for'], data=['assignment']] # depends on [control=['for'], data=['mtf_dimension_set']] # Initialize memory variable. We need a crude upper bound on memory, so we # use the total size of all tensors under the empty assignment. # NOTE(joshuawang): This bound could be improved by factoring in the # schedule. memory_upper_bound = 0 for tensor_name in self._graph.get_all_tensor_names(): if self._graph.is_tensor_on_canonical_device(tensor_name): memory_upper_bound += int(self._graph.get_tensor_size(tensor_name)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['tensor_name']] self._memory_var = self._model.NewIntVar(0, memory_upper_bound, 'z')
def natsorted(seq, key=lambda x: x, number_type=float, signed=True, exp=True): """\ Sorts a sequence naturally (alphabetically and numerically), not lexicographically. >>> a = ['num3', 'num5', 'num2'] >>> natsorted(a) ['num2', 'num3', 'num5'] >>> b = [('a', 'num3'), ('b', 'num5'), ('c', 'num2')] >>> from operator import itemgetter >>> natsorted(b, key=itemgetter(1)) [('c', 'num2'), ('a', 'num3'), ('b', 'num5')] """ return sorted(seq, key=lambda x: natsort_key(key(x), number_type=number_type, signed=signed, exp=exp))
def function[natsorted, parameter[seq, key, number_type, signed, exp]]: constant[ Sorts a sequence naturally (alphabetically and numerically), not lexicographically. >>> a = ['num3', 'num5', 'num2'] >>> natsorted(a) ['num2', 'num3', 'num5'] >>> b = [('a', 'num3'), ('b', 'num5'), ('c', 'num2')] >>> from operator import itemgetter >>> natsorted(b, key=itemgetter(1)) [('c', 'num2'), ('a', 'num3'), ('b', 'num5')] ] return[call[name[sorted], parameter[name[seq]]]]
keyword[def] identifier[natsorted] ( identifier[seq] , identifier[key] = keyword[lambda] identifier[x] : identifier[x] , identifier[number_type] = identifier[float] , identifier[signed] = keyword[True] , identifier[exp] = keyword[True] ): literal[string] keyword[return] identifier[sorted] ( identifier[seq] , identifier[key] = keyword[lambda] identifier[x] : identifier[natsort_key] ( identifier[key] ( identifier[x] ), identifier[number_type] = identifier[number_type] , identifier[signed] = identifier[signed] , identifier[exp] = identifier[exp] ))
def natsorted(seq, key=lambda x: x, number_type=float, signed=True, exp=True): """ Sorts a sequence naturally (alphabetically and numerically), not lexicographically. >>> a = ['num3', 'num5', 'num2'] >>> natsorted(a) ['num2', 'num3', 'num5'] >>> b = [('a', 'num3'), ('b', 'num5'), ('c', 'num2')] >>> from operator import itemgetter >>> natsorted(b, key=itemgetter(1)) [('c', 'num2'), ('a', 'num3'), ('b', 'num5')] """ return sorted(seq, key=lambda x: natsort_key(key(x), number_type=number_type, signed=signed, exp=exp))
def reset_max_values(self): """Reset the maximum values dict.""" self._max_values = {} for k in self._max_values_list: self._max_values[k] = 0.0
def function[reset_max_values, parameter[self]]: constant[Reset the maximum values dict.] name[self]._max_values assign[=] dictionary[[], []] for taget[name[k]] in starred[name[self]._max_values_list] begin[:] call[name[self]._max_values][name[k]] assign[=] constant[0.0]
keyword[def] identifier[reset_max_values] ( identifier[self] ): literal[string] identifier[self] . identifier[_max_values] ={} keyword[for] identifier[k] keyword[in] identifier[self] . identifier[_max_values_list] : identifier[self] . identifier[_max_values] [ identifier[k] ]= literal[int]
def reset_max_values(self): """Reset the maximum values dict.""" self._max_values = {} for k in self._max_values_list: self._max_values[k] = 0.0 # depends on [control=['for'], data=['k']]
def resample(y, orig_sr, target_sr, res_type='kaiser_best', fix=True, scale=False, **kwargs): """Resample a time series from orig_sr to target_sr Parameters ---------- y : np.ndarray [shape=(n,) or shape=(2, n)] audio time series. Can be mono or stereo. orig_sr : number > 0 [scalar] original sampling rate of `y` target_sr : number > 0 [scalar] target sampling rate res_type : str resample type (see note) .. note:: By default, this uses `resampy`'s high-quality mode ('kaiser_best'). To use a faster method, set `res_type='kaiser_fast'`. To use `scipy.signal.resample`, set `res_type='fft'` or `res_type='scipy'`. To use `scipy.signal.resample_poly`, set `res_type='polyphase'`. .. note:: When using `res_type='polyphase'`, only integer sampling rates are supported. fix : bool adjust the length of the resampled signal to be of size exactly `ceil(target_sr * len(y) / orig_sr)` scale : bool Scale the resampled signal so that `y` and `y_hat` have approximately equal total energy. kwargs : additional keyword arguments If `fix==True`, additional keyword arguments to pass to `librosa.util.fix_length`. Returns ------- y_hat : np.ndarray [shape=(n * target_sr / orig_sr,)] `y` resampled from `orig_sr` to `target_sr` Raises ------ ParameterError If `res_type='polyphase'` and `orig_sr` or `target_sr` are not both integer-valued. See Also -------- librosa.util.fix_length scipy.signal.resample resampy.resample Notes ----- This function caches at level 20. 
Examples -------- Downsample from 22 KHz to 8 KHz >>> y, sr = librosa.load(librosa.util.example_audio_file(), sr=22050) >>> y_8k = librosa.resample(y, sr, 8000) >>> y.shape, y_8k.shape ((1355168,), (491671,)) """ # First, validate the audio buffer util.valid_audio(y, mono=False) if orig_sr == target_sr: return y ratio = float(target_sr) / orig_sr n_samples = int(np.ceil(y.shape[-1] * ratio)) if res_type in ('scipy', 'fft'): y_hat = scipy.signal.resample(y, n_samples, axis=-1) elif res_type == 'polyphase': if int(orig_sr) != orig_sr or int(target_sr) != target_sr: raise ParameterError('polyphase resampling is only supported for integer-valued sampling rates.') # For polyphase resampling, we need up- and down-sampling ratios # We can get those from the greatest common divisor of the rates # as long as the rates are integrable orig_sr = int(orig_sr) target_sr = int(target_sr) gcd = np.gcd(orig_sr, target_sr) y_hat = scipy.signal.resample_poly(y, target_sr // gcd, orig_sr // gcd, axis=-1) else: y_hat = resampy.resample(y, orig_sr, target_sr, filter=res_type, axis=-1) if fix: y_hat = util.fix_length(y_hat, n_samples, **kwargs) if scale: y_hat /= np.sqrt(ratio) return np.ascontiguousarray(y_hat, dtype=y.dtype)
def function[resample, parameter[y, orig_sr, target_sr, res_type, fix, scale]]: constant[Resample a time series from orig_sr to target_sr Parameters ---------- y : np.ndarray [shape=(n,) or shape=(2, n)] audio time series. Can be mono or stereo. orig_sr : number > 0 [scalar] original sampling rate of `y` target_sr : number > 0 [scalar] target sampling rate res_type : str resample type (see note) .. note:: By default, this uses `resampy`'s high-quality mode ('kaiser_best'). To use a faster method, set `res_type='kaiser_fast'`. To use `scipy.signal.resample`, set `res_type='fft'` or `res_type='scipy'`. To use `scipy.signal.resample_poly`, set `res_type='polyphase'`. .. note:: When using `res_type='polyphase'`, only integer sampling rates are supported. fix : bool adjust the length of the resampled signal to be of size exactly `ceil(target_sr * len(y) / orig_sr)` scale : bool Scale the resampled signal so that `y` and `y_hat` have approximately equal total energy. kwargs : additional keyword arguments If `fix==True`, additional keyword arguments to pass to `librosa.util.fix_length`. Returns ------- y_hat : np.ndarray [shape=(n * target_sr / orig_sr,)] `y` resampled from `orig_sr` to `target_sr` Raises ------ ParameterError If `res_type='polyphase'` and `orig_sr` or `target_sr` are not both integer-valued. See Also -------- librosa.util.fix_length scipy.signal.resample resampy.resample Notes ----- This function caches at level 20. 
Examples -------- Downsample from 22 KHz to 8 KHz >>> y, sr = librosa.load(librosa.util.example_audio_file(), sr=22050) >>> y_8k = librosa.resample(y, sr, 8000) >>> y.shape, y_8k.shape ((1355168,), (491671,)) ] call[name[util].valid_audio, parameter[name[y]]] if compare[name[orig_sr] equal[==] name[target_sr]] begin[:] return[name[y]] variable[ratio] assign[=] binary_operation[call[name[float], parameter[name[target_sr]]] / name[orig_sr]] variable[n_samples] assign[=] call[name[int], parameter[call[name[np].ceil, parameter[binary_operation[call[name[y].shape][<ast.UnaryOp object at 0x7da20c6e68c0>] * name[ratio]]]]]] if compare[name[res_type] in tuple[[<ast.Constant object at 0x7da20c6e6350>, <ast.Constant object at 0x7da20c6e6770>]]] begin[:] variable[y_hat] assign[=] call[name[scipy].signal.resample, parameter[name[y], name[n_samples]]] if name[fix] begin[:] variable[y_hat] assign[=] call[name[util].fix_length, parameter[name[y_hat], name[n_samples]]] if name[scale] begin[:] <ast.AugAssign object at 0x7da1b055c940> return[call[name[np].ascontiguousarray, parameter[name[y_hat]]]]
keyword[def] identifier[resample] ( identifier[y] , identifier[orig_sr] , identifier[target_sr] , identifier[res_type] = literal[string] , identifier[fix] = keyword[True] , identifier[scale] = keyword[False] ,** identifier[kwargs] ): literal[string] identifier[util] . identifier[valid_audio] ( identifier[y] , identifier[mono] = keyword[False] ) keyword[if] identifier[orig_sr] == identifier[target_sr] : keyword[return] identifier[y] identifier[ratio] = identifier[float] ( identifier[target_sr] )/ identifier[orig_sr] identifier[n_samples] = identifier[int] ( identifier[np] . identifier[ceil] ( identifier[y] . identifier[shape] [- literal[int] ]* identifier[ratio] )) keyword[if] identifier[res_type] keyword[in] ( literal[string] , literal[string] ): identifier[y_hat] = identifier[scipy] . identifier[signal] . identifier[resample] ( identifier[y] , identifier[n_samples] , identifier[axis] =- literal[int] ) keyword[elif] identifier[res_type] == literal[string] : keyword[if] identifier[int] ( identifier[orig_sr] )!= identifier[orig_sr] keyword[or] identifier[int] ( identifier[target_sr] )!= identifier[target_sr] : keyword[raise] identifier[ParameterError] ( literal[string] ) identifier[orig_sr] = identifier[int] ( identifier[orig_sr] ) identifier[target_sr] = identifier[int] ( identifier[target_sr] ) identifier[gcd] = identifier[np] . identifier[gcd] ( identifier[orig_sr] , identifier[target_sr] ) identifier[y_hat] = identifier[scipy] . identifier[signal] . identifier[resample_poly] ( identifier[y] , identifier[target_sr] // identifier[gcd] , identifier[orig_sr] // identifier[gcd] , identifier[axis] =- literal[int] ) keyword[else] : identifier[y_hat] = identifier[resampy] . identifier[resample] ( identifier[y] , identifier[orig_sr] , identifier[target_sr] , identifier[filter] = identifier[res_type] , identifier[axis] =- literal[int] ) keyword[if] identifier[fix] : identifier[y_hat] = identifier[util] . 
identifier[fix_length] ( identifier[y_hat] , identifier[n_samples] ,** identifier[kwargs] ) keyword[if] identifier[scale] : identifier[y_hat] /= identifier[np] . identifier[sqrt] ( identifier[ratio] ) keyword[return] identifier[np] . identifier[ascontiguousarray] ( identifier[y_hat] , identifier[dtype] = identifier[y] . identifier[dtype] )
def resample(y, orig_sr, target_sr, res_type='kaiser_best', fix=True, scale=False, **kwargs): """Resample a time series from orig_sr to target_sr Parameters ---------- y : np.ndarray [shape=(n,) or shape=(2, n)] audio time series. Can be mono or stereo. orig_sr : number > 0 [scalar] original sampling rate of `y` target_sr : number > 0 [scalar] target sampling rate res_type : str resample type (see note) .. note:: By default, this uses `resampy`'s high-quality mode ('kaiser_best'). To use a faster method, set `res_type='kaiser_fast'`. To use `scipy.signal.resample`, set `res_type='fft'` or `res_type='scipy'`. To use `scipy.signal.resample_poly`, set `res_type='polyphase'`. .. note:: When using `res_type='polyphase'`, only integer sampling rates are supported. fix : bool adjust the length of the resampled signal to be of size exactly `ceil(target_sr * len(y) / orig_sr)` scale : bool Scale the resampled signal so that `y` and `y_hat` have approximately equal total energy. kwargs : additional keyword arguments If `fix==True`, additional keyword arguments to pass to `librosa.util.fix_length`. Returns ------- y_hat : np.ndarray [shape=(n * target_sr / orig_sr,)] `y` resampled from `orig_sr` to `target_sr` Raises ------ ParameterError If `res_type='polyphase'` and `orig_sr` or `target_sr` are not both integer-valued. See Also -------- librosa.util.fix_length scipy.signal.resample resampy.resample Notes ----- This function caches at level 20. 
Examples -------- Downsample from 22 KHz to 8 KHz >>> y, sr = librosa.load(librosa.util.example_audio_file(), sr=22050) >>> y_8k = librosa.resample(y, sr, 8000) >>> y.shape, y_8k.shape ((1355168,), (491671,)) """ # First, validate the audio buffer util.valid_audio(y, mono=False) if orig_sr == target_sr: return y # depends on [control=['if'], data=[]] ratio = float(target_sr) / orig_sr n_samples = int(np.ceil(y.shape[-1] * ratio)) if res_type in ('scipy', 'fft'): y_hat = scipy.signal.resample(y, n_samples, axis=-1) # depends on [control=['if'], data=[]] elif res_type == 'polyphase': if int(orig_sr) != orig_sr or int(target_sr) != target_sr: raise ParameterError('polyphase resampling is only supported for integer-valued sampling rates.') # depends on [control=['if'], data=[]] # For polyphase resampling, we need up- and down-sampling ratios # We can get those from the greatest common divisor of the rates # as long as the rates are integrable orig_sr = int(orig_sr) target_sr = int(target_sr) gcd = np.gcd(orig_sr, target_sr) y_hat = scipy.signal.resample_poly(y, target_sr // gcd, orig_sr // gcd, axis=-1) # depends on [control=['if'], data=[]] else: y_hat = resampy.resample(y, orig_sr, target_sr, filter=res_type, axis=-1) if fix: y_hat = util.fix_length(y_hat, n_samples, **kwargs) # depends on [control=['if'], data=[]] if scale: y_hat /= np.sqrt(ratio) # depends on [control=['if'], data=[]] return np.ascontiguousarray(y_hat, dtype=y.dtype)
def get_affected_box(self, src): """ Get the enlarged bounding box of a source. :param src: a source object :returns: a bounding box (min_lon, min_lat, max_lon, max_lat) """ mag = src.get_min_max_mag()[1] maxdist = self(src.tectonic_region_type, mag) bbox = get_bounding_box(src, maxdist) return (fix_lon(bbox[0]), bbox[1], fix_lon(bbox[2]), bbox[3])
def function[get_affected_box, parameter[self, src]]: constant[ Get the enlarged bounding box of a source. :param src: a source object :returns: a bounding box (min_lon, min_lat, max_lon, max_lat) ] variable[mag] assign[=] call[call[name[src].get_min_max_mag, parameter[]]][constant[1]] variable[maxdist] assign[=] call[name[self], parameter[name[src].tectonic_region_type, name[mag]]] variable[bbox] assign[=] call[name[get_bounding_box], parameter[name[src], name[maxdist]]] return[tuple[[<ast.Call object at 0x7da2046204c0>, <ast.Subscript object at 0x7da2046233d0>, <ast.Call object at 0x7da204565d20>, <ast.Subscript object at 0x7da204566740>]]]
keyword[def] identifier[get_affected_box] ( identifier[self] , identifier[src] ): literal[string] identifier[mag] = identifier[src] . identifier[get_min_max_mag] ()[ literal[int] ] identifier[maxdist] = identifier[self] ( identifier[src] . identifier[tectonic_region_type] , identifier[mag] ) identifier[bbox] = identifier[get_bounding_box] ( identifier[src] , identifier[maxdist] ) keyword[return] ( identifier[fix_lon] ( identifier[bbox] [ literal[int] ]), identifier[bbox] [ literal[int] ], identifier[fix_lon] ( identifier[bbox] [ literal[int] ]), identifier[bbox] [ literal[int] ])
def get_affected_box(self, src): """ Get the enlarged bounding box of a source. :param src: a source object :returns: a bounding box (min_lon, min_lat, max_lon, max_lat) """ mag = src.get_min_max_mag()[1] maxdist = self(src.tectonic_region_type, mag) bbox = get_bounding_box(src, maxdist) return (fix_lon(bbox[0]), bbox[1], fix_lon(bbox[2]), bbox[3])
def random_product(iter1, iter2): """ random sampler for equal_splits func""" pool1 = tuple(iter1) pool2 = tuple(iter2) ind1 = random.sample(pool1, 2) ind2 = random.sample(pool2, 2) return tuple(ind1+ind2)
def function[random_product, parameter[iter1, iter2]]: constant[ random sampler for equal_splits func] variable[pool1] assign[=] call[name[tuple], parameter[name[iter1]]] variable[pool2] assign[=] call[name[tuple], parameter[name[iter2]]] variable[ind1] assign[=] call[name[random].sample, parameter[name[pool1], constant[2]]] variable[ind2] assign[=] call[name[random].sample, parameter[name[pool2], constant[2]]] return[call[name[tuple], parameter[binary_operation[name[ind1] + name[ind2]]]]]
keyword[def] identifier[random_product] ( identifier[iter1] , identifier[iter2] ): literal[string] identifier[pool1] = identifier[tuple] ( identifier[iter1] ) identifier[pool2] = identifier[tuple] ( identifier[iter2] ) identifier[ind1] = identifier[random] . identifier[sample] ( identifier[pool1] , literal[int] ) identifier[ind2] = identifier[random] . identifier[sample] ( identifier[pool2] , literal[int] ) keyword[return] identifier[tuple] ( identifier[ind1] + identifier[ind2] )
def random_product(iter1, iter2): """ random sampler for equal_splits func""" pool1 = tuple(iter1) pool2 = tuple(iter2) ind1 = random.sample(pool1, 2) ind2 = random.sample(pool2, 2) return tuple(ind1 + ind2)
def list(self, request, *args, **kwargs): """ Available request parameters: - ?type=type_of_statistics_objects (required. Have to be from the list: 'customer', 'project') - ?from=timestamp (default: now - 30 days, for example: 1415910025) - ?to=timestamp (default: now, for example: 1415912625) - ?datapoints=how many data points have to be in answer (default: 6) Answer will be list of datapoints(dictionaries). Each datapoint will contain fields: 'to', 'from', 'value'. 'Value' - count of objects, that were created between 'from' and 'to' dates. Example: .. code-block:: javascript [ {"to": 471970877, "from": 1, "value": 5}, {"to": 943941753, "from": 471970877, "value": 0}, {"to": 1415912629, "from": 943941753, "value": 3} ] """ return super(CreationTimeStatsView, self).list(request, *args, **kwargs)
def function[list, parameter[self, request]]: constant[ Available request parameters: - ?type=type_of_statistics_objects (required. Have to be from the list: 'customer', 'project') - ?from=timestamp (default: now - 30 days, for example: 1415910025) - ?to=timestamp (default: now, for example: 1415912625) - ?datapoints=how many data points have to be in answer (default: 6) Answer will be list of datapoints(dictionaries). Each datapoint will contain fields: 'to', 'from', 'value'. 'Value' - count of objects, that were created between 'from' and 'to' dates. Example: .. code-block:: javascript [ {"to": 471970877, "from": 1, "value": 5}, {"to": 943941753, "from": 471970877, "value": 0}, {"to": 1415912629, "from": 943941753, "value": 3} ] ] return[call[call[name[super], parameter[name[CreationTimeStatsView], name[self]]].list, parameter[name[request], <ast.Starred object at 0x7da1b0fe65f0>]]]
keyword[def] identifier[list] ( identifier[self] , identifier[request] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[super] ( identifier[CreationTimeStatsView] , identifier[self] ). identifier[list] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] )
def list(self, request, *args, **kwargs): """ Available request parameters: - ?type=type_of_statistics_objects (required. Have to be from the list: 'customer', 'project') - ?from=timestamp (default: now - 30 days, for example: 1415910025) - ?to=timestamp (default: now, for example: 1415912625) - ?datapoints=how many data points have to be in answer (default: 6) Answer will be list of datapoints(dictionaries). Each datapoint will contain fields: 'to', 'from', 'value'. 'Value' - count of objects, that were created between 'from' and 'to' dates. Example: .. code-block:: javascript [ {"to": 471970877, "from": 1, "value": 5}, {"to": 943941753, "from": 471970877, "value": 0}, {"to": 1415912629, "from": 943941753, "value": 3} ] """ return super(CreationTimeStatsView, self).list(request, *args, **kwargs)
def remove_unweighted_sources(graph: BELGraph, key: Optional[str] = None) -> None: """Prune unannotated nodes on the periphery of the sub-graph. :param graph: A BEL graph :param key: The key in the node data dictionary representing the experimental data. Defaults to :data:`pybel_tools.constants.WEIGHT`. """ nodes = list(get_unweighted_sources(graph, key=key)) graph.remove_nodes_from(nodes)
def function[remove_unweighted_sources, parameter[graph, key]]: constant[Prune unannotated nodes on the periphery of the sub-graph. :param graph: A BEL graph :param key: The key in the node data dictionary representing the experimental data. Defaults to :data:`pybel_tools.constants.WEIGHT`. ] variable[nodes] assign[=] call[name[list], parameter[call[name[get_unweighted_sources], parameter[name[graph]]]]] call[name[graph].remove_nodes_from, parameter[name[nodes]]]
keyword[def] identifier[remove_unweighted_sources] ( identifier[graph] : identifier[BELGraph] , identifier[key] : identifier[Optional] [ identifier[str] ]= keyword[None] )-> keyword[None] : literal[string] identifier[nodes] = identifier[list] ( identifier[get_unweighted_sources] ( identifier[graph] , identifier[key] = identifier[key] )) identifier[graph] . identifier[remove_nodes_from] ( identifier[nodes] )
def remove_unweighted_sources(graph: BELGraph, key: Optional[str]=None) -> None: """Prune unannotated nodes on the periphery of the sub-graph. :param graph: A BEL graph :param key: The key in the node data dictionary representing the experimental data. Defaults to :data:`pybel_tools.constants.WEIGHT`. """ nodes = list(get_unweighted_sources(graph, key=key)) graph.remove_nodes_from(nodes)
def _get_titles(self, limit): """ The file processed here is of the format: #NBK_id GR_shortname OMIM NBK1103 trimethylaminuria 136132 NBK1103 trimethylaminuria 602079 NBK1104 cdls 122470 Where each of the rows represents a mapping between a gr id and an omim id. These are a 1:many relationship, and some of the omim ids are genes (not diseases). Therefore, we need to create a loose coupling here. We make the assumption that these NBKs are generally higher-level grouping classes; therefore the OMIM ids are treated as subclasses. (This assumption is poor for those omims that are actually genes, but we have no way of knowing what those are here... we will just have to deal with that for now.) :param limit: :return: """ raw = '/'.join((self.rawdir, self.files['titles']['file'])) model = Model(self.graph) col = ['GR_shortname', 'GR_Title', 'NBK_id', 'PMID'] with open(raw, 'r', encoding='latin-1') as csvfile: filereader = csv.reader(csvfile, delimiter='\t', quotechar='\"') header = next(filereader) header[0] = header[0][1:] colcount = len(col) if header != col: LOG.error( '\nExpected header: %s\nRecieved header: %s', col, header) exit(-1) for row in filereader: if len(row) != colcount: LOG.error("Unexpected row. got: %s", row) LOG.error("Expected data for: %s", col) exit(-1) nbk_num = row[col.index('NBK_id')] gr_id = 'GeneReviews:' + nbk_num self.book_ids.add(nbk_num) # a global set of the book nums if limit is None or filereader.line_num < limit: model.addClassToGraph(gr_id, row[col.index('GR_Title')]) model.addSynonym(gr_id, row[col.index('GR_shortname')]) # TODO include the new PMID? return
def function[_get_titles, parameter[self, limit]]: constant[ The file processed here is of the format: #NBK_id GR_shortname OMIM NBK1103 trimethylaminuria 136132 NBK1103 trimethylaminuria 602079 NBK1104 cdls 122470 Where each of the rows represents a mapping between a gr id and an omim id. These are a 1:many relationship, and some of the omim ids are genes (not diseases). Therefore, we need to create a loose coupling here. We make the assumption that these NBKs are generally higher-level grouping classes; therefore the OMIM ids are treated as subclasses. (This assumption is poor for those omims that are actually genes, but we have no way of knowing what those are here... we will just have to deal with that for now.) :param limit: :return: ] variable[raw] assign[=] call[constant[/].join, parameter[tuple[[<ast.Attribute object at 0x7da20e955330>, <ast.Subscript object at 0x7da20e955720>]]]] variable[model] assign[=] call[name[Model], parameter[name[self].graph]] variable[col] assign[=] list[[<ast.Constant object at 0x7da20e954a00>, <ast.Constant object at 0x7da20e956cb0>, <ast.Constant object at 0x7da20e957e20>, <ast.Constant object at 0x7da20e955210>]] with call[name[open], parameter[name[raw], constant[r]]] begin[:] variable[filereader] assign[=] call[name[csv].reader, parameter[name[csvfile]]] variable[header] assign[=] call[name[next], parameter[name[filereader]]] call[name[header]][constant[0]] assign[=] call[call[name[header]][constant[0]]][<ast.Slice object at 0x7da20e955270>] variable[colcount] assign[=] call[name[len], parameter[name[col]]] if compare[name[header] not_equal[!=] name[col]] begin[:] call[name[LOG].error, parameter[constant[ Expected header: %s Recieved header: %s], name[col], name[header]]] call[name[exit], parameter[<ast.UnaryOp object at 0x7da20e956620>]] for taget[name[row]] in starred[name[filereader]] begin[:] if compare[call[name[len], parameter[name[row]]] not_equal[!=] name[colcount]] begin[:] call[name[LOG].error, 
parameter[constant[Unexpected row. got: %s], name[row]]] call[name[LOG].error, parameter[constant[Expected data for: %s], name[col]]] call[name[exit], parameter[<ast.UnaryOp object at 0x7da20e9559f0>]] variable[nbk_num] assign[=] call[name[row]][call[name[col].index, parameter[constant[NBK_id]]]] variable[gr_id] assign[=] binary_operation[constant[GeneReviews:] + name[nbk_num]] call[name[self].book_ids.add, parameter[name[nbk_num]]] if <ast.BoolOp object at 0x7da20e9567d0> begin[:] call[name[model].addClassToGraph, parameter[name[gr_id], call[name[row]][call[name[col].index, parameter[constant[GR_Title]]]]]] call[name[model].addSynonym, parameter[name[gr_id], call[name[row]][call[name[col].index, parameter[constant[GR_shortname]]]]]] return[None]
keyword[def] identifier[_get_titles] ( identifier[self] , identifier[limit] ): literal[string] identifier[raw] = literal[string] . identifier[join] (( identifier[self] . identifier[rawdir] , identifier[self] . identifier[files] [ literal[string] ][ literal[string] ])) identifier[model] = identifier[Model] ( identifier[self] . identifier[graph] ) identifier[col] =[ literal[string] , literal[string] , literal[string] , literal[string] ] keyword[with] identifier[open] ( identifier[raw] , literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[csvfile] : identifier[filereader] = identifier[csv] . identifier[reader] ( identifier[csvfile] , identifier[delimiter] = literal[string] , identifier[quotechar] = literal[string] ) identifier[header] = identifier[next] ( identifier[filereader] ) identifier[header] [ literal[int] ]= identifier[header] [ literal[int] ][ literal[int] :] identifier[colcount] = identifier[len] ( identifier[col] ) keyword[if] identifier[header] != identifier[col] : identifier[LOG] . identifier[error] ( literal[string] , identifier[col] , identifier[header] ) identifier[exit] (- literal[int] ) keyword[for] identifier[row] keyword[in] identifier[filereader] : keyword[if] identifier[len] ( identifier[row] )!= identifier[colcount] : identifier[LOG] . identifier[error] ( literal[string] , identifier[row] ) identifier[LOG] . identifier[error] ( literal[string] , identifier[col] ) identifier[exit] (- literal[int] ) identifier[nbk_num] = identifier[row] [ identifier[col] . identifier[index] ( literal[string] )] identifier[gr_id] = literal[string] + identifier[nbk_num] identifier[self] . identifier[book_ids] . identifier[add] ( identifier[nbk_num] ) keyword[if] identifier[limit] keyword[is] keyword[None] keyword[or] identifier[filereader] . identifier[line_num] < identifier[limit] : identifier[model] . identifier[addClassToGraph] ( identifier[gr_id] , identifier[row] [ identifier[col] . 
identifier[index] ( literal[string] )]) identifier[model] . identifier[addSynonym] ( identifier[gr_id] , identifier[row] [ identifier[col] . identifier[index] ( literal[string] )]) keyword[return]
def _get_titles(self, limit): """ The file processed here is of the format: #NBK_id GR_shortname OMIM NBK1103 trimethylaminuria 136132 NBK1103 trimethylaminuria 602079 NBK1104 cdls 122470 Where each of the rows represents a mapping between a gr id and an omim id. These are a 1:many relationship, and some of the omim ids are genes (not diseases). Therefore, we need to create a loose coupling here. We make the assumption that these NBKs are generally higher-level grouping classes; therefore the OMIM ids are treated as subclasses. (This assumption is poor for those omims that are actually genes, but we have no way of knowing what those are here... we will just have to deal with that for now.) :param limit: :return: """ raw = '/'.join((self.rawdir, self.files['titles']['file'])) model = Model(self.graph) col = ['GR_shortname', 'GR_Title', 'NBK_id', 'PMID'] with open(raw, 'r', encoding='latin-1') as csvfile: filereader = csv.reader(csvfile, delimiter='\t', quotechar='"') header = next(filereader) header[0] = header[0][1:] colcount = len(col) if header != col: LOG.error('\nExpected header: %s\nRecieved header: %s', col, header) exit(-1) # depends on [control=['if'], data=['header', 'col']] for row in filereader: if len(row) != colcount: LOG.error('Unexpected row. got: %s', row) LOG.error('Expected data for: %s', col) exit(-1) # depends on [control=['if'], data=[]] nbk_num = row[col.index('NBK_id')] gr_id = 'GeneReviews:' + nbk_num self.book_ids.add(nbk_num) # a global set of the book nums if limit is None or filereader.line_num < limit: model.addClassToGraph(gr_id, row[col.index('GR_Title')]) model.addSynonym(gr_id, row[col.index('GR_shortname')]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['row']] # depends on [control=['with'], data=['csvfile']] # TODO include the new PMID? return
def unit_triangle():
    """Image for :class:`.surface.Surface` docstring."""
    if NO_IMAGES:
        return
    # Degree-1 surface over the standard unit triangle (0,0)-(1,0)-(0,1).
    control_points = np.asfortranarray([
        [0.0, 1.0, 0.0],
        [0.0, 0.0, 1.0],
    ])
    triangle = bezier.Surface(control_points, degree=1)
    axis = triangle.plot(256)
    axis.axis("scaled")
    _plot_helpers.add_plot_boundary(axis)
    save_image(axis.figure, "unit_triangle.png")
def function[unit_triangle, parameter[]]: constant[Image for :class:`.surface.Surface` docstring.] if name[NO_IMAGES] begin[:] return[None] variable[nodes] assign[=] call[name[np].asfortranarray, parameter[list[[<ast.List object at 0x7da20c6c6560>, <ast.List object at 0x7da20c6c5240>]]]] variable[surface] assign[=] call[name[bezier].Surface, parameter[name[nodes]]] variable[ax] assign[=] call[name[surface].plot, parameter[constant[256]]] call[name[ax].axis, parameter[constant[scaled]]] call[name[_plot_helpers].add_plot_boundary, parameter[name[ax]]] call[name[save_image], parameter[name[ax].figure, constant[unit_triangle.png]]]
keyword[def] identifier[unit_triangle] (): literal[string] keyword[if] identifier[NO_IMAGES] : keyword[return] identifier[nodes] = identifier[np] . identifier[asfortranarray] ([[ literal[int] , literal[int] , literal[int] ],[ literal[int] , literal[int] , literal[int] ]]) identifier[surface] = identifier[bezier] . identifier[Surface] ( identifier[nodes] , identifier[degree] = literal[int] ) identifier[ax] = identifier[surface] . identifier[plot] ( literal[int] ) identifier[ax] . identifier[axis] ( literal[string] ) identifier[_plot_helpers] . identifier[add_plot_boundary] ( identifier[ax] ) identifier[save_image] ( identifier[ax] . identifier[figure] , literal[string] )
def unit_triangle(): """Image for :class:`.surface.Surface` docstring.""" if NO_IMAGES: return # depends on [control=['if'], data=[]] nodes = np.asfortranarray([[0.0, 1.0, 0.0], [0.0, 0.0, 1.0]]) surface = bezier.Surface(nodes, degree=1) ax = surface.plot(256) ax.axis('scaled') _plot_helpers.add_plot_boundary(ax) save_image(ax.figure, 'unit_triangle.png')
def evaluateplanarR2derivs(Pot,R,phi=None,t=0.):
    """
    NAME:

       evaluateplanarR2derivs

    PURPOSE:

       evaluate the second radial derivative of a (list of) planarPotential instance(s)

    INPUT:

       Pot - (list of) planarPotential instance(s)

       R - Cylindrical radius (can be Quantity)

       phi= azimuth (optional; can be Quantity)

       t= time (optional; can be Quantity)

    OUTPUT:

       F_R(R(,phi,t))

    HISTORY:

       2010-10-09 - Written - Bovy (IAS)

    """
    from .Potential import _isNonAxi
    nonAxi= _isNonAxi(Pot)
    if nonAxi and phi is None:
        raise PotentialError("The (list of) planarPotential instances is non-axisymmetric, but you did not provide phi")
    if isinstance(Pot,list) \
            and nu.all([isinstance(p,planarPotential) for p in Pot]):
        # Sum the second derivative over every potential in the list.
        # (Renamed from 'sum', which shadowed the builtin; also removed the
        # unused 'isList' local.)
        total= 0.
        for pot in Pot:
            if nonAxi:
                total+= pot.R2deriv(R,phi=phi,t=t,use_physical=False)
            else:
                total+= pot.R2deriv(R,t=t,use_physical=False)
        return total
    elif isinstance(Pot,planarPotential):
        if nonAxi:
            return Pot.R2deriv(R,phi=phi,t=t,use_physical=False)
        else:
            return Pot.R2deriv(R,t=t,use_physical=False)
    else: #pragma: no cover
        raise PotentialError("Input to 'evaluatePotentials' is neither a Potential-instance or a list of such instances")
def function[evaluateplanarR2derivs, parameter[Pot, R, phi, t]]: constant[ NAME: evaluateplanarR2derivs PURPOSE: evaluate the second radial derivative of a (list of) planarPotential instance(s) INPUT: Pot - (list of) planarPotential instance(s) R - Cylindrical radius (can be Quantity) phi= azimuth (optional; can be Quantity) t= time (optional; can be Quantity) OUTPUT: F_R(R(,phi,t)) HISTORY: 2010-10-09 - Written - Bovy (IAS) ] from relative_module[Potential] import module[_isNonAxi] variable[isList] assign[=] call[name[isinstance], parameter[name[Pot], name[list]]] variable[nonAxi] assign[=] call[name[_isNonAxi], parameter[name[Pot]]] if <ast.BoolOp object at 0x7da1b0c41300> begin[:] <ast.Raise object at 0x7da1b0c40dc0> if <ast.BoolOp object at 0x7da1b0c41f00> begin[:] variable[sum] assign[=] constant[0.0] for taget[name[pot]] in starred[name[Pot]] begin[:] if name[nonAxi] begin[:] <ast.AugAssign object at 0x7da1b0c42020> return[name[sum]]
keyword[def] identifier[evaluateplanarR2derivs] ( identifier[Pot] , identifier[R] , identifier[phi] = keyword[None] , identifier[t] = literal[int] ): literal[string] keyword[from] . identifier[Potential] keyword[import] identifier[_isNonAxi] identifier[isList] = identifier[isinstance] ( identifier[Pot] , identifier[list] ) identifier[nonAxi] = identifier[_isNonAxi] ( identifier[Pot] ) keyword[if] identifier[nonAxi] keyword[and] identifier[phi] keyword[is] keyword[None] : keyword[raise] identifier[PotentialError] ( literal[string] ) keyword[if] identifier[isinstance] ( identifier[Pot] , identifier[list] ) keyword[and] identifier[nu] . identifier[all] ([ identifier[isinstance] ( identifier[p] , identifier[planarPotential] ) keyword[for] identifier[p] keyword[in] identifier[Pot] ]): identifier[sum] = literal[int] keyword[for] identifier[pot] keyword[in] identifier[Pot] : keyword[if] identifier[nonAxi] : identifier[sum] += identifier[pot] . identifier[R2deriv] ( identifier[R] , identifier[phi] = identifier[phi] , identifier[t] = identifier[t] , identifier[use_physical] = keyword[False] ) keyword[else] : identifier[sum] += identifier[pot] . identifier[R2deriv] ( identifier[R] , identifier[t] = identifier[t] , identifier[use_physical] = keyword[False] ) keyword[return] identifier[sum] keyword[elif] identifier[isinstance] ( identifier[Pot] , identifier[planarPotential] ): keyword[if] identifier[nonAxi] : keyword[return] identifier[Pot] . identifier[R2deriv] ( identifier[R] , identifier[phi] = identifier[phi] , identifier[t] = identifier[t] , identifier[use_physical] = keyword[False] ) keyword[else] : keyword[return] identifier[Pot] . identifier[R2deriv] ( identifier[R] , identifier[t] = identifier[t] , identifier[use_physical] = keyword[False] ) keyword[else] : keyword[raise] identifier[PotentialError] ( literal[string] )
def evaluateplanarR2derivs(Pot, R, phi=None, t=0.0): """ NAME: evaluateplanarR2derivs PURPOSE: evaluate the second radial derivative of a (list of) planarPotential instance(s) INPUT: Pot - (list of) planarPotential instance(s) R - Cylindrical radius (can be Quantity) phi= azimuth (optional; can be Quantity) t= time (optional; can be Quantity) OUTPUT: F_R(R(,phi,t)) HISTORY: 2010-10-09 - Written - Bovy (IAS) """ from .Potential import _isNonAxi isList = isinstance(Pot, list) nonAxi = _isNonAxi(Pot) if nonAxi and phi is None: raise PotentialError('The (list of) planarPotential instances is non-axisymmetric, but you did not provide phi') # depends on [control=['if'], data=[]] if isinstance(Pot, list) and nu.all([isinstance(p, planarPotential) for p in Pot]): sum = 0.0 for pot in Pot: if nonAxi: sum += pot.R2deriv(R, phi=phi, t=t, use_physical=False) # depends on [control=['if'], data=[]] else: sum += pot.R2deriv(R, t=t, use_physical=False) # depends on [control=['for'], data=['pot']] return sum # depends on [control=['if'], data=[]] elif isinstance(Pot, planarPotential): if nonAxi: return Pot.R2deriv(R, phi=phi, t=t, use_physical=False) # depends on [control=['if'], data=[]] else: return Pot.R2deriv(R, t=t, use_physical=False) # depends on [control=['if'], data=[]] else: #pragma: no cover raise PotentialError("Input to 'evaluatePotentials' is neither a Potential-instance or a list of such instances")
def handle_stream(self, stream, address):
    '''
    Handle incoming streams and add messages to the incoming queue
    '''
    log.trace('Req client %s connected', address)
    # Register the client once; the same tuple value is removed on exit.
    client = (stream, address)
    self.clients.append(client)
    unpacker = msgpack.Unpacker()
    try:
        while True:
            chunk = yield stream.read_bytes(4096, partial=True)
            unpacker.feed(chunk)
            for framed_msg in unpacker:
                if six.PY3:
                    framed_msg = salt.transport.frame.decode_embedded_strs(
                        framed_msg
                    )
                # Dispatch each complete frame without blocking the read loop.
                self.io_loop.spawn_callback(
                    self.message_handler,
                    stream,
                    framed_msg['head'],
                    framed_msg['body'])
    except StreamClosedError:
        log.trace('req client disconnected %s', address)
        self.clients.remove(client)
    except Exception as e:
        log.trace('other master-side exception: %s', e)
        self.clients.remove(client)
        stream.close()
def function[handle_stream, parameter[self, stream, address]]: constant[ Handle incoming streams and add messages to the incoming queue ] call[name[log].trace, parameter[constant[Req client %s connected], name[address]]] call[name[self].clients.append, parameter[tuple[[<ast.Name object at 0x7da18dc067a0>, <ast.Name object at 0x7da18dc05630>]]]] variable[unpacker] assign[=] call[name[msgpack].Unpacker, parameter[]] <ast.Try object at 0x7da18dc06110>
keyword[def] identifier[handle_stream] ( identifier[self] , identifier[stream] , identifier[address] ): literal[string] identifier[log] . identifier[trace] ( literal[string] , identifier[address] ) identifier[self] . identifier[clients] . identifier[append] (( identifier[stream] , identifier[address] )) identifier[unpacker] = identifier[msgpack] . identifier[Unpacker] () keyword[try] : keyword[while] keyword[True] : identifier[wire_bytes] = keyword[yield] identifier[stream] . identifier[read_bytes] ( literal[int] , identifier[partial] = keyword[True] ) identifier[unpacker] . identifier[feed] ( identifier[wire_bytes] ) keyword[for] identifier[framed_msg] keyword[in] identifier[unpacker] : keyword[if] identifier[six] . identifier[PY3] : identifier[framed_msg] = identifier[salt] . identifier[transport] . identifier[frame] . identifier[decode_embedded_strs] ( identifier[framed_msg] ) identifier[header] = identifier[framed_msg] [ literal[string] ] identifier[self] . identifier[io_loop] . identifier[spawn_callback] ( identifier[self] . identifier[message_handler] , identifier[stream] , identifier[header] , identifier[framed_msg] [ literal[string] ]) keyword[except] identifier[StreamClosedError] : identifier[log] . identifier[trace] ( literal[string] , identifier[address] ) identifier[self] . identifier[clients] . identifier[remove] (( identifier[stream] , identifier[address] )) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[log] . identifier[trace] ( literal[string] , identifier[e] ) identifier[self] . identifier[clients] . identifier[remove] (( identifier[stream] , identifier[address] )) identifier[stream] . identifier[close] ()
def handle_stream(self, stream, address): """ Handle incoming streams and add messages to the incoming queue """ log.trace('Req client %s connected', address) self.clients.append((stream, address)) unpacker = msgpack.Unpacker() try: while True: wire_bytes = (yield stream.read_bytes(4096, partial=True)) unpacker.feed(wire_bytes) for framed_msg in unpacker: if six.PY3: framed_msg = salt.transport.frame.decode_embedded_strs(framed_msg) # depends on [control=['if'], data=[]] header = framed_msg['head'] self.io_loop.spawn_callback(self.message_handler, stream, header, framed_msg['body']) # depends on [control=['for'], data=['framed_msg']] # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]] except StreamClosedError: log.trace('req client disconnected %s', address) self.clients.remove((stream, address)) # depends on [control=['except'], data=[]] except Exception as e: log.trace('other master-side exception: %s', e) self.clients.remove((stream, address)) stream.close() # depends on [control=['except'], data=['e']]
def create_or_update(cluster_config_file, min_workers, max_workers, no_restart,
                     restart_only, yes, cluster_name):
    """Create or update a Ray cluster."""
    # The two restart flags are mutually exclusive; reject the combination
    # before touching the cluster.
    if no_restart or restart_only:
        assert restart_only != no_restart, \
            "Cannot set both 'restart_only' and 'no_restart' at the same time!"
    create_or_update_cluster(cluster_config_file, min_workers, max_workers,
                             no_restart, restart_only, yes, cluster_name)
def function[create_or_update, parameter[cluster_config_file, min_workers, max_workers, no_restart, restart_only, yes, cluster_name]]: constant[Create or update a Ray cluster.] if <ast.BoolOp object at 0x7da20e9b3fa0> begin[:] assert[compare[name[restart_only] not_equal[!=] name[no_restart]]] call[name[create_or_update_cluster], parameter[name[cluster_config_file], name[min_workers], name[max_workers], name[no_restart], name[restart_only], name[yes], name[cluster_name]]]
keyword[def] identifier[create_or_update] ( identifier[cluster_config_file] , identifier[min_workers] , identifier[max_workers] , identifier[no_restart] , identifier[restart_only] , identifier[yes] , identifier[cluster_name] ): literal[string] keyword[if] identifier[restart_only] keyword[or] identifier[no_restart] : keyword[assert] identifier[restart_only] != identifier[no_restart] , literal[string] literal[string] identifier[create_or_update_cluster] ( identifier[cluster_config_file] , identifier[min_workers] , identifier[max_workers] , identifier[no_restart] , identifier[restart_only] , identifier[yes] , identifier[cluster_name] )
def create_or_update(cluster_config_file, min_workers, max_workers, no_restart, restart_only, yes, cluster_name): """Create or update a Ray cluster.""" if restart_only or no_restart: assert restart_only != no_restart, "Cannot set both 'restart_only' and 'no_restart' at the same time!" # depends on [control=['if'], data=[]] create_or_update_cluster(cluster_config_file, min_workers, max_workers, no_restart, restart_only, yes, cluster_name)
def setup(self): "Connect incoming connection to a telnet session" try: self.TERM = self.request.term except: pass self.setterm(self.TERM) self.sock = self.request._sock for k in self.DOACK.keys(): self.sendcommand(self.DOACK[k], k) for k in self.WILLACK.keys(): self.sendcommand(self.WILLACK[k], k)
def function[setup, parameter[self]]: constant[Connect incoming connection to a telnet session] <ast.Try object at 0x7da18f09c820> call[name[self].setterm, parameter[name[self].TERM]] name[self].sock assign[=] name[self].request._sock for taget[name[k]] in starred[call[name[self].DOACK.keys, parameter[]]] begin[:] call[name[self].sendcommand, parameter[call[name[self].DOACK][name[k]], name[k]]] for taget[name[k]] in starred[call[name[self].WILLACK.keys, parameter[]]] begin[:] call[name[self].sendcommand, parameter[call[name[self].WILLACK][name[k]], name[k]]]
keyword[def] identifier[setup] ( identifier[self] ): literal[string] keyword[try] : identifier[self] . identifier[TERM] = identifier[self] . identifier[request] . identifier[term] keyword[except] : keyword[pass] identifier[self] . identifier[setterm] ( identifier[self] . identifier[TERM] ) identifier[self] . identifier[sock] = identifier[self] . identifier[request] . identifier[_sock] keyword[for] identifier[k] keyword[in] identifier[self] . identifier[DOACK] . identifier[keys] (): identifier[self] . identifier[sendcommand] ( identifier[self] . identifier[DOACK] [ identifier[k] ], identifier[k] ) keyword[for] identifier[k] keyword[in] identifier[self] . identifier[WILLACK] . identifier[keys] (): identifier[self] . identifier[sendcommand] ( identifier[self] . identifier[WILLACK] [ identifier[k] ], identifier[k] )
def setup(self): """Connect incoming connection to a telnet session""" try: self.TERM = self.request.term # depends on [control=['try'], data=[]] except: pass # depends on [control=['except'], data=[]] self.setterm(self.TERM) self.sock = self.request._sock for k in self.DOACK.keys(): self.sendcommand(self.DOACK[k], k) # depends on [control=['for'], data=['k']] for k in self.WILLACK.keys(): self.sendcommand(self.WILLACK[k], k) # depends on [control=['for'], data=['k']]
def fail_print(error):
    """Print an error in red text.

    Parameters
        error (HTTPError)
            Error object to print.
    """
    # Emit the message line, then the detailed errors, both wrapped in the
    # failure color codes.
    for detail in (error.message, error.errors):
        print(COLORS.fail, detail, COLORS.end)
def function[fail_print, parameter[error]]: constant[Print an error in red text. Parameters error (HTTPError) Error object to print. ] call[name[print], parameter[name[COLORS].fail, name[error].message, name[COLORS].end]] call[name[print], parameter[name[COLORS].fail, name[error].errors, name[COLORS].end]]
keyword[def] identifier[fail_print] ( identifier[error] ): literal[string] identifier[print] ( identifier[COLORS] . identifier[fail] , identifier[error] . identifier[message] , identifier[COLORS] . identifier[end] ) identifier[print] ( identifier[COLORS] . identifier[fail] , identifier[error] . identifier[errors] , identifier[COLORS] . identifier[end] )
def fail_print(error): """Print an error in red text. Parameters error (HTTPError) Error object to print. """ print(COLORS.fail, error.message, COLORS.end) print(COLORS.fail, error.errors, COLORS.end)
def endswith(self, suffix):
    """
    Construct a Filter matching values ending with ``suffix``.

    Parameters
    ----------
    suffix : str
        String suffix against which to compare values produced by ``self``.

    Returns
    -------
    matches : Filter
        Filter returning True for all sid/date pairs for which ``self``
        produces a string ending with ``suffix``.
    """
    predicate_args = (suffix,)
    return ArrayPredicate(term=self, op=LabelArray.endswith,
                          opargs=predicate_args)
def function[endswith, parameter[self, suffix]]: constant[ Construct a Filter matching values ending with ``suffix``. Parameters ---------- suffix : str String suffix against which to compare values produced by ``self``. Returns ------- matches : Filter Filter returning True for all sid/date pairs for which ``self`` produces a string ending with ``prefix``. ] return[call[name[ArrayPredicate], parameter[]]]
keyword[def] identifier[endswith] ( identifier[self] , identifier[suffix] ): literal[string] keyword[return] identifier[ArrayPredicate] ( identifier[term] = identifier[self] , identifier[op] = identifier[LabelArray] . identifier[endswith] , identifier[opargs] =( identifier[suffix] ,), )
def endswith(self, suffix): """ Construct a Filter matching values ending with ``suffix``. Parameters ---------- suffix : str String suffix against which to compare values produced by ``self``. Returns ------- matches : Filter Filter returning True for all sid/date pairs for which ``self`` produces a string ending with ``prefix``. """ return ArrayPredicate(term=self, op=LabelArray.endswith, opargs=(suffix,))
def fake(cls, gsimlt=None):
    """
    :returns:
        a fake `CompositionInfo` instance with the given gsim logic tree
        object; if None, builds automatically a fake gsim logic tree
    """
    # Fall back to a single-GSIM logic tree when none (or a falsy value)
    # is supplied.
    gsim_lt = gsimlt if gsimlt else logictree.GsimLogicTree.from_('[FromFile]')
    fake_sm = logictree.LtSourceModel(
        'scenario', 1, 'b1',
        [sourceconverter.SourceGroup('*', eff_ruptures=1)],
        gsim_lt.get_num_paths(), ordinal=0, samples=1)
    return cls(gsim_lt, seed=0, num_samples=0,
               source_models=[fake_sm], totweight=0)
def function[fake, parameter[cls, gsimlt]]: constant[ :returns: a fake `CompositionInfo` instance with the given gsim logic tree object; if None, builds automatically a fake gsim logic tree ] variable[weight] assign[=] constant[1] variable[gsim_lt] assign[=] <ast.BoolOp object at 0x7da20c796440> variable[fakeSM] assign[=] call[name[logictree].LtSourceModel, parameter[constant[scenario], name[weight], constant[b1], list[[<ast.Call object at 0x7da20c7965f0>]], call[name[gsim_lt].get_num_paths, parameter[]]]] return[call[name[cls], parameter[name[gsim_lt]]]]
keyword[def] identifier[fake] ( identifier[cls] , identifier[gsimlt] = keyword[None] ): literal[string] identifier[weight] = literal[int] identifier[gsim_lt] = identifier[gsimlt] keyword[or] identifier[logictree] . identifier[GsimLogicTree] . identifier[from_] ( literal[string] ) identifier[fakeSM] = identifier[logictree] . identifier[LtSourceModel] ( literal[string] , identifier[weight] , literal[string] , [ identifier[sourceconverter] . identifier[SourceGroup] ( literal[string] , identifier[eff_ruptures] = literal[int] )], identifier[gsim_lt] . identifier[get_num_paths] (), identifier[ordinal] = literal[int] , identifier[samples] = literal[int] ) keyword[return] identifier[cls] ( identifier[gsim_lt] , identifier[seed] = literal[int] , identifier[num_samples] = literal[int] , identifier[source_models] =[ identifier[fakeSM] ], identifier[totweight] = literal[int] )
def fake(cls, gsimlt=None): """ :returns: a fake `CompositionInfo` instance with the given gsim logic tree object; if None, builds automatically a fake gsim logic tree """ weight = 1 gsim_lt = gsimlt or logictree.GsimLogicTree.from_('[FromFile]') fakeSM = logictree.LtSourceModel('scenario', weight, 'b1', [sourceconverter.SourceGroup('*', eff_ruptures=1)], gsim_lt.get_num_paths(), ordinal=0, samples=1) return cls(gsim_lt, seed=0, num_samples=0, source_models=[fakeSM], totweight=0)
def match_events(events_from, events_to, left=True, right=True):
    '''Match one set of events to another.

    This is useful for tasks such as matching beats to the nearest
    detected onsets, or frame-aligned events to the nearest zero-crossing.

    .. note:: A target event may be matched to multiple source events.

    Examples
    --------
    >>> # Sources are multiples of 7
    >>> s_from = np.arange(0, 100, 7)
    >>> s_from
    array([ 0,  7, 14, 21, 28, 35, 42, 49, 56, 63, 70, 77, 84, 91, 98])
    >>> # Targets are multiples of 10
    >>> s_to = np.arange(0, 100, 10)
    >>> s_to
    array([ 0, 10, 20, 30, 40, 50, 60, 70, 80, 90])
    >>> # Find the matching
    >>> idx = librosa.util.match_events(s_from, s_to)
    >>> idx
    array([0, 1, 1, 2, 3, 3, 4, 5, 6, 6, 7, 8, 8, 9, 9])
    >>> # Print each source value to its matching target
    >>> zip(s_from, s_to[idx])
    [(0, 0), (7, 10), (14, 10), (21, 20), (28, 30), (35, 30),
     (42, 40), (49, 50), (56, 60), (63, 60), (70, 70), (77, 80),
     (84, 80), (91, 90), (98, 90)]

    Parameters
    ----------
    events_from : ndarray [shape=(n,)]
      Array of events (eg, times, sample or frame indices) to match from.

    events_to : ndarray [shape=(m,)]
      Array of events (eg, times, sample or frame indices) to match against.

    left : bool
    right : bool
        If `False`, then matched events cannot be to the left (or right)
        of source events.

    Returns
    -------
    event_mapping : np.ndarray [shape=(n,)]
        For each event in `events_from`, the corresponding event
        index in `events_to`.

        `event_mapping[i] == arg min |events_from[i] - events_to[:]|`

    See Also
    --------
    match_intervals

    Raises
    ------
    ParameterError
        If either array of input events is not the correct shape
    '''
    if len(events_from) == 0 or len(events_to) == 0:
        raise ParameterError('Attempting to match empty event list')

    # If we can't match left or right, then only strict equivalence
    # counts as a match.
    if not (left or right) and not np.all(np.in1d(events_from, events_to)):
        raise ParameterError('Cannot match events with left=right=False '
                             'and events_from is not contained '
                             'in events_to')

    # If we can't match to the left, then there should be at least one
    # target event greater-equal to every source event
    if (not left) and max(events_to) < max(events_from):
        raise ParameterError('Cannot match events with left=False '
                             'and max(events_to) < max(events_from)')

    # If we can't match to the right, then there should be at least one
    # target event less-equal to every source event
    if (not right) and min(events_to) > min(events_from):
        raise ParameterError('Cannot match events with right=False '
                             'and min(events_to) > min(events_from)')

    # array of matched items
    # NOTE: 'np.int' was the deprecated builtin alias (removed in
    # NumPy 1.24); use the plain 'int' it always resolved to.
    output = np.empty_like(events_from, dtype=int)

    return __match_events_helper(output, events_from, events_to, left, right)
def function[match_events, parameter[events_from, events_to, left, right]]: constant[Match one set of events to another. This is useful for tasks such as matching beats to the nearest detected onsets, or frame-aligned events to the nearest zero-crossing. .. note:: A target event may be matched to multiple source events. Examples -------- >>> # Sources are multiples of 7 >>> s_from = np.arange(0, 100, 7) >>> s_from array([ 0, 7, 14, 21, 28, 35, 42, 49, 56, 63, 70, 77, 84, 91, 98]) >>> # Targets are multiples of 10 >>> s_to = np.arange(0, 100, 10) >>> s_to array([ 0, 10, 20, 30, 40, 50, 60, 70, 80, 90]) >>> # Find the matching >>> idx = librosa.util.match_events(s_from, s_to) >>> idx array([0, 1, 1, 2, 3, 3, 4, 5, 6, 6, 7, 8, 8, 9, 9]) >>> # Print each source value to its matching target >>> zip(s_from, s_to[idx]) [(0, 0), (7, 10), (14, 10), (21, 20), (28, 30), (35, 30), (42, 40), (49, 50), (56, 60), (63, 60), (70, 70), (77, 80), (84, 80), (91, 90), (98, 90)] Parameters ---------- events_from : ndarray [shape=(n,)] Array of events (eg, times, sample or frame indices) to match from. events_to : ndarray [shape=(m,)] Array of events (eg, times, sample or frame indices) to match against. left : bool right : bool If `False`, then matched events cannot be to the left (or right) of source events. Returns ------- event_mapping : np.ndarray [shape=(n,)] For each event in `events_from`, the corresponding event index in `events_to`. 
`event_mapping[i] == arg min |events_from[i] - events_to[:]|` See Also -------- match_intervals Raises ------ ParameterError If either array of input events is not the correct shape ] if <ast.BoolOp object at 0x7da18f810dc0> begin[:] <ast.Raise object at 0x7da18f811e10> if <ast.BoolOp object at 0x7da18f813460> begin[:] <ast.Raise object at 0x7da18f810070> if <ast.BoolOp object at 0x7da18f8108b0> begin[:] <ast.Raise object at 0x7da18f8107f0> if <ast.BoolOp object at 0x7da18f813340> begin[:] <ast.Raise object at 0x7da18f813cd0> variable[output] assign[=] call[name[np].empty_like, parameter[name[events_from]]] return[call[name[__match_events_helper], parameter[name[output], name[events_from], name[events_to], name[left], name[right]]]]
keyword[def] identifier[match_events] ( identifier[events_from] , identifier[events_to] , identifier[left] = keyword[True] , identifier[right] = keyword[True] ): literal[string] keyword[if] identifier[len] ( identifier[events_from] )== literal[int] keyword[or] identifier[len] ( identifier[events_to] )== literal[int] : keyword[raise] identifier[ParameterError] ( literal[string] ) keyword[if] keyword[not] ( identifier[left] keyword[or] identifier[right] ) keyword[and] keyword[not] identifier[np] . identifier[all] ( identifier[np] . identifier[in1d] ( identifier[events_from] , identifier[events_to] )): keyword[raise] identifier[ParameterError] ( literal[string] literal[string] literal[string] ) keyword[if] ( keyword[not] identifier[left] ) keyword[and] identifier[max] ( identifier[events_to] )< identifier[max] ( identifier[events_from] ): keyword[raise] identifier[ParameterError] ( literal[string] literal[string] ) keyword[if] ( keyword[not] identifier[right] ) keyword[and] identifier[min] ( identifier[events_to] )> identifier[min] ( identifier[events_from] ): keyword[raise] identifier[ParameterError] ( literal[string] literal[string] ) identifier[output] = identifier[np] . identifier[empty_like] ( identifier[events_from] , identifier[dtype] = identifier[np] . identifier[int] ) keyword[return] identifier[__match_events_helper] ( identifier[output] , identifier[events_from] , identifier[events_to] , identifier[left] , identifier[right] )
def match_events(events_from, events_to, left=True, right=True): """Match one set of events to another. This is useful for tasks such as matching beats to the nearest detected onsets, or frame-aligned events to the nearest zero-crossing. .. note:: A target event may be matched to multiple source events. Examples -------- >>> # Sources are multiples of 7 >>> s_from = np.arange(0, 100, 7) >>> s_from array([ 0, 7, 14, 21, 28, 35, 42, 49, 56, 63, 70, 77, 84, 91, 98]) >>> # Targets are multiples of 10 >>> s_to = np.arange(0, 100, 10) >>> s_to array([ 0, 10, 20, 30, 40, 50, 60, 70, 80, 90]) >>> # Find the matching >>> idx = librosa.util.match_events(s_from, s_to) >>> idx array([0, 1, 1, 2, 3, 3, 4, 5, 6, 6, 7, 8, 8, 9, 9]) >>> # Print each source value to its matching target >>> zip(s_from, s_to[idx]) [(0, 0), (7, 10), (14, 10), (21, 20), (28, 30), (35, 30), (42, 40), (49, 50), (56, 60), (63, 60), (70, 70), (77, 80), (84, 80), (91, 90), (98, 90)] Parameters ---------- events_from : ndarray [shape=(n,)] Array of events (eg, times, sample or frame indices) to match from. events_to : ndarray [shape=(m,)] Array of events (eg, times, sample or frame indices) to match against. left : bool right : bool If `False`, then matched events cannot be to the left (or right) of source events. Returns ------- event_mapping : np.ndarray [shape=(n,)] For each event in `events_from`, the corresponding event index in `events_to`. `event_mapping[i] == arg min |events_from[i] - events_to[:]|` See Also -------- match_intervals Raises ------ ParameterError If either array of input events is not the correct shape """ if len(events_from) == 0 or len(events_to) == 0: raise ParameterError('Attempting to match empty event list') # depends on [control=['if'], data=[]] # If we can't match left or right, then only strict equivalence # counts as a match. 
if not (left or right) and (not np.all(np.in1d(events_from, events_to))): raise ParameterError('Cannot match events with left=right=False and events_from is not contained in events_to') # depends on [control=['if'], data=[]] # If we can't match to the left, then there should be at least one # target event greater-equal to every source event if not left and max(events_to) < max(events_from): raise ParameterError('Cannot match events with left=False and max(events_to) < max(events_from)') # depends on [control=['if'], data=[]] # If we can't match to the right, then there should be at least one # target event less-equal to every source event if not right and min(events_to) > min(events_from): raise ParameterError('Cannot match events with right=False and min(events_to) > min(events_from)') # depends on [control=['if'], data=[]] # array of matched items output = np.empty_like(events_from, dtype=np.int) return __match_events_helper(output, events_from, events_to, left, right)
def ask(self):
    """Return the wait time in seconds until the head item is ready.

    Raises ``Empty`` when the queue holds no items.  A return value of
    0 means the head item is already due; in that case waiting
    consumers are woken via ``self.ready``.  Note that a 0 result does
    not guarantee a subsequent ``get()`` will succeed: by the time the
    caller reacts, other threads may have put a different item at the
    head of the queue.
    """
    with self.mutex:
        if not self.queue:
            raise Empty
        now = dt.datetime.utcnow()
        head_due = self.queue[0][0]
        if head_due <= now:
            self.ready.notify()
            return 0
        return (head_due - now).total_seconds()
def function[ask, parameter[self]]: constant[ Return the wait time in seconds required to retrieve the item currently at the head of the queue. Note that there is no guarantee that a call to `get()` will succeed even if `ask()` returns 0. By the time the calling thread reacts, other threads may have caused a different item to be at the head of the queue. ] with name[self].mutex begin[:] if <ast.UnaryOp object at 0x7da1b021e770> begin[:] <ast.Raise object at 0x7da1afe70bb0> variable[utcnow] assign[=] call[name[dt].datetime.utcnow, parameter[]] if compare[call[call[name[self].queue][constant[0]]][constant[0]] less_or_equal[<=] name[utcnow]] begin[:] call[name[self].ready.notify, parameter[]] return[constant[0]] return[call[binary_operation[call[call[name[self].queue][constant[0]]][constant[0]] - name[utcnow]].total_seconds, parameter[]]]
keyword[def] identifier[ask] ( identifier[self] ): literal[string] keyword[with] identifier[self] . identifier[mutex] : keyword[if] keyword[not] identifier[len] ( identifier[self] . identifier[queue] ): keyword[raise] identifier[Empty] identifier[utcnow] = identifier[dt] . identifier[datetime] . identifier[utcnow] () keyword[if] identifier[self] . identifier[queue] [ literal[int] ][ literal[int] ]<= identifier[utcnow] : identifier[self] . identifier[ready] . identifier[notify] () keyword[return] literal[int] keyword[return] ( identifier[self] . identifier[queue] [ literal[int] ][ literal[int] ]- identifier[utcnow] ). identifier[total_seconds] ()
def ask(self): """ Return the wait time in seconds required to retrieve the item currently at the head of the queue. Note that there is no guarantee that a call to `get()` will succeed even if `ask()` returns 0. By the time the calling thread reacts, other threads may have caused a different item to be at the head of the queue. """ with self.mutex: if not len(self.queue): raise Empty # depends on [control=['if'], data=[]] utcnow = dt.datetime.utcnow() if self.queue[0][0] <= utcnow: self.ready.notify() return 0 # depends on [control=['if'], data=[]] return (self.queue[0][0] - utcnow).total_seconds() # depends on [control=['with'], data=[]]
def match(self, *args):
    """Whether or not to enter a given case statement"""
    # Once a case has matched, keep falling through to later cases.
    if not self.fall:
        # An empty argument list is the default case and always matches.
        self.fall = (not args) or (self.value in args)
    return self.fall
def function[match, parameter[self]]: constant[Whether or not to enter a given case statement] name[self].fall assign[=] <ast.BoolOp object at 0x7da1b04a6d40> name[self].fall assign[=] <ast.BoolOp object at 0x7da18f58f190> return[name[self].fall]
keyword[def] identifier[match] ( identifier[self] ,* identifier[args] ): literal[string] identifier[self] . identifier[fall] = identifier[self] . identifier[fall] keyword[or] keyword[not] identifier[args] identifier[self] . identifier[fall] = identifier[self] . identifier[fall] keyword[or] ( identifier[self] . identifier[value] keyword[in] identifier[args] ) keyword[return] identifier[self] . identifier[fall]
def match(self, *args): """Whether or not to enter a given case statement""" self.fall = self.fall or not args self.fall = self.fall or self.value in args return self.fall
def overloaded_build(type_, add_name=None):
    """Factory for constant transformers that apply to a given build
    instruction.

    Parameters
    ----------
    type_ : type
        The object type to overload the construction of. This must be one
        of the "buildable" types, or types with a "BUILD_*" instruction.
    add_name : str, optional
        The suffix of the instruction that adds elements to the
        collection. For example: 'add' or 'append'

    Returns
    -------
    transformer : subclass of CodeTransformer
        A new code transformer class that will overload the provided
        literal types.

    Raises
    ------
    TypeError
        If ``type_`` has no matching ``BUILD_*`` instruction, or
        ``add_name`` is given but no ``<TYPE>_<ADD_NAME>`` instruction
        exists.
    """
    typename = type_.__name__
    instrname = 'BUILD_' + typename.upper()
    dict_ = OrderedDict(
        __doc__=dedent(
            """
            A CodeTransformer for overloading {name} instructions.
            """.format(name=instrname)
        )
    )
    try:
        build_instr = getattr(instructions, instrname)
    except AttributeError:
        raise TypeError("type %s is not buildable" % typename)

    if add_name is not None:
        try:
            add_instr = getattr(
                instructions,
                '_'.join((typename, add_name)).upper(),
            )
        except AttributeError:
            # BUG FIX: the original constructed this TypeError but never
            # raised it, so execution fell through and later failed with
            # a confusing NameError on the unbound ``add_instr``.
            raise TypeError("type %s is not addable" % typename)

        dict_['_start_comprehension'] = pattern(
            build_instr, matchany[var], add_instr,
        )(_start_comprehension)
        dict_['_return_value'] = pattern(
            instructions.RETURN_VALUE,
            startcodes=(IN_COMPREHENSION,),
        )(_return_value)
    else:
        add_instr = None

    dict_['_build'] = pattern(build_instr)(_build)

    if not typename.endswith('s'):
        typename = typename + 's'

    return type(
        'overloaded_' + typename,
        (overloaded_constants(type_),),
        dict_,
    )
def function[overloaded_build, parameter[type_, add_name]]: constant[Factory for constant transformers that apply to a given build instruction. Parameters ---------- type_ : type The object type to overload the construction of. This must be one of "buildable" types, or types with a "BUILD_*" instruction. add_name : str, optional The suffix of the instruction tha adds elements to the collection. For example: 'add' or 'append' Returns ------- transformer : subclass of CodeTransformer A new code transformer class that will overload the provided literal types. ] variable[typename] assign[=] name[type_].__name__ variable[instrname] assign[=] binary_operation[constant[BUILD_] + call[name[typename].upper, parameter[]]] variable[dict_] assign[=] call[name[OrderedDict], parameter[]] <ast.Try object at 0x7da1b05db700> if compare[name[add_name] is_not constant[None]] begin[:] <ast.Try object at 0x7da1b05db2b0> call[name[dict_]][constant[_start_comprehension]] assign[=] call[call[name[pattern], parameter[name[build_instr], call[name[matchany]][name[var]], name[add_instr]]], parameter[name[_start_comprehension]]] call[name[dict_]][constant[_return_value]] assign[=] call[call[name[pattern], parameter[name[instructions].RETURN_VALUE]], parameter[name[_return_value]]] call[name[dict_]][constant[_build]] assign[=] call[call[name[pattern], parameter[name[build_instr]]], parameter[name[_build]]] if <ast.UnaryOp object at 0x7da1b05b45b0> begin[:] variable[typename] assign[=] binary_operation[name[typename] + constant[s]] return[call[name[type], parameter[binary_operation[constant[overloaded_] + name[typename]], tuple[[<ast.Call object at 0x7da1b05b6f80>]], name[dict_]]]]
keyword[def] identifier[overloaded_build] ( identifier[type_] , identifier[add_name] = keyword[None] ): literal[string] identifier[typename] = identifier[type_] . identifier[__name__] identifier[instrname] = literal[string] + identifier[typename] . identifier[upper] () identifier[dict_] = identifier[OrderedDict] ( identifier[__doc__] = identifier[dedent] ( literal[string] . identifier[format] ( identifier[name] = identifier[instrname] ) ) ) keyword[try] : identifier[build_instr] = identifier[getattr] ( identifier[instructions] , identifier[instrname] ) keyword[except] identifier[AttributeError] : keyword[raise] identifier[TypeError] ( literal[string] % identifier[typename] ) keyword[if] identifier[add_name] keyword[is] keyword[not] keyword[None] : keyword[try] : identifier[add_instr] = identifier[getattr] ( identifier[instructions] , literal[string] . identifier[join] (( identifier[typename] , identifier[add_name] )). identifier[upper] (), ) keyword[except] identifier[AttributeError] : identifier[TypeError] ( literal[string] % identifier[typename] ) identifier[dict_] [ literal[string] ]= identifier[pattern] ( identifier[build_instr] , identifier[matchany] [ identifier[var] ], identifier[add_instr] , )( identifier[_start_comprehension] ) identifier[dict_] [ literal[string] ]= identifier[pattern] ( identifier[instructions] . identifier[RETURN_VALUE] , identifier[startcodes] =( identifier[IN_COMPREHENSION] ,), )( identifier[_return_value] ) keyword[else] : identifier[add_instr] = keyword[None] identifier[dict_] [ literal[string] ]= identifier[pattern] ( identifier[build_instr] )( identifier[_build] ) keyword[if] keyword[not] identifier[typename] . identifier[endswith] ( literal[string] ): identifier[typename] = identifier[typename] + literal[string] keyword[return] identifier[type] ( literal[string] + identifier[typename] , ( identifier[overloaded_constants] ( identifier[type_] ),), identifier[dict_] , )
def overloaded_build(type_, add_name=None): """Factory for constant transformers that apply to a given build instruction. Parameters ---------- type_ : type The object type to overload the construction of. This must be one of "buildable" types, or types with a "BUILD_*" instruction. add_name : str, optional The suffix of the instruction tha adds elements to the collection. For example: 'add' or 'append' Returns ------- transformer : subclass of CodeTransformer A new code transformer class that will overload the provided literal types. """ typename = type_.__name__ instrname = 'BUILD_' + typename.upper() dict_ = OrderedDict(__doc__=dedent('\n A CodeTransformer for overloading {name} instructions.\n '.format(name=instrname))) try: build_instr = getattr(instructions, instrname) # depends on [control=['try'], data=[]] except AttributeError: raise TypeError('type %s is not buildable' % typename) # depends on [control=['except'], data=[]] if add_name is not None: try: add_instr = getattr(instructions, '_'.join((typename, add_name)).upper()) # depends on [control=['try'], data=[]] except AttributeError: TypeError('type %s is not addable' % typename) # depends on [control=['except'], data=[]] dict_['_start_comprehension'] = pattern(build_instr, matchany[var], add_instr)(_start_comprehension) dict_['_return_value'] = pattern(instructions.RETURN_VALUE, startcodes=(IN_COMPREHENSION,))(_return_value) # depends on [control=['if'], data=['add_name']] else: add_instr = None dict_['_build'] = pattern(build_instr)(_build) if not typename.endswith('s'): typename = typename + 's' # depends on [control=['if'], data=[]] return type('overloaded_' + typename, (overloaded_constants(type_),), dict_)
def _get_funcs(self):
    """
    Returns a 32-bit value stating supported I2C functions.

    The value is obtained via the ``I2C_FUNCS`` ioctl on the open
    bus file descriptor.

    :rtype: int
    """
    funcs = c_uint32()
    ioctl(self.fd, I2C_FUNCS, funcs)
    return funcs.value
def function[_get_funcs, parameter[self]]: constant[ Returns a 32-bit value stating supported I2C functions. :rtype: int ] variable[f] assign[=] call[name[c_uint32], parameter[]] call[name[ioctl], parameter[name[self].fd, name[I2C_FUNCS], name[f]]] return[name[f].value]
keyword[def] identifier[_get_funcs] ( identifier[self] ): literal[string] identifier[f] = identifier[c_uint32] () identifier[ioctl] ( identifier[self] . identifier[fd] , identifier[I2C_FUNCS] , identifier[f] ) keyword[return] identifier[f] . identifier[value]
def _get_funcs(self): """ Returns a 32-bit value stating supported I2C functions. :rtype: int """ f = c_uint32() ioctl(self.fd, I2C_FUNCS, f) return f.value
def byaxis_in(self):
    """Object to index along input (domain) dimensions.

    Examples
    --------
    Indexing with integers or slices:

    >>> space = odl.uniform_discr([0, 0, 0], [1, 2, 3], (5, 10, 15))
    >>> space.byaxis_in[0]
    uniform_discr(0.0, 1.0, 5)
    >>> space.byaxis_in[1]
    uniform_discr(0.0, 2.0, 10)
    >>> space.byaxis_in[1:]
    uniform_discr([ 0.,  0.], [ 2.,  3.], (10, 15))

    Lists can be used to stack spaces arbitrarily:

    >>> space.byaxis_in[[2, 1, 2]]
    uniform_discr([ 0.,  0.,  0.], [ 3.,  2.,  3.], (15, 10, 15))
    """
    # Capture the enclosing space so the helper class below can close
    # over it; ``self`` inside the helper refers to the indexer object.
    space = self

    class DiscreteLpByaxisIn(object):
        """Helper class for indexing by domain axes."""

        def __getitem__(self, indices):
            """Return ``self[indices]``.

            Parameters
            ----------
            indices : index expression
                Object used to index the space domain.

            Returns
            -------
            space : `DiscreteLp`
                The resulting space with indexed domain and otherwise
                same properties (except possibly weighting).
            """
            fspace = space.fspace.byaxis_in[indices]
            part = space.partition.byaxis[indices]
            if isinstance(space.weighting, ConstWeighting):
                # Need to manually construct `tspace` since it doesn't
                # know where its weighting factor comes from
                try:
                    # Distinguish a single index/slice from a sequence
                    # of axis indices via iterability.
                    iter(indices)
                except TypeError:
                    newshape = space.shape[indices]
                else:
                    newshape = tuple(space.shape[int(i)] for i in indices)

                # Constant weight is recomputed from the indexed
                # partition's cell volume.
                weighting = part.cell_volume
                tspace = type(space.tspace)(
                    newshape, space.dtype, exponent=space.exponent,
                    weighting=weighting)
            else:
                # Other weighting schemes are handled correctly by
                # the tensor space
                tspace = space.tspace.byaxis[indices]

            try:
                # Same iterable-vs-scalar distinction as above, now for
                # per-axis interpolation and labels.
                iter(indices)
            except TypeError:
                interp = space.interp_byaxis[indices]
                labels = space.axis_labels[indices]
            else:
                interp = tuple(space.interp_byaxis[int(i)] for i in indices)
                labels = tuple(space.axis_labels[int(i)] for i in indices)

            return DiscreteLp(fspace, part, tspace, interp,
                              axis_labels=labels)

        def __repr__(self):
            """Return ``repr(self)``."""
            return repr(space) + '.byaxis_in'

    return DiscreteLpByaxisIn()
def function[byaxis_in, parameter[self]]: constant[Object to index along input (domain) dimensions. Examples -------- Indexing with integers or slices: >>> space = odl.uniform_discr([0, 0, 0], [1, 2, 3], (5, 10, 15)) >>> space.byaxis_in[0] uniform_discr(0.0, 1.0, 5) >>> space.byaxis_in[1] uniform_discr(0.0, 2.0, 10) >>> space.byaxis_in[1:] uniform_discr([ 0., 0.], [ 2., 3.], (10, 15)) Lists can be used to stack spaces arbitrarily: >>> space.byaxis_in[[2, 1, 2]] uniform_discr([ 0., 0., 0.], [ 3., 2., 3.], (15, 10, 15)) ] variable[space] assign[=] name[self] class class[DiscreteLpByaxisIn, parameter[]] begin[:] constant[Helper class for indexing by domain axes.] def function[__getitem__, parameter[self, indices]]: constant[Return ``self[indices]``. Parameters ---------- indices : index expression Object used to index the space domain. Returns ------- space : `DiscreteLp` The resulting space with indexed domain and otherwise same properties (except possibly weighting). ] variable[fspace] assign[=] call[name[space].fspace.byaxis_in][name[indices]] variable[part] assign[=] call[name[space].partition.byaxis][name[indices]] if call[name[isinstance], parameter[name[space].weighting, name[ConstWeighting]]] begin[:] <ast.Try object at 0x7da1b1e5dff0> variable[weighting] assign[=] name[part].cell_volume variable[tspace] assign[=] call[call[name[type], parameter[name[space].tspace]], parameter[name[newshape], name[space].dtype]] <ast.Try object at 0x7da1b1e5d600> return[call[name[DiscreteLp], parameter[name[fspace], name[part], name[tspace], name[interp]]]] def function[__repr__, parameter[self]]: constant[Return ``repr(self)``.] return[binary_operation[call[name[repr], parameter[name[space]]] + constant[.byaxis_in]]] return[call[name[DiscreteLpByaxisIn], parameter[]]]
keyword[def] identifier[byaxis_in] ( identifier[self] ): literal[string] identifier[space] = identifier[self] keyword[class] identifier[DiscreteLpByaxisIn] ( identifier[object] ): literal[string] keyword[def] identifier[__getitem__] ( identifier[self] , identifier[indices] ): literal[string] identifier[fspace] = identifier[space] . identifier[fspace] . identifier[byaxis_in] [ identifier[indices] ] identifier[part] = identifier[space] . identifier[partition] . identifier[byaxis] [ identifier[indices] ] keyword[if] identifier[isinstance] ( identifier[space] . identifier[weighting] , identifier[ConstWeighting] ): keyword[try] : identifier[iter] ( identifier[indices] ) keyword[except] identifier[TypeError] : identifier[newshape] = identifier[space] . identifier[shape] [ identifier[indices] ] keyword[else] : identifier[newshape] = identifier[tuple] ( identifier[space] . identifier[shape] [ identifier[int] ( identifier[i] )] keyword[for] identifier[i] keyword[in] identifier[indices] ) identifier[weighting] = identifier[part] . identifier[cell_volume] identifier[tspace] = identifier[type] ( identifier[space] . identifier[tspace] )( identifier[newshape] , identifier[space] . identifier[dtype] , identifier[exponent] = identifier[space] . identifier[exponent] , identifier[weighting] = identifier[weighting] ) keyword[else] : identifier[tspace] = identifier[space] . identifier[tspace] . identifier[byaxis] [ identifier[indices] ] keyword[try] : identifier[iter] ( identifier[indices] ) keyword[except] identifier[TypeError] : identifier[interp] = identifier[space] . identifier[interp_byaxis] [ identifier[indices] ] identifier[labels] = identifier[space] . identifier[axis_labels] [ identifier[indices] ] keyword[else] : identifier[interp] = identifier[tuple] ( identifier[space] . identifier[interp_byaxis] [ identifier[int] ( identifier[i] )] keyword[for] identifier[i] keyword[in] identifier[indices] ) identifier[labels] = identifier[tuple] ( identifier[space] . 
identifier[axis_labels] [ identifier[int] ( identifier[i] )] keyword[for] identifier[i] keyword[in] identifier[indices] ) keyword[return] identifier[DiscreteLp] ( identifier[fspace] , identifier[part] , identifier[tspace] , identifier[interp] , identifier[axis_labels] = identifier[labels] ) keyword[def] identifier[__repr__] ( identifier[self] ): literal[string] keyword[return] identifier[repr] ( identifier[space] )+ literal[string] keyword[return] identifier[DiscreteLpByaxisIn] ()
def byaxis_in(self): """Object to index along input (domain) dimensions. Examples -------- Indexing with integers or slices: >>> space = odl.uniform_discr([0, 0, 0], [1, 2, 3], (5, 10, 15)) >>> space.byaxis_in[0] uniform_discr(0.0, 1.0, 5) >>> space.byaxis_in[1] uniform_discr(0.0, 2.0, 10) >>> space.byaxis_in[1:] uniform_discr([ 0., 0.], [ 2., 3.], (10, 15)) Lists can be used to stack spaces arbitrarily: >>> space.byaxis_in[[2, 1, 2]] uniform_discr([ 0., 0., 0.], [ 3., 2., 3.], (15, 10, 15)) """ space = self class DiscreteLpByaxisIn(object): """Helper class for indexing by domain axes.""" def __getitem__(self, indices): """Return ``self[indices]``. Parameters ---------- indices : index expression Object used to index the space domain. Returns ------- space : `DiscreteLp` The resulting space with indexed domain and otherwise same properties (except possibly weighting). """ fspace = space.fspace.byaxis_in[indices] part = space.partition.byaxis[indices] if isinstance(space.weighting, ConstWeighting): # Need to manually construct `tspace` since it doesn't # know where its weighting factor comes from try: iter(indices) # depends on [control=['try'], data=[]] except TypeError: newshape = space.shape[indices] # depends on [control=['except'], data=[]] else: newshape = tuple((space.shape[int(i)] for i in indices)) weighting = part.cell_volume tspace = type(space.tspace)(newshape, space.dtype, exponent=space.exponent, weighting=weighting) # depends on [control=['if'], data=[]] else: # Other weighting schemes are handled correctly by # the tensor space tspace = space.tspace.byaxis[indices] try: iter(indices) # depends on [control=['try'], data=[]] except TypeError: interp = space.interp_byaxis[indices] labels = space.axis_labels[indices] # depends on [control=['except'], data=[]] else: interp = tuple((space.interp_byaxis[int(i)] for i in indices)) labels = tuple((space.axis_labels[int(i)] for i in indices)) return DiscreteLp(fspace, part, tspace, interp, axis_labels=labels) 
def __repr__(self): """Return ``repr(self)``.""" return repr(space) + '.byaxis_in' return DiscreteLpByaxisIn()
def _start_watching_events(self, replace=False):
    """Start the events reflector.

    When ``replace`` is False and the event reflector is already
    running, nothing happens.  When ``replace`` is True, a running
    reflector is stopped and a new one started (for recovering from
    possible errors).
    """
    # Only watch events attached to Pod objects.
    watched_fields = {"involvedObject.kind": "Pod"}
    return self._start_reflector(
        "events", EventReflector, fields=watched_fields, replace=replace
    )
def function[_start_watching_events, parameter[self, replace]]: constant[Start the events reflector If replace=False and the event reflector is already running, do nothing. If replace=True, a running pod reflector will be stopped and a new one started (for recovering from possible errors). ] return[call[name[self]._start_reflector, parameter[constant[events], name[EventReflector]]]]
keyword[def] identifier[_start_watching_events] ( identifier[self] , identifier[replace] = keyword[False] ): literal[string] keyword[return] identifier[self] . identifier[_start_reflector] ( literal[string] , identifier[EventReflector] , identifier[fields] ={ literal[string] : literal[string] }, identifier[replace] = identifier[replace] , )
def _start_watching_events(self, replace=False): """Start the events reflector If replace=False and the event reflector is already running, do nothing. If replace=True, a running pod reflector will be stopped and a new one started (for recovering from possible errors). """ return self._start_reflector('events', EventReflector, fields={'involvedObject.kind': 'Pod'}, replace=replace)
def form_valid(self, form):
    """This is what's called when the form is valid.

    Saves the contact message (sanitising it when configured), records
    the sender's IP and the current site, and optionally emits the
    ``contact_form_valid`` signal before delegating to the parent view.
    """
    # Defer the save so extra fields can be filled in first.
    instance = form.save(commit=False)
    if hasattr(self.request, 'user'):
        instance.user = self.request.user
    if settings.CONTACT_FORM_FILTER_MESSAGE:
        # Strip/escape disallowed HTML in the user-supplied message.
        instance.message = bleach.clean(
            instance.message,
            tags=settings.CONTACT_FORM_ALLOWED_MESSAGE_TAGS,
            strip=settings.CONTACT_FORM_STRIP_MESSAGE
        )
    instance.ip = get_user_ip(self.request)
    instance.site = self.site
    instance.save()
    if settings.CONTACT_FORM_USE_SIGNALS:
        # NOTE(review): receivers of this signal presumably handle the
        # outgoing notification e-mail — confirm against the registered
        # signal handlers.
        contact_form_valid.send(
            sender=self,
            event=self.valid_event,
            ip=instance.ip,
            site=self.site,
            sender_name=instance.sender_name,
            sender_email=instance.sender_email,
            email=instance.subject.department.email,
            subject=instance.subject.title,
            message=instance.message
        )
    return super(ContactFormView, self).form_valid(form)
def function[form_valid, parameter[self, form]]: constant[This is what's called when the form is valid.] variable[instance] assign[=] call[name[form].save, parameter[]] if call[name[hasattr], parameter[name[self].request, constant[user]]] begin[:] name[instance].user assign[=] name[self].request.user if name[settings].CONTACT_FORM_FILTER_MESSAGE begin[:] name[instance].message assign[=] call[name[bleach].clean, parameter[name[instance].message]] name[instance].ip assign[=] call[name[get_user_ip], parameter[name[self].request]] name[instance].site assign[=] name[self].site call[name[instance].save, parameter[]] if name[settings].CONTACT_FORM_USE_SIGNALS begin[:] call[name[contact_form_valid].send, parameter[]] return[call[call[name[super], parameter[name[ContactFormView], name[self]]].form_valid, parameter[name[form]]]]
keyword[def] identifier[form_valid] ( identifier[self] , identifier[form] ): literal[string] identifier[instance] = identifier[form] . identifier[save] ( identifier[commit] = keyword[False] ) keyword[if] identifier[hasattr] ( identifier[self] . identifier[request] , literal[string] ): identifier[instance] . identifier[user] = identifier[self] . identifier[request] . identifier[user] keyword[if] identifier[settings] . identifier[CONTACT_FORM_FILTER_MESSAGE] : identifier[instance] . identifier[message] = identifier[bleach] . identifier[clean] ( identifier[instance] . identifier[message] , identifier[tags] = identifier[settings] . identifier[CONTACT_FORM_ALLOWED_MESSAGE_TAGS] , identifier[strip] = identifier[settings] . identifier[CONTACT_FORM_STRIP_MESSAGE] ) identifier[instance] . identifier[ip] = identifier[get_user_ip] ( identifier[self] . identifier[request] ) identifier[instance] . identifier[site] = identifier[self] . identifier[site] identifier[instance] . identifier[save] () keyword[if] identifier[settings] . identifier[CONTACT_FORM_USE_SIGNALS] : identifier[contact_form_valid] . identifier[send] ( identifier[sender] = identifier[self] , identifier[event] = identifier[self] . identifier[valid_event] , identifier[ip] = identifier[instance] . identifier[ip] , identifier[site] = identifier[self] . identifier[site] , identifier[sender_name] = identifier[instance] . identifier[sender_name] , identifier[sender_email] = identifier[instance] . identifier[sender_email] , identifier[email] = identifier[instance] . identifier[subject] . identifier[department] . identifier[email] , identifier[subject] = identifier[instance] . identifier[subject] . identifier[title] , identifier[message] = identifier[instance] . identifier[message] ) keyword[return] identifier[super] ( identifier[ContactFormView] , identifier[self] ). identifier[form_valid] ( identifier[form] )
def form_valid(self, form): """This is what's called when the form is valid.""" instance = form.save(commit=False) if hasattr(self.request, 'user'): instance.user = self.request.user # depends on [control=['if'], data=[]] if settings.CONTACT_FORM_FILTER_MESSAGE: instance.message = bleach.clean(instance.message, tags=settings.CONTACT_FORM_ALLOWED_MESSAGE_TAGS, strip=settings.CONTACT_FORM_STRIP_MESSAGE) # depends on [control=['if'], data=[]] instance.ip = get_user_ip(self.request) instance.site = self.site instance.save() if settings.CONTACT_FORM_USE_SIGNALS: contact_form_valid.send(sender=self, event=self.valid_event, ip=instance.ip, site=self.site, sender_name=instance.sender_name, sender_email=instance.sender_email, email=instance.subject.department.email, subject=instance.subject.title, message=instance.message) # depends on [control=['if'], data=[]] return super(ContactFormView, self).form_valid(form)
def dispatch_hook(cls, s=None, *_args, **_kwds):
    # type: (Optional[str], *Any, **Any) -> base_classes.Packet_metaclass
    """dispatch_hook returns the subclass of HPackHeaders that must be used
    to dissect the string.
    """
    if s is None:
        return config.conf.raw_layer

    first_byte = orb(s[0])
    # Highest set bit of the first byte selects the header type
    # (checked from most- to least-significant, order matters).
    for mask, header_cls in (
        (0x80, HPackIndexedHdr),
        (0x40, HPackLitHdrFldWithIncrIndexing),
        (0x20, HPackDynamicSizeUpdate),
    ):
        if first_byte & mask:
            return header_cls
    return HPackLitHdrFldWithoutIndexing
def function[dispatch_hook, parameter[cls, s]]: constant[dispatch_hook returns the subclass of HPackHeaders that must be used to dissect the string. ] if compare[name[s] is constant[None]] begin[:] return[name[config].conf.raw_layer] variable[fb] assign[=] call[name[orb], parameter[call[name[s]][constant[0]]]] if compare[binary_operation[name[fb] <ast.BitAnd object at 0x7da2590d6b60> constant[128]] not_equal[!=] constant[0]] begin[:] return[name[HPackIndexedHdr]] if compare[binary_operation[name[fb] <ast.BitAnd object at 0x7da2590d6b60> constant[64]] not_equal[!=] constant[0]] begin[:] return[name[HPackLitHdrFldWithIncrIndexing]] if compare[binary_operation[name[fb] <ast.BitAnd object at 0x7da2590d6b60> constant[32]] not_equal[!=] constant[0]] begin[:] return[name[HPackDynamicSizeUpdate]] return[name[HPackLitHdrFldWithoutIndexing]]
keyword[def] identifier[dispatch_hook] ( identifier[cls] , identifier[s] = keyword[None] ,* identifier[_args] ,** identifier[_kwds] ): literal[string] keyword[if] identifier[s] keyword[is] keyword[None] : keyword[return] identifier[config] . identifier[conf] . identifier[raw_layer] identifier[fb] = identifier[orb] ( identifier[s] [ literal[int] ]) keyword[if] identifier[fb] & literal[int] != literal[int] : keyword[return] identifier[HPackIndexedHdr] keyword[if] identifier[fb] & literal[int] != literal[int] : keyword[return] identifier[HPackLitHdrFldWithIncrIndexing] keyword[if] identifier[fb] & literal[int] != literal[int] : keyword[return] identifier[HPackDynamicSizeUpdate] keyword[return] identifier[HPackLitHdrFldWithoutIndexing]
def dispatch_hook(cls, s=None, *_args, **_kwds): # type: (Optional[str], *Any, **Any) -> base_classes.Packet_metaclass 'dispatch_hook returns the subclass of HPackHeaders that must be used\n to dissect the string.\n ' if s is None: return config.conf.raw_layer # depends on [control=['if'], data=[]] fb = orb(s[0]) if fb & 128 != 0: return HPackIndexedHdr # depends on [control=['if'], data=[]] if fb & 64 != 0: return HPackLitHdrFldWithIncrIndexing # depends on [control=['if'], data=[]] if fb & 32 != 0: return HPackDynamicSizeUpdate # depends on [control=['if'], data=[]] return HPackLitHdrFldWithoutIndexing
def last_timestamp(self, event_key=None):
    """Obtain the last timestamp.

    Args:
      event_key: the type key of the sought events (e.g.,
        constants.NAN_KEY). If None, includes all event type keys.

    Returns:
      Last (latest) timestamp of all the events of the given type (or all
      event types if event_key is None).
    """
    if event_key is None:
        # BUG FIX: this branch previously collected ``first_timestamp``
        # values, so the all-keys case answered the wrong question.
        timestamps = [self._trackers[key].last_timestamp
                      for key in self._trackers]
        # Negative timestamps are excluded — presumably "no event seen
        # yet" sentinels; confirm against the tracker implementation.
        return max(timestamp for timestamp in timestamps if timestamp >= 0)
    else:
        return self._trackers[event_key].last_timestamp
def function[last_timestamp, parameter[self, event_key]]: constant[Obtain the last timestamp. Args: event_key: the type key of the sought events (e.g., constants.NAN_KEY). If None, includes all event type keys. Returns: Last (latest) timestamp of all the events of the given type (or all event types if event_key is None). ] if compare[name[event_key] is constant[None]] begin[:] variable[timestamps] assign[=] <ast.ListComp object at 0x7da1b2168cd0> return[call[name[max], parameter[<ast.GeneratorExp object at 0x7da1b2169750>]]]
keyword[def] identifier[last_timestamp] ( identifier[self] , identifier[event_key] = keyword[None] ): literal[string] keyword[if] identifier[event_key] keyword[is] keyword[None] : identifier[timestamps] =[ identifier[self] . identifier[_trackers] [ identifier[key] ]. identifier[first_timestamp] keyword[for] identifier[key] keyword[in] identifier[self] . identifier[_trackers] ] keyword[return] identifier[max] ( identifier[timestamp] keyword[for] identifier[timestamp] keyword[in] identifier[timestamps] keyword[if] identifier[timestamp] >= literal[int] ) keyword[else] : keyword[return] identifier[self] . identifier[_trackers] [ identifier[event_key] ]. identifier[last_timestamp]
def last_timestamp(self, event_key=None): """Obtain the last timestamp. Args: event_key: the type key of the sought events (e.g., constants.NAN_KEY). If None, includes all event type keys. Returns: Last (latest) timestamp of all the events of the given type (or all event types if event_key is None). """ if event_key is None: timestamps = [self._trackers[key].first_timestamp for key in self._trackers] return max((timestamp for timestamp in timestamps if timestamp >= 0)) # depends on [control=['if'], data=[]] else: return self._trackers[event_key].last_timestamp
def decode_char_spot(raw_string):
    """Chop Line from DX-Cluster into pieces and return a dict with the spot data

    Args:
        raw_string: one raw spot line as received from the DX-Cluster.

    Returns:
        dict keyed by const.SPOTTER, const.FREQUENCY, const.DX,
        const.COMMENT and const.TIME (a timezone-aware UTC datetime).

    Raises:
        ValueError: if the spotter callsign or the frequency cannot be
            decoded from the fixed-width fields of the line.
    """
    data = {}

    # Spotter callsign (fixed-width field; run each regex once instead of
    # twice as the previous implementation did).
    spotter_match = re.match('[A-Za-z0-9\/]+[:$]', raw_string[6:15])
    if spotter_match:
        # Strip the trailing ':' delimiter from the callsign.
        data[const.SPOTTER] = re.sub(':', '', spotter_match.group(0))
    else:
        raise ValueError("could not decode spotter callsign")

    # Frequency in kHz, somewhere within the fixed-width field.
    freq_match = re.search('[0-9\.]{5,12}', raw_string[10:25])
    if freq_match:
        data[const.FREQUENCY] = float(freq_match.group(0))
    else:
        raise ValueError("could not decode frequency")

    # DX callsign: strip everything that is not a callsign character.
    data[const.DX] = re.sub('[^A-Za-z0-9\/]+', '', raw_string[26:38])
    # Comment: collapse disallowed characters to single spaces.
    data[const.COMMENT] = re.sub('[^\sA-Za-z0-9\.,;\#\+\-!\?\$\(\)@\/]+', ' ', raw_string[39:69]).strip()
    # Spot lines carry no date, so stamp with the current UTC time.
    data[const.TIME] = datetime.now().replace(tzinfo=UTC)

    return data
def function[decode_char_spot, parameter[raw_string]]: constant[Chop Line from DX-Cluster into pieces and return a dict with the spot data] variable[data] assign[=] dictionary[[], []] if call[name[re].match, parameter[constant[[A-Za-z0-9\/]+[:$]], call[name[raw_string]][<ast.Slice object at 0x7da1b0f327a0>]]] begin[:] call[name[data]][name[const].SPOTTER] assign[=] call[name[re].sub, parameter[constant[:], constant[], call[call[name[re].match, parameter[constant[[A-Za-z0-9\/]+[:$]], call[name[raw_string]][<ast.Slice object at 0x7da1b0f32c80>]]].group, parameter[constant[0]]]]] if call[name[re].search, parameter[constant[[0-9\.]{5,12}], call[name[raw_string]][<ast.Slice object at 0x7da1b0f33a00>]]] begin[:] call[name[data]][name[const].FREQUENCY] assign[=] call[name[float], parameter[call[call[name[re].search, parameter[constant[[0-9\.]{5,12}], call[name[raw_string]][<ast.Slice object at 0x7da1b0f13b50>]]].group, parameter[constant[0]]]]] call[name[data]][name[const].DX] assign[=] call[name[re].sub, parameter[constant[[^A-Za-z0-9\/]+], constant[], call[name[raw_string]][<ast.Slice object at 0x7da1b106f160>]]] call[name[data]][name[const].COMMENT] assign[=] call[call[name[re].sub, parameter[constant[[^\sA-Za-z0-9\.,;\#\+\-!\?\$\(\)@\/]+], constant[ ], call[name[raw_string]][<ast.Slice object at 0x7da1b106cd60>]]].strip, parameter[]] call[name[data]][name[const].TIME] assign[=] call[call[name[datetime].now, parameter[]].replace, parameter[]] return[name[data]]
keyword[def] identifier[decode_char_spot] ( identifier[raw_string] ): literal[string] identifier[data] ={} keyword[if] identifier[re] . identifier[match] ( literal[string] , identifier[raw_string] [ literal[int] : literal[int] ]): identifier[data] [ identifier[const] . identifier[SPOTTER] ]= identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[re] . identifier[match] ( literal[string] , identifier[raw_string] [ literal[int] : literal[int] ]). identifier[group] ( literal[int] )) keyword[else] : keyword[raise] identifier[ValueError] keyword[if] identifier[re] . identifier[search] ( literal[string] , identifier[raw_string] [ literal[int] : literal[int] ]): identifier[data] [ identifier[const] . identifier[FREQUENCY] ]= identifier[float] ( identifier[re] . identifier[search] ( literal[string] , identifier[raw_string] [ literal[int] : literal[int] ]). identifier[group] ( literal[int] )) keyword[else] : keyword[raise] identifier[ValueError] identifier[data] [ identifier[const] . identifier[DX] ]= identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[raw_string] [ literal[int] : literal[int] ]) identifier[data] [ identifier[const] . identifier[COMMENT] ]= identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[raw_string] [ literal[int] : literal[int] ]). identifier[strip] () identifier[data] [ identifier[const] . identifier[TIME] ]= identifier[datetime] . identifier[now] (). identifier[replace] ( identifier[tzinfo] = identifier[UTC] ) keyword[return] identifier[data]
def decode_char_spot(raw_string): """Chop Line from DX-Cluster into pieces and return a dict with the spot data""" data = {} # Spotter callsign if re.match('[A-Za-z0-9\\/]+[:$]', raw_string[6:15]): data[const.SPOTTER] = re.sub(':', '', re.match('[A-Za-z0-9\\/]+[:$]', raw_string[6:15]).group(0)) # depends on [control=['if'], data=[]] else: raise ValueError if re.search('[0-9\\.]{5,12}', raw_string[10:25]): data[const.FREQUENCY] = float(re.search('[0-9\\.]{5,12}', raw_string[10:25]).group(0)) # depends on [control=['if'], data=[]] else: raise ValueError data[const.DX] = re.sub('[^A-Za-z0-9\\/]+', '', raw_string[26:38]) data[const.COMMENT] = re.sub('[^\\sA-Za-z0-9\\.,;\\#\\+\\-!\\?\\$\\(\\)@\\/]+', ' ', raw_string[39:69]).strip() data[const.TIME] = datetime.now().replace(tzinfo=UTC) return data
def _process_transfer(self, ud, ase, offsets, data):
    # type: (Uploader, blobxfer.models.upload.Descriptor,
    #        blobxfer.models.azure.StorageEntity,
    #        blobxfer.models.upload.Offsets, bytes) -> None
    """Process transfer instructions
    :param Uploader self: this
    :param blobxfer.models.upload.Descriptor ud: upload descriptor
    :param blobxfer.models.azure.StorageEntity ase: Storage entity
    :param blobxfer.models.upload.Offsets offsets: offsets
    :param bytes data: data to upload
    """
    # issue put range first so the accounting below only ever reflects
    # chunks that have actually been uploaded
    self._put_data(ud, ase, offsets, data)
    # accounting: the byte counters and the in-flight transfer set are
    # shared state, so all updates happen under the transfer lock
    with self._transfer_lock:
        if ud.local_path.use_stdin:
            # stdin has no size known up front: grow the total chunk by chunk
            self._upload_bytes_total += offsets.num_bytes
        elif offsets.chunk_num == 0:
            # regular file: add the full entity size exactly once, on the
            # first chunk
            self._upload_bytes_total += ase.size
        self._upload_bytes_sofar += offsets.num_bytes
        # this chunk is no longer in flight
        self._transfer_set.remove(
            blobxfer.operations.upload.Uploader.create_unique_transfer_id(
                ud.local_path, ase, offsets))
    ud.complete_offset_upload(offsets.chunk_num)
    # add descriptor back to upload queue only for append blobs
    # (presumably because append-blob chunks must be issued one at a
    # time in order -- confirm against the queue consumer)
    if ud.entity.mode == blobxfer.models.azure.StorageModes.Append:
        self._upload_queue.put(ud)
    # update progress bar
    self._update_progress_bar(stdin=ud.local_path.use_stdin)
def function[_process_transfer, parameter[self, ud, ase, offsets, data]]: constant[Process transfer instructions :param Uploader self: this :param blobxfer.models.upload.Descriptor ud: upload descriptor :param blobxfer.models.azure.StorageEntity ase: Storage entity :param blobxfer.models.upload.Offsets offsets: offsets :param bytes data: data to upload ] call[name[self]._put_data, parameter[name[ud], name[ase], name[offsets], name[data]]] with name[self]._transfer_lock begin[:] if name[ud].local_path.use_stdin begin[:] <ast.AugAssign object at 0x7da18dc9b4c0> <ast.AugAssign object at 0x7da18dc99240> call[name[self]._transfer_set.remove, parameter[call[name[blobxfer].operations.upload.Uploader.create_unique_transfer_id, parameter[name[ud].local_path, name[ase], name[offsets]]]]] call[name[ud].complete_offset_upload, parameter[name[offsets].chunk_num]] if compare[name[ud].entity.mode equal[==] name[blobxfer].models.azure.StorageModes.Append] begin[:] call[name[self]._upload_queue.put, parameter[name[ud]]] call[name[self]._update_progress_bar, parameter[]]
keyword[def] identifier[_process_transfer] ( identifier[self] , identifier[ud] , identifier[ase] , identifier[offsets] , identifier[data] ): literal[string] identifier[self] . identifier[_put_data] ( identifier[ud] , identifier[ase] , identifier[offsets] , identifier[data] ) keyword[with] identifier[self] . identifier[_transfer_lock] : keyword[if] identifier[ud] . identifier[local_path] . identifier[use_stdin] : identifier[self] . identifier[_upload_bytes_total] += identifier[offsets] . identifier[num_bytes] keyword[elif] identifier[offsets] . identifier[chunk_num] == literal[int] : identifier[self] . identifier[_upload_bytes_total] += identifier[ase] . identifier[size] identifier[self] . identifier[_upload_bytes_sofar] += identifier[offsets] . identifier[num_bytes] identifier[self] . identifier[_transfer_set] . identifier[remove] ( identifier[blobxfer] . identifier[operations] . identifier[upload] . identifier[Uploader] . identifier[create_unique_transfer_id] ( identifier[ud] . identifier[local_path] , identifier[ase] , identifier[offsets] )) identifier[ud] . identifier[complete_offset_upload] ( identifier[offsets] . identifier[chunk_num] ) keyword[if] identifier[ud] . identifier[entity] . identifier[mode] == identifier[blobxfer] . identifier[models] . identifier[azure] . identifier[StorageModes] . identifier[Append] : identifier[self] . identifier[_upload_queue] . identifier[put] ( identifier[ud] ) identifier[self] . identifier[_update_progress_bar] ( identifier[stdin] = identifier[ud] . identifier[local_path] . identifier[use_stdin] )
def _process_transfer(self, ud, ase, offsets, data): # type: (Uploader, blobxfer.models.upload.Descriptor, # blobxfer.models.azure.StorageEntity, # blobxfer.models.upload.Offsets, bytes) -> None 'Process transfer instructions\n :param Uploader self: this\n :param blobxfer.models.upload.Descriptor ud: upload descriptor\n :param blobxfer.models.azure.StorageEntity ase: Storage entity\n :param blobxfer.models.upload.Offsets offsets: offsets\n :param bytes data: data to upload\n ' # issue put range self._put_data(ud, ase, offsets, data) # accounting with self._transfer_lock: if ud.local_path.use_stdin: self._upload_bytes_total += offsets.num_bytes # depends on [control=['if'], data=[]] elif offsets.chunk_num == 0: self._upload_bytes_total += ase.size # depends on [control=['if'], data=[]] self._upload_bytes_sofar += offsets.num_bytes self._transfer_set.remove(blobxfer.operations.upload.Uploader.create_unique_transfer_id(ud.local_path, ase, offsets)) # depends on [control=['with'], data=[]] ud.complete_offset_upload(offsets.chunk_num) # add descriptor back to upload queue only for append blobs if ud.entity.mode == blobxfer.models.azure.StorageModes.Append: self._upload_queue.put(ud) # depends on [control=['if'], data=[]] # update progress bar self._update_progress_bar(stdin=ud.local_path.use_stdin)
def load_ipython_extension(ipython):
    """ Entry point of the IPython extension

    Parameters
    ----------

    IPython : IPython interpreter
        An instance of the IPython interpreter that is handed
        over to the extension
    """
    import IPython

    # don't continue if IPython version is < 3.0
    ipy_version = LooseVersion(IPython.__version__)
    if ipy_version < LooseVersion("3.0.0"):
        # Bug fix: the original adjacent string literals lacked separating
        # spaces and rendered "Vispy'sIPython" and "IPythonversion".
        ipython.write_err("Your IPython version is older than "
                          "version 3.0.0, the minimum for Vispy's "
                          "IPython backend. Please upgrade your IPython "
                          "version.")
        return

    _load_webgl_backend(ipython)
def function[load_ipython_extension, parameter[ipython]]: constant[ Entry point of the IPython extension Parameters ---------- IPython : IPython interpreter An instance of the IPython interpreter that is handed over to the extension ] import module[IPython] variable[ipy_version] assign[=] call[name[LooseVersion], parameter[name[IPython].__version__]] if compare[name[ipy_version] less[<] call[name[LooseVersion], parameter[constant[3.0.0]]]] begin[:] call[name[ipython].write_err, parameter[constant[Your IPython version is older than version 3.0.0, the minimum for Vispy'sIPython backend. Please upgrade your IPythonversion.]]] return[None] call[name[_load_webgl_backend], parameter[name[ipython]]]
keyword[def] identifier[load_ipython_extension] ( identifier[ipython] ): literal[string] keyword[import] identifier[IPython] identifier[ipy_version] = identifier[LooseVersion] ( identifier[IPython] . identifier[__version__] ) keyword[if] identifier[ipy_version] < identifier[LooseVersion] ( literal[string] ): identifier[ipython] . identifier[write_err] ( literal[string] literal[string] literal[string] literal[string] ) keyword[return] identifier[_load_webgl_backend] ( identifier[ipython] )
def load_ipython_extension(ipython): """ Entry point of the IPython extension Parameters ---------- IPython : IPython interpreter An instance of the IPython interpreter that is handed over to the extension """ import IPython # don't continue if IPython version is < 3.0 ipy_version = LooseVersion(IPython.__version__) if ipy_version < LooseVersion('3.0.0'): ipython.write_err("Your IPython version is older than version 3.0.0, the minimum for Vispy'sIPython backend. Please upgrade your IPythonversion.") return # depends on [control=['if'], data=[]] _load_webgl_backend(ipython)
def query_parameter(binding_key):
  """Returns the value currently bound to the specified `binding_key`.

  The `binding_key` argument should look like
  'maybe/some/scope/maybe.modules.configurable_name.parameter_name'. Note
  that this will not include default parameters.

  Args:
    binding_key: The parameter whose bound value should be looked up.

  Returns:
    The value bound to the configurable/parameter combination given in
    `binding_key`.

  Raises:
    ValueError: If no function can be found matching the configurable name
      specified by `binding_key`, or if the specified parameter name is
      blacklisted or not in the function's whitelist (if present), or if
      there is no value bound for the queried parameter or configurable.
  """
  parsed = ParsedBindingKey(binding_key)
  if parsed.config_key not in _CONFIG:
    raise ValueError(
        "Configurable '{}' has no bound parameters.".format(
            parsed.given_selector))
  bound_params = _CONFIG[parsed.config_key]
  if parsed.arg_name not in bound_params:
    raise ValueError(
        "Configurable '{}' has no value bound for parameter '{}'.".format(
            parsed.given_selector, parsed.arg_name))
  return bound_params[parsed.arg_name]
def function[query_parameter, parameter[binding_key]]: constant[Returns the currently bound value to the specified `binding_key`. The `binding_key` argument should look like 'maybe/some/scope/maybe.moduels.configurable_name.parameter_name'. Note that this will not include default parameters. Args: binding_key: The parameter whose value should be set. Returns: The value bound to the configurable/parameter combination given in `binding_key`. Raises: ValueError: If no function can be found matching the configurable name specified by `biding_key`, or if the specified parameter name is blacklisted or not in the function's whitelist (if present) or if there is no value bound for the queried parameter or configurable. ] variable[pbk] assign[=] call[name[ParsedBindingKey], parameter[name[binding_key]]] if compare[name[pbk].config_key <ast.NotIn object at 0x7da2590d7190> name[_CONFIG]] begin[:] variable[err_str] assign[=] constant[Configurable '{}' has no bound parameters.] <ast.Raise object at 0x7da1b0352020> if compare[name[pbk].arg_name <ast.NotIn object at 0x7da2590d7190> call[name[_CONFIG]][name[pbk].config_key]] begin[:] variable[err_str] assign[=] constant[Configurable '{}' has no value bound for parameter '{}'.] <ast.Raise object at 0x7da1b03536d0> return[call[call[name[_CONFIG]][name[pbk].config_key]][name[pbk].arg_name]]
keyword[def] identifier[query_parameter] ( identifier[binding_key] ): literal[string] identifier[pbk] = identifier[ParsedBindingKey] ( identifier[binding_key] ) keyword[if] identifier[pbk] . identifier[config_key] keyword[not] keyword[in] identifier[_CONFIG] : identifier[err_str] = literal[string] keyword[raise] identifier[ValueError] ( identifier[err_str] . identifier[format] ( identifier[pbk] . identifier[given_selector] )) keyword[if] identifier[pbk] . identifier[arg_name] keyword[not] keyword[in] identifier[_CONFIG] [ identifier[pbk] . identifier[config_key] ]: identifier[err_str] = literal[string] keyword[raise] identifier[ValueError] ( identifier[err_str] . identifier[format] ( identifier[pbk] . identifier[given_selector] , identifier[pbk] . identifier[arg_name] )) keyword[return] identifier[_CONFIG] [ identifier[pbk] . identifier[config_key] ][ identifier[pbk] . identifier[arg_name] ]
def query_parameter(binding_key): """Returns the currently bound value to the specified `binding_key`. The `binding_key` argument should look like 'maybe/some/scope/maybe.moduels.configurable_name.parameter_name'. Note that this will not include default parameters. Args: binding_key: The parameter whose value should be set. Returns: The value bound to the configurable/parameter combination given in `binding_key`. Raises: ValueError: If no function can be found matching the configurable name specified by `biding_key`, or if the specified parameter name is blacklisted or not in the function's whitelist (if present) or if there is no value bound for the queried parameter or configurable. """ pbk = ParsedBindingKey(binding_key) if pbk.config_key not in _CONFIG: err_str = "Configurable '{}' has no bound parameters." raise ValueError(err_str.format(pbk.given_selector)) # depends on [control=['if'], data=[]] if pbk.arg_name not in _CONFIG[pbk.config_key]: err_str = "Configurable '{}' has no value bound for parameter '{}'." raise ValueError(err_str.format(pbk.given_selector, pbk.arg_name)) # depends on [control=['if'], data=[]] return _CONFIG[pbk.config_key][pbk.arg_name]
def get_job_model(self):
    """
    Build a fresh JobModel wrapping the job data currently held by this
    instance.

    :return: JobModel
    """
    if self.job:
        # Job data is present: hand it to a new model along with the
        # configured storage directory.
        return JobModel(self.job_id, self.job, self.home_config['storage_dir'])

    raise Exception('Job not loaded yet. Use load(id) first.')
def function[get_job_model, parameter[self]]: constant[ Returns a new JobModel instance with current loaded job data attached. :return: JobModel ] if <ast.UnaryOp object at 0x7da20c6ab400> begin[:] <ast.Raise object at 0x7da20c6aa470> return[call[name[JobModel], parameter[name[self].job_id, name[self].job, call[name[self].home_config][constant[storage_dir]]]]]
keyword[def] identifier[get_job_model] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[job] : keyword[raise] identifier[Exception] ( literal[string] ) keyword[return] identifier[JobModel] ( identifier[self] . identifier[job_id] , identifier[self] . identifier[job] , identifier[self] . identifier[home_config] [ literal[string] ])
def get_job_model(self): """ Returns a new JobModel instance with current loaded job data attached. :return: JobModel """ if not self.job: raise Exception('Job not loaded yet. Use load(id) first.') # depends on [control=['if'], data=[]] return JobModel(self.job_id, self.job, self.home_config['storage_dir'])
def runProcess(cmd, *args):
    """Run `cmd` (which is searched for in the executable path) with `args`
    and return the exit status.

    In general (unless you know what you're doing) use::

       runProcess('program', filename)

    rather than::

       os.system('program %s' % filename)

    because the latter will not work as expected if `filename` contains
    spaces or shell-metacharacters.

    If you need more fine-grained control look at ``subprocess.run``.
    """
    import subprocess
    # subprocess.call searches PATH and returns the exit status, like the
    # previous os.spawnvp(P_WAIT, ...) did, but is also portable to
    # platforms (e.g. Windows) where the POSIX-only spawnvp is unavailable.
    return subprocess.call((cmd,) + args)
def function[runProcess, parameter[cmd]]: constant[Run `cmd` (which is searched for in the executable path) with `args` and return the exit status. In general (unless you know what you're doing) use:: runProcess('program', filename) rather than:: os.system('program %s' % filename) because the latter will not work as expected if `filename` contains spaces or shell-metacharacters. If you need more fine-grained control look at ``os.spawn*``. ] from relative_module[os] import module[spawnvp], module[P_WAIT] return[call[name[spawnvp], parameter[name[P_WAIT], name[cmd], binary_operation[tuple[[<ast.Name object at 0x7da20c7c9d20>]] + name[args]]]]]
keyword[def] identifier[runProcess] ( identifier[cmd] ,* identifier[args] ): literal[string] keyword[from] identifier[os] keyword[import] identifier[spawnvp] , identifier[P_WAIT] keyword[return] identifier[spawnvp] ( identifier[P_WAIT] , identifier[cmd] ,( identifier[cmd] ,)+ identifier[args] )
def runProcess(cmd, *args): """Run `cmd` (which is searched for in the executable path) with `args` and return the exit status. In general (unless you know what you're doing) use:: runProcess('program', filename) rather than:: os.system('program %s' % filename) because the latter will not work as expected if `filename` contains spaces or shell-metacharacters. If you need more fine-grained control look at ``os.spawn*``. """ from os import spawnvp, P_WAIT return spawnvp(P_WAIT, cmd, (cmd,) + args)
def save(self, model, path=''):
    """Save the file model and return the model with no content.

    ``model`` is a contents-API dict with at least a 'type' key
    ('notebook', 'file' or 'directory') and, except for directories,
    a 'content' key.  ``path`` is the API path of the target.
    Raises web.HTTPError(400) on malformed models and
    web.HTTPError(500) on unexpected save failures.
    """
    path = path.strip('/')

    # Validate the model before touching the filesystem.
    if 'type' not in model:
        raise web.HTTPError(400, u'No file type provided')
    if 'content' not in model and model['type'] != 'directory':
        raise web.HTTPError(400, u'No file content provided')

    # Give registered pre-save hooks a chance to mutate the model.
    self.run_pre_save_hook(model=model, path=path)

    os_path = self._get_os_path(path)
    self.log.debug("Saving %s", os_path)
    try:
        if model['type'] == 'notebook':
            # NEW
            # Dispatch on the on-disk extension: .ipynb is saved natively,
            # anything else goes through format conversion.
            file_ext = _file_extension(os_path)
            if file_ext == '.ipynb':
                nb = nbformat.from_dict(model['content'])
                # Sign the notebook so it is trusted when reopened.
                self.check_and_sign(nb, path)
                self._save_notebook(os_path, nb)
            else:
                contents = convert(model['content'], from_='notebook', to=self.format)
                # Save a text file.
                if (format_manager().file_type(self.format) in ('text', 'json')):
                    self._save_file(os_path, contents, 'text')
                # Save to a binary file.
                else:
                    format_manager().save(os_path, contents, name=self.format, overwrite=True)
            # One checkpoint should always exist for notebooks.
            if not self.checkpoints.list_checkpoints(path):
                self.create_checkpoint(path)
        elif model['type'] == 'file':
            # Missing format will be handled internally by _save_file.
            self._save_file(os_path, model['content'], model.get('format'))
        elif model['type'] == 'directory':
            self._save_directory(os_path, model, path)
        else:
            raise web.HTTPError(400, "Unhandled contents type: %s" % model['type'])
    except web.HTTPError:
        # Deliberate HTTP errors (including the 400s above) pass through
        # unchanged.
        raise
    except Exception as e:
        # Anything else is logged with a traceback and mapped to a 500.
        self.log.error(u'Error while saving file: %s %s', path, e, exc_info=True)
        raise web.HTTPError(500, u'Unexpected error while saving file: %s %s' % (path, e))

    # Validation may attach a 'message' to the model; preserve it across
    # the content-free re-fetch below.
    validation_message = None
    if model['type'] == 'notebook':
        self.validate_notebook_model(model)
        validation_message = model.get('message', None)

    # Return a fresh, content-free model reflecting what was written.
    model = self.get(path, content=False)
    if validation_message:
        model['message'] = validation_message

    self.run_post_save_hook(model=model, os_path=os_path)

    return model
def function[save, parameter[self, model, path]]: constant[Save the file model and return the model with no content.] variable[path] assign[=] call[name[path].strip, parameter[constant[/]]] if compare[constant[type] <ast.NotIn object at 0x7da2590d7190> name[model]] begin[:] <ast.Raise object at 0x7da20c76dff0> if <ast.BoolOp object at 0x7da20c76c220> begin[:] <ast.Raise object at 0x7da20c76dae0> call[name[self].run_pre_save_hook, parameter[]] variable[os_path] assign[=] call[name[self]._get_os_path, parameter[name[path]]] call[name[self].log.debug, parameter[constant[Saving %s], name[os_path]]] <ast.Try object at 0x7da20c76d8d0> variable[validation_message] assign[=] constant[None] if compare[call[name[model]][constant[type]] equal[==] constant[notebook]] begin[:] call[name[self].validate_notebook_model, parameter[name[model]]] variable[validation_message] assign[=] call[name[model].get, parameter[constant[message], constant[None]]] variable[model] assign[=] call[name[self].get, parameter[name[path]]] if name[validation_message] begin[:] call[name[model]][constant[message]] assign[=] name[validation_message] call[name[self].run_post_save_hook, parameter[]] return[name[model]]
keyword[def] identifier[save] ( identifier[self] , identifier[model] , identifier[path] = literal[string] ): literal[string] identifier[path] = identifier[path] . identifier[strip] ( literal[string] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[model] : keyword[raise] identifier[web] . identifier[HTTPError] ( literal[int] , literal[string] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[model] keyword[and] identifier[model] [ literal[string] ]!= literal[string] : keyword[raise] identifier[web] . identifier[HTTPError] ( literal[int] , literal[string] ) identifier[self] . identifier[run_pre_save_hook] ( identifier[model] = identifier[model] , identifier[path] = identifier[path] ) identifier[os_path] = identifier[self] . identifier[_get_os_path] ( identifier[path] ) identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[os_path] ) keyword[try] : keyword[if] identifier[model] [ literal[string] ]== literal[string] : identifier[file_ext] = identifier[_file_extension] ( identifier[os_path] ) keyword[if] identifier[file_ext] == literal[string] : identifier[nb] = identifier[nbformat] . identifier[from_dict] ( identifier[model] [ literal[string] ]) identifier[self] . identifier[check_and_sign] ( identifier[nb] , identifier[path] ) identifier[self] . identifier[_save_notebook] ( identifier[os_path] , identifier[nb] ) keyword[else] : identifier[contents] = identifier[convert] ( identifier[model] [ literal[string] ], identifier[from_] = literal[string] , identifier[to] = identifier[self] . identifier[format] ) keyword[if] ( identifier[format_manager] (). identifier[file_type] ( identifier[self] . identifier[format] ) keyword[in] ( literal[string] , literal[string] )): identifier[self] . identifier[_save_file] ( identifier[os_path] , identifier[contents] , literal[string] ) keyword[else] : identifier[format_manager] (). 
identifier[save] ( identifier[os_path] , identifier[contents] , identifier[name] = identifier[self] . identifier[format] , identifier[overwrite] = keyword[True] ) keyword[if] keyword[not] identifier[self] . identifier[checkpoints] . identifier[list_checkpoints] ( identifier[path] ): identifier[self] . identifier[create_checkpoint] ( identifier[path] ) keyword[elif] identifier[model] [ literal[string] ]== literal[string] : identifier[self] . identifier[_save_file] ( identifier[os_path] , identifier[model] [ literal[string] ], identifier[model] . identifier[get] ( literal[string] )) keyword[elif] identifier[model] [ literal[string] ]== literal[string] : identifier[self] . identifier[_save_directory] ( identifier[os_path] , identifier[model] , identifier[path] ) keyword[else] : keyword[raise] identifier[web] . identifier[HTTPError] ( literal[int] , literal[string] % identifier[model] [ literal[string] ]) keyword[except] identifier[web] . identifier[HTTPError] : keyword[raise] keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[self] . identifier[log] . identifier[error] ( literal[string] , identifier[path] , identifier[e] , identifier[exc_info] = keyword[True] ) keyword[raise] identifier[web] . identifier[HTTPError] ( literal[int] , literal[string] %( identifier[path] , identifier[e] )) identifier[validation_message] = keyword[None] keyword[if] identifier[model] [ literal[string] ]== literal[string] : identifier[self] . identifier[validate_notebook_model] ( identifier[model] ) identifier[validation_message] = identifier[model] . identifier[get] ( literal[string] , keyword[None] ) identifier[model] = identifier[self] . identifier[get] ( identifier[path] , identifier[content] = keyword[False] ) keyword[if] identifier[validation_message] : identifier[model] [ literal[string] ]= identifier[validation_message] identifier[self] . 
identifier[run_post_save_hook] ( identifier[model] = identifier[model] , identifier[os_path] = identifier[os_path] ) keyword[return] identifier[model]
def save(self, model, path=''): """Save the file model and return the model with no content.""" path = path.strip('/') if 'type' not in model: raise web.HTTPError(400, u'No file type provided') # depends on [control=['if'], data=[]] if 'content' not in model and model['type'] != 'directory': raise web.HTTPError(400, u'No file content provided') # depends on [control=['if'], data=[]] self.run_pre_save_hook(model=model, path=path) os_path = self._get_os_path(path) self.log.debug('Saving %s', os_path) try: if model['type'] == 'notebook': # NEW file_ext = _file_extension(os_path) if file_ext == '.ipynb': nb = nbformat.from_dict(model['content']) self.check_and_sign(nb, path) self._save_notebook(os_path, nb) # depends on [control=['if'], data=[]] else: contents = convert(model['content'], from_='notebook', to=self.format) # Save a text file. if format_manager().file_type(self.format) in ('text', 'json'): self._save_file(os_path, contents, 'text') # depends on [control=['if'], data=[]] else: # Save to a binary file. format_manager().save(os_path, contents, name=self.format, overwrite=True) # One checkpoint should always exist for notebooks. if not self.checkpoints.list_checkpoints(path): self.create_checkpoint(path) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif model['type'] == 'file': # Missing format will be handled internally by _save_file. 
self._save_file(os_path, model['content'], model.get('format')) # depends on [control=['if'], data=[]] elif model['type'] == 'directory': self._save_directory(os_path, model, path) # depends on [control=['if'], data=[]] else: raise web.HTTPError(400, 'Unhandled contents type: %s' % model['type']) # depends on [control=['try'], data=[]] except web.HTTPError: raise # depends on [control=['except'], data=[]] except Exception as e: self.log.error(u'Error while saving file: %s %s', path, e, exc_info=True) raise web.HTTPError(500, u'Unexpected error while saving file: %s %s' % (path, e)) # depends on [control=['except'], data=['e']] validation_message = None if model['type'] == 'notebook': self.validate_notebook_model(model) validation_message = model.get('message', None) # depends on [control=['if'], data=[]] model = self.get(path, content=False) if validation_message: model['message'] = validation_message # depends on [control=['if'], data=[]] self.run_post_save_hook(model=model, os_path=os_path) return model
def list(request, content_type, id):
    """
    View wrapper around the ``render_comment_list`` template tag.

    ``content_type`` is an ``app_label-model`` pair naming the target
    object's model; ``id`` is the object's primary key.
    """
    # Resolve the target object from its "app_label-model" identifier.
    app_label, model_name = content_type.split('-')
    ctype = ContentType.objects.get(app_label=app_label, model=model_name)
    target = ctype.get_object_for_this_type(id=id)

    # Render the comment list for the object and wrap it in a response.
    template = Template("{% load comments %}{% render_comment_list for object %}")
    ctx = RequestContext(request)
    ctx.update({'object': target})
    return HttpResponse(template.render(ctx))
def function[list, parameter[request, content_type, id]]: constant[ Wrapper exposing comment's render_comment_list tag as a view. ] <ast.Tuple object at 0x7da20c7c81c0> assign[=] call[name[content_type].split, parameter[constant[-]]] variable[ctype] assign[=] call[name[ContentType].objects.get, parameter[]] variable[obj] assign[=] call[name[ctype].get_object_for_this_type, parameter[]] variable[t] assign[=] call[name[Template], parameter[constant[{% load comments %}{% render_comment_list for object %}]]] variable[context] assign[=] call[name[RequestContext], parameter[name[request]]] call[name[context].update, parameter[dictionary[[<ast.Constant object at 0x7da20c7cada0>], [<ast.Name object at 0x7da20c7c9fc0>]]]] variable[result] assign[=] call[name[t].render, parameter[name[context]]] return[call[name[HttpResponse], parameter[name[result]]]]
keyword[def] identifier[list] ( identifier[request] , identifier[content_type] , identifier[id] ): literal[string] identifier[app_label] , identifier[model] = identifier[content_type] . identifier[split] ( literal[string] ) identifier[ctype] = identifier[ContentType] . identifier[objects] . identifier[get] ( identifier[app_label] = identifier[app_label] , identifier[model] = identifier[model] ) identifier[obj] = identifier[ctype] . identifier[get_object_for_this_type] ( identifier[id] = identifier[id] ) identifier[t] = identifier[Template] ( literal[string] ) identifier[context] = identifier[RequestContext] ( identifier[request] ) identifier[context] . identifier[update] ({ literal[string] : identifier[obj] }) identifier[result] = identifier[t] . identifier[render] ( identifier[context] ) keyword[return] identifier[HttpResponse] ( identifier[result] )
def list(request, content_type, id): """ Wrapper exposing comment's render_comment_list tag as a view. """ # get object (app_label, model) = content_type.split('-') ctype = ContentType.objects.get(app_label=app_label, model=model) obj = ctype.get_object_for_this_type(id=id) # setup template and return result t = Template('{% load comments %}{% render_comment_list for object %}') context = RequestContext(request) context.update({'object': obj}) result = t.render(context) return HttpResponse(result)
def build_duration_pretty(self): """Return the difference between build and build_done states, in a human readable format""" from ambry.util import pretty_time from time import time if not self.state.building: return None built = self.state.built or time() try: return pretty_time(int(built) - int(self.state.building)) except TypeError: # one of the values is None or not a number return None
def function[build_duration_pretty, parameter[self]]: constant[Return the difference between build and build_done states, in a human readable format] from relative_module[ambry.util] import module[pretty_time] from relative_module[time] import module[time] if <ast.UnaryOp object at 0x7da20c796050> begin[:] return[constant[None]] variable[built] assign[=] <ast.BoolOp object at 0x7da18eb54460> <ast.Try object at 0x7da18eb55660>
keyword[def] identifier[build_duration_pretty] ( identifier[self] ): literal[string] keyword[from] identifier[ambry] . identifier[util] keyword[import] identifier[pretty_time] keyword[from] identifier[time] keyword[import] identifier[time] keyword[if] keyword[not] identifier[self] . identifier[state] . identifier[building] : keyword[return] keyword[None] identifier[built] = identifier[self] . identifier[state] . identifier[built] keyword[or] identifier[time] () keyword[try] : keyword[return] identifier[pretty_time] ( identifier[int] ( identifier[built] )- identifier[int] ( identifier[self] . identifier[state] . identifier[building] )) keyword[except] identifier[TypeError] : keyword[return] keyword[None]
def build_duration_pretty(self): """Return the difference between build and build_done states, in a human readable format""" from ambry.util import pretty_time from time import time if not self.state.building: return None # depends on [control=['if'], data=[]] built = self.state.built or time() try: return pretty_time(int(built) - int(self.state.building)) # depends on [control=['try'], data=[]] except TypeError: # one of the values is None or not a number return None # depends on [control=['except'], data=[]]
def mcc(y, z): """Matthews correlation coefficient """ tp, tn, fp, fn = contingency_table(y, z) return (tp * tn - fp * fn) / K.sqrt((tp + fp) * (tp + fn) * (tn + fp) * (tn + fn))
def function[mcc, parameter[y, z]]: constant[Matthews correlation coefficient ] <ast.Tuple object at 0x7da1b054bfd0> assign[=] call[name[contingency_table], parameter[name[y], name[z]]] return[binary_operation[binary_operation[binary_operation[name[tp] * name[tn]] - binary_operation[name[fp] * name[fn]]] / call[name[K].sqrt, parameter[binary_operation[binary_operation[binary_operation[binary_operation[name[tp] + name[fp]] * binary_operation[name[tp] + name[fn]]] * binary_operation[name[tn] + name[fp]]] * binary_operation[name[tn] + name[fn]]]]]]]
keyword[def] identifier[mcc] ( identifier[y] , identifier[z] ): literal[string] identifier[tp] , identifier[tn] , identifier[fp] , identifier[fn] = identifier[contingency_table] ( identifier[y] , identifier[z] ) keyword[return] ( identifier[tp] * identifier[tn] - identifier[fp] * identifier[fn] )/ identifier[K] . identifier[sqrt] (( identifier[tp] + identifier[fp] )*( identifier[tp] + identifier[fn] )*( identifier[tn] + identifier[fp] )*( identifier[tn] + identifier[fn] ))
def mcc(y, z): """Matthews correlation coefficient """ (tp, tn, fp, fn) = contingency_table(y, z) return (tp * tn - fp * fn) / K.sqrt((tp + fp) * (tp + fn) * (tn + fp) * (tn + fn))
def warning(self, message, *args, **kwargs): """Alias to warn """ self._log(logging.WARNING, message, *args, **kwargs)
def function[warning, parameter[self, message]]: constant[Alias to warn ] call[name[self]._log, parameter[name[logging].WARNING, name[message], <ast.Starred object at 0x7da1b1435bd0>]]
keyword[def] identifier[warning] ( identifier[self] , identifier[message] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[self] . identifier[_log] ( identifier[logging] . identifier[WARNING] , identifier[message] ,* identifier[args] ,** identifier[kwargs] )
def warning(self, message, *args, **kwargs): """Alias to warn """ self._log(logging.WARNING, message, *args, **kwargs)
def sanity_check(args): """ Verify if the work folder is a django app. A valid django app always must have a models.py file :return: None """ if not os.path.isfile( os.path.join( args['django_application_folder'], 'models.py' ) ): print("django_application_folder is not a Django application folder") sys.exit(1)
def function[sanity_check, parameter[args]]: constant[ Verify if the work folder is a django app. A valid django app always must have a models.py file :return: None ] if <ast.UnaryOp object at 0x7da18f813340> begin[:] call[name[print], parameter[constant[django_application_folder is not a Django application folder]]] call[name[sys].exit, parameter[constant[1]]]
keyword[def] identifier[sanity_check] ( identifier[args] ): literal[string] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[os] . identifier[path] . identifier[join] ( identifier[args] [ literal[string] ], literal[string] ) ): identifier[print] ( literal[string] ) identifier[sys] . identifier[exit] ( literal[int] )
def sanity_check(args): """ Verify if the work folder is a django app. A valid django app always must have a models.py file :return: None """ if not os.path.isfile(os.path.join(args['django_application_folder'], 'models.py')): print('django_application_folder is not a Django application folder') sys.exit(1) # depends on [control=['if'], data=[]]
def draw_points(self, *points): """Draw multiple points on the current rendering target. Args: *points (Point): The points to draw. Raises: SDLError: If an error is encountered. """ point_array = ffi.new('SDL_Point[]', len(points)) for i, p in enumerate(points): point_array[i] = p._ptr[0] check_int_err(lib.SDL_RenderDrawPoints(self._ptr, point_array, len(points)))
def function[draw_points, parameter[self]]: constant[Draw multiple points on the current rendering target. Args: *points (Point): The points to draw. Raises: SDLError: If an error is encountered. ] variable[point_array] assign[=] call[name[ffi].new, parameter[constant[SDL_Point[]], call[name[len], parameter[name[points]]]]] for taget[tuple[[<ast.Name object at 0x7da1b09b8a00>, <ast.Name object at 0x7da1b09bb070>]]] in starred[call[name[enumerate], parameter[name[points]]]] begin[:] call[name[point_array]][name[i]] assign[=] call[name[p]._ptr][constant[0]] call[name[check_int_err], parameter[call[name[lib].SDL_RenderDrawPoints, parameter[name[self]._ptr, name[point_array], call[name[len], parameter[name[points]]]]]]]
keyword[def] identifier[draw_points] ( identifier[self] ,* identifier[points] ): literal[string] identifier[point_array] = identifier[ffi] . identifier[new] ( literal[string] , identifier[len] ( identifier[points] )) keyword[for] identifier[i] , identifier[p] keyword[in] identifier[enumerate] ( identifier[points] ): identifier[point_array] [ identifier[i] ]= identifier[p] . identifier[_ptr] [ literal[int] ] identifier[check_int_err] ( identifier[lib] . identifier[SDL_RenderDrawPoints] ( identifier[self] . identifier[_ptr] , identifier[point_array] , identifier[len] ( identifier[points] )))
def draw_points(self, *points): """Draw multiple points on the current rendering target. Args: *points (Point): The points to draw. Raises: SDLError: If an error is encountered. """ point_array = ffi.new('SDL_Point[]', len(points)) for (i, p) in enumerate(points): point_array[i] = p._ptr[0] # depends on [control=['for'], data=[]] check_int_err(lib.SDL_RenderDrawPoints(self._ptr, point_array, len(points)))
def extract_dmg(self, path='.'): """ Extract builds with .dmg extension Will only work if `hdiutil` is available. @type path: @param path: """ dmg_fd, dmg_fn = tempfile.mkstemp(prefix='fuzzfetch-', suffix='.dmg') os.close(dmg_fd) out_tmp = tempfile.mkdtemp(prefix='fuzzfetch-', suffix='.tmp') try: _download_url(self.artifact_url('dmg'), dmg_fn) if std_platform.system() == 'Darwin': LOG.info('.. extracting') subprocess.check_call(['hdiutil', 'attach', '-quiet', '-mountpoint', out_tmp, dmg_fn]) try: apps = [mt for mt in os.listdir(out_tmp) if mt.endswith('app')] assert len(apps) == 1 shutil.copytree(os.path.join(out_tmp, apps[0]), os.path.join(path, apps[0]), symlinks=True) finally: subprocess.check_call(['hdiutil', 'detach', '-quiet', out_tmp]) else: LOG.warning('.. can\'t extract target.dmg on %s', std_platform.system()) shutil.copy(dmg_fn, os.path.join(path, 'target.dmg')) finally: shutil.rmtree(out_tmp, onerror=onerror) os.unlink(dmg_fn)
def function[extract_dmg, parameter[self, path]]: constant[ Extract builds with .dmg extension Will only work if `hdiutil` is available. @type path: @param path: ] <ast.Tuple object at 0x7da1b2345810> assign[=] call[name[tempfile].mkstemp, parameter[]] call[name[os].close, parameter[name[dmg_fd]]] variable[out_tmp] assign[=] call[name[tempfile].mkdtemp, parameter[]] <ast.Try object at 0x7da1b2345ab0>
keyword[def] identifier[extract_dmg] ( identifier[self] , identifier[path] = literal[string] ): literal[string] identifier[dmg_fd] , identifier[dmg_fn] = identifier[tempfile] . identifier[mkstemp] ( identifier[prefix] = literal[string] , identifier[suffix] = literal[string] ) identifier[os] . identifier[close] ( identifier[dmg_fd] ) identifier[out_tmp] = identifier[tempfile] . identifier[mkdtemp] ( identifier[prefix] = literal[string] , identifier[suffix] = literal[string] ) keyword[try] : identifier[_download_url] ( identifier[self] . identifier[artifact_url] ( literal[string] ), identifier[dmg_fn] ) keyword[if] identifier[std_platform] . identifier[system] ()== literal[string] : identifier[LOG] . identifier[info] ( literal[string] ) identifier[subprocess] . identifier[check_call] ([ literal[string] , literal[string] , literal[string] , literal[string] , identifier[out_tmp] , identifier[dmg_fn] ]) keyword[try] : identifier[apps] =[ identifier[mt] keyword[for] identifier[mt] keyword[in] identifier[os] . identifier[listdir] ( identifier[out_tmp] ) keyword[if] identifier[mt] . identifier[endswith] ( literal[string] )] keyword[assert] identifier[len] ( identifier[apps] )== literal[int] identifier[shutil] . identifier[copytree] ( identifier[os] . identifier[path] . identifier[join] ( identifier[out_tmp] , identifier[apps] [ literal[int] ]), identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[apps] [ literal[int] ]), identifier[symlinks] = keyword[True] ) keyword[finally] : identifier[subprocess] . identifier[check_call] ([ literal[string] , literal[string] , literal[string] , identifier[out_tmp] ]) keyword[else] : identifier[LOG] . identifier[warning] ( literal[string] , identifier[std_platform] . identifier[system] ()) identifier[shutil] . identifier[copy] ( identifier[dmg_fn] , identifier[os] . identifier[path] . identifier[join] ( identifier[path] , literal[string] )) keyword[finally] : identifier[shutil] . 
identifier[rmtree] ( identifier[out_tmp] , identifier[onerror] = identifier[onerror] ) identifier[os] . identifier[unlink] ( identifier[dmg_fn] )
def extract_dmg(self, path='.'): """ Extract builds with .dmg extension Will only work if `hdiutil` is available. @type path: @param path: """ (dmg_fd, dmg_fn) = tempfile.mkstemp(prefix='fuzzfetch-', suffix='.dmg') os.close(dmg_fd) out_tmp = tempfile.mkdtemp(prefix='fuzzfetch-', suffix='.tmp') try: _download_url(self.artifact_url('dmg'), dmg_fn) if std_platform.system() == 'Darwin': LOG.info('.. extracting') subprocess.check_call(['hdiutil', 'attach', '-quiet', '-mountpoint', out_tmp, dmg_fn]) try: apps = [mt for mt in os.listdir(out_tmp) if mt.endswith('app')] assert len(apps) == 1 shutil.copytree(os.path.join(out_tmp, apps[0]), os.path.join(path, apps[0]), symlinks=True) # depends on [control=['try'], data=[]] finally: subprocess.check_call(['hdiutil', 'detach', '-quiet', out_tmp]) # depends on [control=['if'], data=[]] else: LOG.warning(".. can't extract target.dmg on %s", std_platform.system()) shutil.copy(dmg_fn, os.path.join(path, 'target.dmg')) # depends on [control=['try'], data=[]] finally: shutil.rmtree(out_tmp, onerror=onerror) os.unlink(dmg_fn)
def on_error(self, ws, error): """ Todo """ if type(error).__name__ == "KeyboardInterrupt": sys.exit() self.logger.debug("error")
def function[on_error, parameter[self, ws, error]]: constant[ Todo ] if compare[call[name[type], parameter[name[error]]].__name__ equal[==] constant[KeyboardInterrupt]] begin[:] call[name[sys].exit, parameter[]] call[name[self].logger.debug, parameter[constant[error]]]
keyword[def] identifier[on_error] ( identifier[self] , identifier[ws] , identifier[error] ): literal[string] keyword[if] identifier[type] ( identifier[error] ). identifier[__name__] == literal[string] : identifier[sys] . identifier[exit] () identifier[self] . identifier[logger] . identifier[debug] ( literal[string] )
def on_error(self, ws, error): """ Todo """ if type(error).__name__ == 'KeyboardInterrupt': sys.exit() # depends on [control=['if'], data=[]] self.logger.debug('error')
def get_properties(self): """ Add property to variables in BIF Returns ------- dict: dict of type {variable: list of properties } Example ------- >>> from pgmpy.readwrite import BIFReader, BIFWriter >>> model = BIFReader('dog-problem.bif').get_model() >>> writer = BIFWriter(model) >>> writer.get_properties() {'bowel-problem': ['position = (335, 99)'], 'dog-out': ['position = (300, 195)'], 'family-out': ['position = (257, 99)'], 'hear-bark': ['position = (296, 268)'], 'light-on': ['position = (218, 195)']} """ variables = self.model.nodes() property_tag = {} for variable in sorted(variables): properties = self.model.node[variable] properties = collections.OrderedDict(sorted(properties.items())) property_tag[variable] = [] for prop, val in properties.items(): property_tag[variable].append(str(prop) + " = " + str(val)) return property_tag
def function[get_properties, parameter[self]]: constant[ Add property to variables in BIF Returns ------- dict: dict of type {variable: list of properties } Example ------- >>> from pgmpy.readwrite import BIFReader, BIFWriter >>> model = BIFReader('dog-problem.bif').get_model() >>> writer = BIFWriter(model) >>> writer.get_properties() {'bowel-problem': ['position = (335, 99)'], 'dog-out': ['position = (300, 195)'], 'family-out': ['position = (257, 99)'], 'hear-bark': ['position = (296, 268)'], 'light-on': ['position = (218, 195)']} ] variable[variables] assign[=] call[name[self].model.nodes, parameter[]] variable[property_tag] assign[=] dictionary[[], []] for taget[name[variable]] in starred[call[name[sorted], parameter[name[variables]]]] begin[:] variable[properties] assign[=] call[name[self].model.node][name[variable]] variable[properties] assign[=] call[name[collections].OrderedDict, parameter[call[name[sorted], parameter[call[name[properties].items, parameter[]]]]]] call[name[property_tag]][name[variable]] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da20c6a8190>, <ast.Name object at 0x7da20c6ab700>]]] in starred[call[name[properties].items, parameter[]]] begin[:] call[call[name[property_tag]][name[variable]].append, parameter[binary_operation[binary_operation[call[name[str], parameter[name[prop]]] + constant[ = ]] + call[name[str], parameter[name[val]]]]]] return[name[property_tag]]
keyword[def] identifier[get_properties] ( identifier[self] ): literal[string] identifier[variables] = identifier[self] . identifier[model] . identifier[nodes] () identifier[property_tag] ={} keyword[for] identifier[variable] keyword[in] identifier[sorted] ( identifier[variables] ): identifier[properties] = identifier[self] . identifier[model] . identifier[node] [ identifier[variable] ] identifier[properties] = identifier[collections] . identifier[OrderedDict] ( identifier[sorted] ( identifier[properties] . identifier[items] ())) identifier[property_tag] [ identifier[variable] ]=[] keyword[for] identifier[prop] , identifier[val] keyword[in] identifier[properties] . identifier[items] (): identifier[property_tag] [ identifier[variable] ]. identifier[append] ( identifier[str] ( identifier[prop] )+ literal[string] + identifier[str] ( identifier[val] )) keyword[return] identifier[property_tag]
def get_properties(self): """ Add property to variables in BIF Returns ------- dict: dict of type {variable: list of properties } Example ------- >>> from pgmpy.readwrite import BIFReader, BIFWriter >>> model = BIFReader('dog-problem.bif').get_model() >>> writer = BIFWriter(model) >>> writer.get_properties() {'bowel-problem': ['position = (335, 99)'], 'dog-out': ['position = (300, 195)'], 'family-out': ['position = (257, 99)'], 'hear-bark': ['position = (296, 268)'], 'light-on': ['position = (218, 195)']} """ variables = self.model.nodes() property_tag = {} for variable in sorted(variables): properties = self.model.node[variable] properties = collections.OrderedDict(sorted(properties.items())) property_tag[variable] = [] for (prop, val) in properties.items(): property_tag[variable].append(str(prop) + ' = ' + str(val)) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['variable']] return property_tag
def get_argument_offset(self): """ Helper function to get offset argument. Raises exception if argument is missing. Returns the offset argument. """ try: offset = self.get_argument(constants.PARAM_OFFSET) return offset except tornado.web.MissingArgumentError as e: raise Exception(e.log_message)
def function[get_argument_offset, parameter[self]]: constant[ Helper function to get offset argument. Raises exception if argument is missing. Returns the offset argument. ] <ast.Try object at 0x7da2054a4b80>
keyword[def] identifier[get_argument_offset] ( identifier[self] ): literal[string] keyword[try] : identifier[offset] = identifier[self] . identifier[get_argument] ( identifier[constants] . identifier[PARAM_OFFSET] ) keyword[return] identifier[offset] keyword[except] identifier[tornado] . identifier[web] . identifier[MissingArgumentError] keyword[as] identifier[e] : keyword[raise] identifier[Exception] ( identifier[e] . identifier[log_message] )
def get_argument_offset(self): """ Helper function to get offset argument. Raises exception if argument is missing. Returns the offset argument. """ try: offset = self.get_argument(constants.PARAM_OFFSET) return offset # depends on [control=['try'], data=[]] except tornado.web.MissingArgumentError as e: raise Exception(e.log_message) # depends on [control=['except'], data=['e']]
def verify(df, check, *args, **kwargs): """ Generic verify. Assert that ``check(df, *args, **kwargs)`` is true. Parameters ========== df : DataFrame check : function Should take DataFrame and **kwargs. Returns bool Returns ======= df : DataFrame same as the input. """ result = check(df, *args, **kwargs) try: assert result except AssertionError as e: msg = '{} is not true'.format(check.__name__) e.args = (msg, df) raise return df
def function[verify, parameter[df, check]]: constant[ Generic verify. Assert that ``check(df, *args, **kwargs)`` is true. Parameters ========== df : DataFrame check : function Should take DataFrame and **kwargs. Returns bool Returns ======= df : DataFrame same as the input. ] variable[result] assign[=] call[name[check], parameter[name[df], <ast.Starred object at 0x7da1b07b0700>]] <ast.Try object at 0x7da1b07b0640> return[name[df]]
keyword[def] identifier[verify] ( identifier[df] , identifier[check] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[result] = identifier[check] ( identifier[df] ,* identifier[args] ,** identifier[kwargs] ) keyword[try] : keyword[assert] identifier[result] keyword[except] identifier[AssertionError] keyword[as] identifier[e] : identifier[msg] = literal[string] . identifier[format] ( identifier[check] . identifier[__name__] ) identifier[e] . identifier[args] =( identifier[msg] , identifier[df] ) keyword[raise] keyword[return] identifier[df]
def verify(df, check, *args, **kwargs): """ Generic verify. Assert that ``check(df, *args, **kwargs)`` is true. Parameters ========== df : DataFrame check : function Should take DataFrame and **kwargs. Returns bool Returns ======= df : DataFrame same as the input. """ result = check(df, *args, **kwargs) try: assert result # depends on [control=['try'], data=[]] except AssertionError as e: msg = '{} is not true'.format(check.__name__) e.args = (msg, df) raise # depends on [control=['except'], data=['e']] return df
def find_primitive(cell, symprec=1e-5, angle_tolerance=-1.0): """Primitive cell is searched in the input cell. The primitive cell is returned by a tuple of (lattice, positions, numbers). If it fails, None is returned. """ _set_no_error() lattice, positions, numbers, _ = _expand_cell(cell) if lattice is None: return None num_atom_prim = spg.primitive(lattice, positions, numbers, symprec, angle_tolerance) _set_error_message() if num_atom_prim > 0: return (np.array(lattice.T, dtype='double', order='C'), np.array(positions[:num_atom_prim], dtype='double', order='C'), np.array(numbers[:num_atom_prim], dtype='intc')) else: return None
def function[find_primitive, parameter[cell, symprec, angle_tolerance]]: constant[Primitive cell is searched in the input cell. The primitive cell is returned by a tuple of (lattice, positions, numbers). If it fails, None is returned. ] call[name[_set_no_error], parameter[]] <ast.Tuple object at 0x7da18ede4a90> assign[=] call[name[_expand_cell], parameter[name[cell]]] if compare[name[lattice] is constant[None]] begin[:] return[constant[None]] variable[num_atom_prim] assign[=] call[name[spg].primitive, parameter[name[lattice], name[positions], name[numbers], name[symprec], name[angle_tolerance]]] call[name[_set_error_message], parameter[]] if compare[name[num_atom_prim] greater[>] constant[0]] begin[:] return[tuple[[<ast.Call object at 0x7da18ede5c60>, <ast.Call object at 0x7da18ede5870>, <ast.Call object at 0x7da18ede7d30>]]]
keyword[def] identifier[find_primitive] ( identifier[cell] , identifier[symprec] = literal[int] , identifier[angle_tolerance] =- literal[int] ): literal[string] identifier[_set_no_error] () identifier[lattice] , identifier[positions] , identifier[numbers] , identifier[_] = identifier[_expand_cell] ( identifier[cell] ) keyword[if] identifier[lattice] keyword[is] keyword[None] : keyword[return] keyword[None] identifier[num_atom_prim] = identifier[spg] . identifier[primitive] ( identifier[lattice] , identifier[positions] , identifier[numbers] , identifier[symprec] , identifier[angle_tolerance] ) identifier[_set_error_message] () keyword[if] identifier[num_atom_prim] > literal[int] : keyword[return] ( identifier[np] . identifier[array] ( identifier[lattice] . identifier[T] , identifier[dtype] = literal[string] , identifier[order] = literal[string] ), identifier[np] . identifier[array] ( identifier[positions] [: identifier[num_atom_prim] ], identifier[dtype] = literal[string] , identifier[order] = literal[string] ), identifier[np] . identifier[array] ( identifier[numbers] [: identifier[num_atom_prim] ], identifier[dtype] = literal[string] )) keyword[else] : keyword[return] keyword[None]
def find_primitive(cell, symprec=1e-05, angle_tolerance=-1.0): """Primitive cell is searched in the input cell. The primitive cell is returned by a tuple of (lattice, positions, numbers). If it fails, None is returned. """ _set_no_error() (lattice, positions, numbers, _) = _expand_cell(cell) if lattice is None: return None # depends on [control=['if'], data=[]] num_atom_prim = spg.primitive(lattice, positions, numbers, symprec, angle_tolerance) _set_error_message() if num_atom_prim > 0: return (np.array(lattice.T, dtype='double', order='C'), np.array(positions[:num_atom_prim], dtype='double', order='C'), np.array(numbers[:num_atom_prim], dtype='intc')) # depends on [control=['if'], data=['num_atom_prim']] else: return None
def modify(self, pk=None, create_on_missing=False, **kwargs): """Modify an already existing. To edit the project's organizations, see help for organizations. Fields in the resource's `identity` tuple can be used in lieu of a primary key for a lookup; in such a case, only other fields are written. To modify unique fields, you must use the primary key for the lookup. =====API DOCS===== Modify an already existing project. :param pk: Primary key of the resource to be modified. :type pk: int :param create_on_missing: Flag that if set, a new object is created if ``pk`` is not set and objects matching the appropriate unique criteria is not found. :type create_on_missing: bool :param `**kwargs`: Keyword arguments which, all together, will be used as PATCH body to modify the resource object. if ``pk`` is not set, key-value pairs of ``**kwargs`` which are also in resource's identity will be used to lookup existing reosource. :returns: A dictionary combining the JSON output of the modified resource, as well as two extra fields: "changed", a flag indicating if the resource is successfully updated; "id", an integer which is the primary key of the updated object. :rtype: dict =====API DOCS===== """ # Associated with issue #52, the organization can't be modified # with the 'modify' command. This would create confusion about # whether its flag is an identifier versus a field to modify. if 'job_timeout' in kwargs and 'timeout' not in kwargs: kwargs['timeout'] = kwargs.pop('job_timeout') return super(Resource, self).write( pk, create_on_missing=create_on_missing, force_on_exists=True, **kwargs )
def function[modify, parameter[self, pk, create_on_missing]]: constant[Modify an already existing. To edit the project's organizations, see help for organizations. Fields in the resource's `identity` tuple can be used in lieu of a primary key for a lookup; in such a case, only other fields are written. To modify unique fields, you must use the primary key for the lookup. =====API DOCS===== Modify an already existing project. :param pk: Primary key of the resource to be modified. :type pk: int :param create_on_missing: Flag that if set, a new object is created if ``pk`` is not set and objects matching the appropriate unique criteria is not found. :type create_on_missing: bool :param `**kwargs`: Keyword arguments which, all together, will be used as PATCH body to modify the resource object. if ``pk`` is not set, key-value pairs of ``**kwargs`` which are also in resource's identity will be used to lookup existing reosource. :returns: A dictionary combining the JSON output of the modified resource, as well as two extra fields: "changed", a flag indicating if the resource is successfully updated; "id", an integer which is the primary key of the updated object. :rtype: dict =====API DOCS===== ] if <ast.BoolOp object at 0x7da20c7964a0> begin[:] call[name[kwargs]][constant[timeout]] assign[=] call[name[kwargs].pop, parameter[constant[job_timeout]]] return[call[call[name[super], parameter[name[Resource], name[self]]].write, parameter[name[pk]]]]
keyword[def] identifier[modify] ( identifier[self] , identifier[pk] = keyword[None] , identifier[create_on_missing] = keyword[False] ,** identifier[kwargs] ): literal[string] keyword[if] literal[string] keyword[in] identifier[kwargs] keyword[and] literal[string] keyword[not] keyword[in] identifier[kwargs] : identifier[kwargs] [ literal[string] ]= identifier[kwargs] . identifier[pop] ( literal[string] ) keyword[return] identifier[super] ( identifier[Resource] , identifier[self] ). identifier[write] ( identifier[pk] , identifier[create_on_missing] = identifier[create_on_missing] , identifier[force_on_exists] = keyword[True] ,** identifier[kwargs] )
def modify(self, pk=None, create_on_missing=False, **kwargs): """Modify an already existing. To edit the project's organizations, see help for organizations. Fields in the resource's `identity` tuple can be used in lieu of a primary key for a lookup; in such a case, only other fields are written. To modify unique fields, you must use the primary key for the lookup. =====API DOCS===== Modify an already existing project. :param pk: Primary key of the resource to be modified. :type pk: int :param create_on_missing: Flag that if set, a new object is created if ``pk`` is not set and objects matching the appropriate unique criteria is not found. :type create_on_missing: bool :param `**kwargs`: Keyword arguments which, all together, will be used as PATCH body to modify the resource object. if ``pk`` is not set, key-value pairs of ``**kwargs`` which are also in resource's identity will be used to lookup existing reosource. :returns: A dictionary combining the JSON output of the modified resource, as well as two extra fields: "changed", a flag indicating if the resource is successfully updated; "id", an integer which is the primary key of the updated object. :rtype: dict =====API DOCS===== """ # Associated with issue #52, the organization can't be modified # with the 'modify' command. This would create confusion about # whether its flag is an identifier versus a field to modify. if 'job_timeout' in kwargs and 'timeout' not in kwargs: kwargs['timeout'] = kwargs.pop('job_timeout') # depends on [control=['if'], data=[]] return super(Resource, self).write(pk, create_on_missing=create_on_missing, force_on_exists=True, **kwargs)
def message(MSG_LEVEL,msg,verbose=1): """ MESSAGE : print function wrapper. Print a message depending on the verbose level @param MSG_LEVEL {in}{required}{type=int} level of the message to be compared with self.verbose @example self.message(0,'This message will be shown for any verbose level') """ caller=get_caller() if MSG_LEVEL <= verbose : print('[{0}.{1}()] {2}'.format(__name__,caller.co_name,msg))
def function[message, parameter[MSG_LEVEL, msg, verbose]]: constant[ MESSAGE : print function wrapper. Print a message depending on the verbose level @param MSG_LEVEL {in}{required}{type=int} level of the message to be compared with self.verbose @example self.message(0,'This message will be shown for any verbose level') ] variable[caller] assign[=] call[name[get_caller], parameter[]] if compare[name[MSG_LEVEL] less_or_equal[<=] name[verbose]] begin[:] call[name[print], parameter[call[constant[[{0}.{1}()] {2}].format, parameter[name[__name__], name[caller].co_name, name[msg]]]]]
keyword[def] identifier[message] ( identifier[MSG_LEVEL] , identifier[msg] , identifier[verbose] = literal[int] ): literal[string] identifier[caller] = identifier[get_caller] () keyword[if] identifier[MSG_LEVEL] <= identifier[verbose] : identifier[print] ( literal[string] . identifier[format] ( identifier[__name__] , identifier[caller] . identifier[co_name] , identifier[msg] ))
def message(MSG_LEVEL, msg, verbose=1): """ MESSAGE : print function wrapper. Print a message depending on the verbose level @param MSG_LEVEL {in}{required}{type=int} level of the message to be compared with self.verbose @example self.message(0,'This message will be shown for any verbose level') """ caller = get_caller() if MSG_LEVEL <= verbose: print('[{0}.{1}()] {2}'.format(__name__, caller.co_name, msg)) # depends on [control=['if'], data=[]]
def _close(self): """ Close connection to remote host. """ if self._process is None: return self.quit() self._process.stdin.close() logger.debug("Waiting for ssh process to finish...") self._process.wait() # Wait for ssh session to finish. # self._process.terminate() # self._process.kill() self._process = None
def function[_close, parameter[self]]: constant[ Close connection to remote host. ] if compare[name[self]._process is constant[None]] begin[:] return[None] call[name[self].quit, parameter[]] call[name[self]._process.stdin.close, parameter[]] call[name[logger].debug, parameter[constant[Waiting for ssh process to finish...]]] call[name[self]._process.wait, parameter[]] name[self]._process assign[=] constant[None]
keyword[def] identifier[_close] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_process] keyword[is] keyword[None] : keyword[return] identifier[self] . identifier[quit] () identifier[self] . identifier[_process] . identifier[stdin] . identifier[close] () identifier[logger] . identifier[debug] ( literal[string] ) identifier[self] . identifier[_process] . identifier[wait] () identifier[self] . identifier[_process] = keyword[None]
def _close(self): """ Close connection to remote host. """ if self._process is None: return # depends on [control=['if'], data=[]] self.quit() self._process.stdin.close() logger.debug('Waiting for ssh process to finish...') self._process.wait() # Wait for ssh session to finish. # self._process.terminate() # self._process.kill() self._process = None
def all_possible_hands(self): ''' Yields all possible hands for all players, given the information known by the player whose turn it is. This information includes the current player's hand, the sizes of the other players' hands, and the moves played by every player, including the passes. :yields: a list of possible Hand objects, corresponding to each player ''' # compute values that must be missing from # each hand, to rule out impossible hands missing = self.missing_values() # get the dominoes that are in all of the other hands. note that, even # though we are 'looking' at the other hands to get these dominoes, we # are not 'cheating' because these dominoes could also be computed by # subtracting the dominoes that have been played (which are public # knowledge) and the dominoes in the current player's hand from the # initial set of dominoes other_dominoes = {d for p, h in enumerate(self.hands) for d in h if p != self.turn} # get the lengths of all the other hands, so # that we know how many dominoes to place in each other_hand_lengths = [len(h) for p, h in enumerate(self.hands) if p != self.turn] # iterate over all possible hands that the other players might have for possible_hands in _all_possible_partitionings(other_dominoes, other_hand_lengths): # given possible hands for all players, this is a generator for # tuples containing the dominoes that are in the other players' hands possible_hands = (h for h in possible_hands) # build a list containing possible hands for all players. since we # know the current player's hand, we just use a shallow copy of it hands = [] for player, hand in enumerate(self.hands): if player != self.turn: hand = next(possible_hands) hands.append(dominoes.Hand(hand)) # only yield the hands if they are possible, according # to the values we know to be missing from each hand if _validate_hands(hands, missing): yield hands
def function[all_possible_hands, parameter[self]]: constant[ Yields all possible hands for all players, given the information known by the player whose turn it is. This information includes the current player's hand, the sizes of the other players' hands, and the moves played by every player, including the passes. :yields: a list of possible Hand objects, corresponding to each player ] variable[missing] assign[=] call[name[self].missing_values, parameter[]] variable[other_dominoes] assign[=] <ast.SetComp object at 0x7da18f09d480> variable[other_hand_lengths] assign[=] <ast.ListComp object at 0x7da18f09e860> for taget[name[possible_hands]] in starred[call[name[_all_possible_partitionings], parameter[name[other_dominoes], name[other_hand_lengths]]]] begin[:] variable[possible_hands] assign[=] <ast.GeneratorExp object at 0x7da18f09dfc0> variable[hands] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da18f09c280>, <ast.Name object at 0x7da18f09cfa0>]]] in starred[call[name[enumerate], parameter[name[self].hands]]] begin[:] if compare[name[player] not_equal[!=] name[self].turn] begin[:] variable[hand] assign[=] call[name[next], parameter[name[possible_hands]]] call[name[hands].append, parameter[call[name[dominoes].Hand, parameter[name[hand]]]]] if call[name[_validate_hands], parameter[name[hands], name[missing]]] begin[:] <ast.Yield object at 0x7da18f09f220>
keyword[def] identifier[all_possible_hands] ( identifier[self] ): literal[string] identifier[missing] = identifier[self] . identifier[missing_values] () identifier[other_dominoes] ={ identifier[d] keyword[for] identifier[p] , identifier[h] keyword[in] identifier[enumerate] ( identifier[self] . identifier[hands] ) keyword[for] identifier[d] keyword[in] identifier[h] keyword[if] identifier[p] != identifier[self] . identifier[turn] } identifier[other_hand_lengths] =[ identifier[len] ( identifier[h] ) keyword[for] identifier[p] , identifier[h] keyword[in] identifier[enumerate] ( identifier[self] . identifier[hands] ) keyword[if] identifier[p] != identifier[self] . identifier[turn] ] keyword[for] identifier[possible_hands] keyword[in] identifier[_all_possible_partitionings] ( identifier[other_dominoes] , identifier[other_hand_lengths] ): identifier[possible_hands] =( identifier[h] keyword[for] identifier[h] keyword[in] identifier[possible_hands] ) identifier[hands] =[] keyword[for] identifier[player] , identifier[hand] keyword[in] identifier[enumerate] ( identifier[self] . identifier[hands] ): keyword[if] identifier[player] != identifier[self] . identifier[turn] : identifier[hand] = identifier[next] ( identifier[possible_hands] ) identifier[hands] . identifier[append] ( identifier[dominoes] . identifier[Hand] ( identifier[hand] )) keyword[if] identifier[_validate_hands] ( identifier[hands] , identifier[missing] ): keyword[yield] identifier[hands]
def all_possible_hands(self): """ Yields all possible hands for all players, given the information known by the player whose turn it is. This information includes the current player's hand, the sizes of the other players' hands, and the moves played by every player, including the passes. :yields: a list of possible Hand objects, corresponding to each player """ # compute values that must be missing from # each hand, to rule out impossible hands missing = self.missing_values() # get the dominoes that are in all of the other hands. note that, even # though we are 'looking' at the other hands to get these dominoes, we # are not 'cheating' because these dominoes could also be computed by # subtracting the dominoes that have been played (which are public # knowledge) and the dominoes in the current player's hand from the # initial set of dominoes other_dominoes = {d for (p, h) in enumerate(self.hands) for d in h if p != self.turn} # get the lengths of all the other hands, so # that we know how many dominoes to place in each other_hand_lengths = [len(h) for (p, h) in enumerate(self.hands) if p != self.turn] # iterate over all possible hands that the other players might have for possible_hands in _all_possible_partitionings(other_dominoes, other_hand_lengths): # given possible hands for all players, this is a generator for # tuples containing the dominoes that are in the other players' hands possible_hands = (h for h in possible_hands) # build a list containing possible hands for all players. 
since we # know the current player's hand, we just use a shallow copy of it hands = [] for (player, hand) in enumerate(self.hands): if player != self.turn: hand = next(possible_hands) # depends on [control=['if'], data=[]] hands.append(dominoes.Hand(hand)) # depends on [control=['for'], data=[]] # only yield the hands if they are possible, according # to the values we know to be missing from each hand if _validate_hands(hands, missing): yield hands # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['possible_hands']]
def overwrite_stage_variables(self, ret, stage_variables): ''' overwrite the given stage_name's stage variables with the given stage_variables ''' res = __salt__['boto_apigateway.overwrite_api_stage_variables'](restApiId=self.restApiId, stageName=self._stage_name, variables=stage_variables, **self._common_aws_args) if not res.get('overwrite'): ret['result'] = False ret['abort'] = True ret['comment'] = res.get('error') else: ret = _log_changes(ret, 'overwrite_stage_variables', res.get('stage')) return ret
def function[overwrite_stage_variables, parameter[self, ret, stage_variables]]: constant[ overwrite the given stage_name's stage variables with the given stage_variables ] variable[res] assign[=] call[call[name[__salt__]][constant[boto_apigateway.overwrite_api_stage_variables]], parameter[]] if <ast.UnaryOp object at 0x7da1b21a23e0> begin[:] call[name[ret]][constant[result]] assign[=] constant[False] call[name[ret]][constant[abort]] assign[=] constant[True] call[name[ret]][constant[comment]] assign[=] call[name[res].get, parameter[constant[error]]] return[name[ret]]
keyword[def] identifier[overwrite_stage_variables] ( identifier[self] , identifier[ret] , identifier[stage_variables] ): literal[string] identifier[res] = identifier[__salt__] [ literal[string] ]( identifier[restApiId] = identifier[self] . identifier[restApiId] , identifier[stageName] = identifier[self] . identifier[_stage_name] , identifier[variables] = identifier[stage_variables] , ** identifier[self] . identifier[_common_aws_args] ) keyword[if] keyword[not] identifier[res] . identifier[get] ( literal[string] ): identifier[ret] [ literal[string] ]= keyword[False] identifier[ret] [ literal[string] ]= keyword[True] identifier[ret] [ literal[string] ]= identifier[res] . identifier[get] ( literal[string] ) keyword[else] : identifier[ret] = identifier[_log_changes] ( identifier[ret] , literal[string] , identifier[res] . identifier[get] ( literal[string] )) keyword[return] identifier[ret]
def overwrite_stage_variables(self, ret, stage_variables): """ overwrite the given stage_name's stage variables with the given stage_variables """ res = __salt__['boto_apigateway.overwrite_api_stage_variables'](restApiId=self.restApiId, stageName=self._stage_name, variables=stage_variables, **self._common_aws_args) if not res.get('overwrite'): ret['result'] = False ret['abort'] = True ret['comment'] = res.get('error') # depends on [control=['if'], data=[]] else: ret = _log_changes(ret, 'overwrite_stage_variables', res.get('stage')) return ret