code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def generate_mechanism(graph: BELGraph, node: BaseEntity, key: Optional[str] = None) -> BELGraph:
    """Generate a mechanistic sub-graph upstream of the given node.

    The mechanism is built by taking the upstream causal sub-graph around
    the node, expanding it one step further upstream, then cleaning it up
    by removing inconsistent edges and collapsing consistent ones.

    :param graph: A BEL graph
    :param node: A BEL node
    :param key: The key in the node data dictionary representing the experimental data.
    :return: A sub-graph grown around the target BEL node
    """
    mechanism = get_upstream_causal_subgraph(graph, node)
    expand_upstream_causal(graph, mechanism)
    remove_inconsistent_edges(mechanism)
    collapse_consistent_edges(mechanism)

    if key is not None:  # FIXME when is it not pruned?
        prune_mechanism_by_data(mechanism, key)

    return mechanism
def function[generate_mechanism, parameter[graph, node, key]]: constant[Generate a mechanistic sub-graph upstream of the given node. :param graph: A BEL graph :param node: A BEL node :param key: The key in the node data dictionary representing the experimental data. :return: A sub-graph grown around the target BEL node ] variable[subgraph] assign[=] call[name[get_upstream_causal_subgraph], parameter[name[graph], name[node]]] call[name[expand_upstream_causal], parameter[name[graph], name[subgraph]]] call[name[remove_inconsistent_edges], parameter[name[subgraph]]] call[name[collapse_consistent_edges], parameter[name[subgraph]]] if compare[name[key] is_not constant[None]] begin[:] call[name[prune_mechanism_by_data], parameter[name[subgraph], name[key]]] return[name[subgraph]]
keyword[def] identifier[generate_mechanism] ( identifier[graph] : identifier[BELGraph] , identifier[node] : identifier[BaseEntity] , identifier[key] : identifier[Optional] [ identifier[str] ]= keyword[None] )-> identifier[BELGraph] : literal[string] identifier[subgraph] = identifier[get_upstream_causal_subgraph] ( identifier[graph] , identifier[node] ) identifier[expand_upstream_causal] ( identifier[graph] , identifier[subgraph] ) identifier[remove_inconsistent_edges] ( identifier[subgraph] ) identifier[collapse_consistent_edges] ( identifier[subgraph] ) keyword[if] identifier[key] keyword[is] keyword[not] keyword[None] : identifier[prune_mechanism_by_data] ( identifier[subgraph] , identifier[key] ) keyword[return] identifier[subgraph]
def generate_mechanism(graph: BELGraph, node: BaseEntity, key: Optional[str]=None) -> BELGraph: """Generate a mechanistic sub-graph upstream of the given node. :param graph: A BEL graph :param node: A BEL node :param key: The key in the node data dictionary representing the experimental data. :return: A sub-graph grown around the target BEL node """ subgraph = get_upstream_causal_subgraph(graph, node) expand_upstream_causal(graph, subgraph) remove_inconsistent_edges(subgraph) collapse_consistent_edges(subgraph) if key is not None: # FIXME when is it not pruned? prune_mechanism_by_data(subgraph, key) # depends on [control=['if'], data=['key']] return subgraph
def contains_opposite_color_piece(self, square, position):
    """
    Finds if square on the board is occupied by a ``Piece``
    belonging to the opponent.

    :type: square: Location
    :type: position: Board
    :rtype: bool
    """
    # An empty square cannot hold an enemy piece.
    if position.is_square_empty(square):
        return False

    # Occupied: it is an opponent piece exactly when the colors differ.
    return position.piece_at_square(square).color != self.color
def function[contains_opposite_color_piece, parameter[self, square, position]]: constant[ Finds if square on the board is occupied by a ``Piece`` belonging to the opponent. :type: square: Location :type: position: Board :rtype: bool ] return[<ast.BoolOp object at 0x7da20c990ac0>]
keyword[def] identifier[contains_opposite_color_piece] ( identifier[self] , identifier[square] , identifier[position] ): literal[string] keyword[return] keyword[not] identifier[position] . identifier[is_square_empty] ( identifier[square] ) keyword[and] identifier[position] . identifier[piece_at_square] ( identifier[square] ). identifier[color] != identifier[self] . identifier[color]
def contains_opposite_color_piece(self, square, position): """ Finds if square on the board is occupied by a ``Piece`` belonging to the opponent. :type: square: Location :type: position: Board :rtype: bool """ return not position.is_square_empty(square) and position.piece_at_square(square).color != self.color
def lint(filename, options=()):
    """Pylint the given file.

    When run from emacs we will be in the directory of a file, and passed its
    filename.  If this file is part of a package and is trying to import other
    modules from within its own package or another package rooted in a
    directory below it, pylint will classify it as a failed import.

    To get around this, we traverse down the directory tree to find the root
    of the package this module is in.  We then invoke pylint from this
    directory.

    Finally, we must correct the filenames in the output generated by pylint
    so Emacs doesn't become confused (it will expect just the original
    filename, while pylint may extend it with extra directories if we've
    traversed down the tree)
    """
    # Walk upward while the parent directory still looks like a package,
    # accumulating the path we strip off into a name relative to the root.
    abs_path = osp.abspath(filename)
    pkg_root = osp.dirname(abs_path)
    rel_name = osp.basename(abs_path)
    while pkg_root != "/" and osp.exists(osp.join(pkg_root, "__init__.py")):
        rel_name = osp.join(osp.basename(pkg_root), rel_name)
        pkg_root = osp.dirname(pkg_root)

    # Launch pylint via the interpreter running this epylint, so the same
    # environment (and the same pylint) is used.
    run_cmd = "import sys; from pylint.lint import Run; Run(sys.argv[1:])"
    cmd = [sys.executable, "-c", run_cmd]
    cmd += [
        "--msg-template",
        "{path}:{line}: {category} ({msg_id}, {symbol}, {obj}) {msg}",
        "-r",
        "n",
        rel_name,
    ]
    cmd += list(options)

    process = Popen(
        cmd, stdout=PIPE, cwd=pkg_root, env=_get_env(), universal_newlines=True
    )

    for line in process.stdout:
        # Drop the pylintrc warning line entirely.
        if line.startswith("No config file found"):
            continue

        # Undo the path traversal in the reported filename so Emacs sees the
        # name it passed in.
        pieces = line.split(":")
        if pieces and pieces[0] == rel_name:
            line = ":".join([filename] + pieces[1:])
        print(line, end=" ")

    process.wait()
    return process.returncode
def function[lint, parameter[filename, options]]: constant[Pylint the given file. When run from emacs we will be in the directory of a file, and passed its filename. If this file is part of a package and is trying to import other modules from within its own package or another package rooted in a directory below it, pylint will classify it as a failed import. To get around this, we traverse down the directory tree to find the root of the package this module is in. We then invoke pylint from this directory. Finally, we must correct the filenames in the output generated by pylint so Emacs doesn't become confused (it will expect just the original filename, while pylint may extend it with extra directories if we've traversed down the tree) ] variable[full_path] assign[=] call[name[osp].abspath, parameter[name[filename]]] variable[parent_path] assign[=] call[name[osp].dirname, parameter[name[full_path]]] variable[child_path] assign[=] call[name[osp].basename, parameter[name[full_path]]] while <ast.BoolOp object at 0x7da1b059dba0> begin[:] variable[child_path] assign[=] call[name[osp].join, parameter[call[name[osp].basename, parameter[name[parent_path]]], name[child_path]]] variable[parent_path] assign[=] call[name[osp].dirname, parameter[name[parent_path]]] variable[run_cmd] assign[=] constant[import sys; from pylint.lint import Run; Run(sys.argv[1:])] variable[cmd] assign[=] binary_operation[binary_operation[list[[<ast.Attribute object at 0x7da1b03814b0>, <ast.Constant object at 0x7da1b0382530>, <ast.Name object at 0x7da1b03833a0>]] + list[[<ast.Constant object at 0x7da1b0382aa0>, <ast.Constant object at 0x7da1b03809a0>, <ast.Constant object at 0x7da1b0381f00>, <ast.Constant object at 0x7da1b0383610>, <ast.Name object at 0x7da1b0383be0>]]] + call[name[list], parameter[name[options]]]] variable[process] assign[=] call[name[Popen], parameter[name[cmd]]] for taget[name[line]] in starred[name[process].stdout] begin[:] if call[name[line].startswith, parameter[constant[No 
config file found]]] begin[:] continue variable[parts] assign[=] call[name[line].split, parameter[constant[:]]] if <ast.BoolOp object at 0x7da1b0382350> begin[:] variable[line] assign[=] call[constant[:].join, parameter[binary_operation[list[[<ast.Name object at 0x7da1b0382290>]] + call[name[parts]][<ast.Slice object at 0x7da1b0383d00>]]]] call[name[print], parameter[name[line]]] call[name[process].wait, parameter[]] return[name[process].returncode]
keyword[def] identifier[lint] ( identifier[filename] , identifier[options] =()): literal[string] identifier[full_path] = identifier[osp] . identifier[abspath] ( identifier[filename] ) identifier[parent_path] = identifier[osp] . identifier[dirname] ( identifier[full_path] ) identifier[child_path] = identifier[osp] . identifier[basename] ( identifier[full_path] ) keyword[while] identifier[parent_path] != literal[string] keyword[and] identifier[osp] . identifier[exists] ( identifier[osp] . identifier[join] ( identifier[parent_path] , literal[string] )): identifier[child_path] = identifier[osp] . identifier[join] ( identifier[osp] . identifier[basename] ( identifier[parent_path] ), identifier[child_path] ) identifier[parent_path] = identifier[osp] . identifier[dirname] ( identifier[parent_path] ) identifier[run_cmd] = literal[string] identifier[cmd] =( [ identifier[sys] . identifier[executable] , literal[string] , identifier[run_cmd] ] +[ literal[string] , literal[string] , literal[string] , literal[string] , identifier[child_path] , ] + identifier[list] ( identifier[options] ) ) identifier[process] = identifier[Popen] ( identifier[cmd] , identifier[stdout] = identifier[PIPE] , identifier[cwd] = identifier[parent_path] , identifier[env] = identifier[_get_env] (), identifier[universal_newlines] = keyword[True] ) keyword[for] identifier[line] keyword[in] identifier[process] . identifier[stdout] : keyword[if] identifier[line] . identifier[startswith] ( literal[string] ): keyword[continue] identifier[parts] = identifier[line] . identifier[split] ( literal[string] ) keyword[if] identifier[parts] keyword[and] identifier[parts] [ literal[int] ]== identifier[child_path] : identifier[line] = literal[string] . identifier[join] ([ identifier[filename] ]+ identifier[parts] [ literal[int] :]) identifier[print] ( identifier[line] , identifier[end] = literal[string] ) identifier[process] . identifier[wait] () keyword[return] identifier[process] . identifier[returncode]
def lint(filename, options=()): """Pylint the given file. When run from emacs we will be in the directory of a file, and passed its filename. If this file is part of a package and is trying to import other modules from within its own package or another package rooted in a directory below it, pylint will classify it as a failed import. To get around this, we traverse down the directory tree to find the root of the package this module is in. We then invoke pylint from this directory. Finally, we must correct the filenames in the output generated by pylint so Emacs doesn't become confused (it will expect just the original filename, while pylint may extend it with extra directories if we've traversed down the tree) """ # traverse downwards until we are out of a python package full_path = osp.abspath(filename) parent_path = osp.dirname(full_path) child_path = osp.basename(full_path) while parent_path != '/' and osp.exists(osp.join(parent_path, '__init__.py')): child_path = osp.join(osp.basename(parent_path), child_path) parent_path = osp.dirname(parent_path) # depends on [control=['while'], data=[]] # Start pylint # Ensure we use the python and pylint associated with the running epylint run_cmd = 'import sys; from pylint.lint import Run; Run(sys.argv[1:])' cmd = [sys.executable, '-c', run_cmd] + ['--msg-template', '{path}:{line}: {category} ({msg_id}, {symbol}, {obj}) {msg}', '-r', 'n', child_path] + list(options) process = Popen(cmd, stdout=PIPE, cwd=parent_path, env=_get_env(), universal_newlines=True) for line in process.stdout: # remove pylintrc warning if line.startswith('No config file found'): continue # depends on [control=['if'], data=[]] # modify the file name thats output to reverse the path traversal we made parts = line.split(':') if parts and parts[0] == child_path: line = ':'.join([filename] + parts[1:]) # depends on [control=['if'], data=[]] print(line, end=' ') # depends on [control=['for'], data=['line']] process.wait() return process.returncode
def unassign_floating_ip(self, ip_addr):
    """
    Unassign a Floating IP from a Droplet. The Floating IP will be reserved in the
    region but not assigned to a Droplet.
    """
    # Floating IPs only exist in v2 of the API; bail out early otherwise.
    if self.api_version != 2:
        raise DoError(v2_api_required_str)

    endpoint = '/floating_ips/' + ip_addr + '/actions'
    json = self.request(endpoint, params={'type': 'unassign'}, method='POST')
    return json['action']
def function[unassign_floating_ip, parameter[self, ip_addr]]: constant[ Unassign a Floating IP from a Droplet. The Floating IP will be reserved in the region but not assigned to a Droplet. ] if compare[name[self].api_version equal[==] constant[2]] begin[:] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b26b3df0>], [<ast.Constant object at 0x7da1b26b3e20>]] variable[json] assign[=] call[name[self].request, parameter[binary_operation[binary_operation[constant[/floating_ips/] + name[ip_addr]] + constant[/actions]]]] return[call[name[json]][constant[action]]]
keyword[def] identifier[unassign_floating_ip] ( identifier[self] , identifier[ip_addr] ): literal[string] keyword[if] identifier[self] . identifier[api_version] == literal[int] : identifier[params] ={ literal[string] : literal[string] } identifier[json] = identifier[self] . identifier[request] ( literal[string] + identifier[ip_addr] + literal[string] , identifier[params] = identifier[params] , identifier[method] = literal[string] ) keyword[return] identifier[json] [ literal[string] ] keyword[else] : keyword[raise] identifier[DoError] ( identifier[v2_api_required_str] )
def unassign_floating_ip(self, ip_addr): """ Unassign a Floating IP from a Droplet. The Floating IP will be reserved in the region but not assigned to a Droplet. """ if self.api_version == 2: params = {'type': 'unassign'} json = self.request('/floating_ips/' + ip_addr + '/actions', params=params, method='POST') return json['action'] # depends on [control=['if'], data=[]] else: raise DoError(v2_api_required_str)
def text_uk_extremes(self, request):
    """
    Return textual data of UK extremes.

    request:
    metoffer.CAPABILITIES Returns available extreme date and issue time
    metoffer.LATEST       Returns data of latest extremes for all regions
    """
    # Query the textual-observations endpoint, then decode the raw bytes
    # (replacing undecodable characters) before parsing the JSON payload.
    raw = self._query(TEXT, OBSERVATIONS, UK_EXTREMES, request, "")
    return json.loads(raw.decode(errors="replace"))
def function[text_uk_extremes, parameter[self, request]]: constant[ Return textual data of UK extremes. request: metoffer.CAPABILITIES Returns available extreme date and issue time metoffer.LATEST Returns data of latest extremes for all regions ] return[call[name[json].loads, parameter[call[call[name[self]._query, parameter[name[TEXT], name[OBSERVATIONS], name[UK_EXTREMES], name[request], constant[]]].decode, parameter[]]]]]
keyword[def] identifier[text_uk_extremes] ( identifier[self] , identifier[request] ): literal[string] keyword[return] identifier[json] . identifier[loads] ( identifier[self] . identifier[_query] ( identifier[TEXT] , identifier[OBSERVATIONS] , identifier[UK_EXTREMES] , identifier[request] , literal[string] ). identifier[decode] ( identifier[errors] = literal[string] ))
def text_uk_extremes(self, request): """ Return textual data of UK extremes. request: metoffer.CAPABILITIES Returns available extreme date and issue time metoffer.LATEST Returns data of latest extremes for all regions """ return json.loads(self._query(TEXT, OBSERVATIONS, UK_EXTREMES, request, '').decode(errors='replace'))
def _decrypt_object(obj, **kwargs):
    '''
    Recursively try to decrypt any object. If the object is a six.string_types
    (string or unicode), and it contains a valid NACLENC pretext, decrypt it,
    otherwise keep going until a string is found.
    '''
    # StringIO-like objects: unwrap to their string value and recurse.
    if salt.utils.stringio.is_readable(obj):
        return _decrypt_object(obj.getvalue(), **kwargs)
    if isinstance(obj, six.string_types):
        # Run the regex once and reuse the match object; the original code
        # searched the same string twice (once to test, once to extract).
        match = re.search(NACL_REGEX, obj)
        if match is not None:
            return __salt__['nacl.dec'](match.group(1), **kwargs)
        return obj
    elif isinstance(obj, dict):
        # Decrypt values in place, keeping the original dict object.
        for key, value in six.iteritems(obj):
            obj[key] = _decrypt_object(value, **kwargs)
        return obj
    elif isinstance(obj, list):
        # Decrypt elements in place, keeping the original list object.
        for key, value in enumerate(obj):
            obj[key] = _decrypt_object(value, **kwargs)
        return obj
    else:
        # Non-container, non-string objects pass through untouched.
        return obj
def function[_decrypt_object, parameter[obj]]: constant[ Recursively try to decrypt any object. If the object is a six.string_types (string or unicode), and it contains a valid NACLENC pretext, decrypt it, otherwise keep going until a string is found. ] if call[name[salt].utils.stringio.is_readable, parameter[name[obj]]] begin[:] return[call[name[_decrypt_object], parameter[call[name[obj].getvalue, parameter[]]]]] if call[name[isinstance], parameter[name[obj], name[six].string_types]] begin[:] if compare[call[name[re].search, parameter[name[NACL_REGEX], name[obj]]] is_not constant[None]] begin[:] return[call[call[name[__salt__]][constant[nacl.dec]], parameter[call[call[name[re].search, parameter[name[NACL_REGEX], name[obj]]].group, parameter[constant[1]]]]]]
keyword[def] identifier[_decrypt_object] ( identifier[obj] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[salt] . identifier[utils] . identifier[stringio] . identifier[is_readable] ( identifier[obj] ): keyword[return] identifier[_decrypt_object] ( identifier[obj] . identifier[getvalue] (),** identifier[kwargs] ) keyword[if] identifier[isinstance] ( identifier[obj] , identifier[six] . identifier[string_types] ): keyword[if] identifier[re] . identifier[search] ( identifier[NACL_REGEX] , identifier[obj] ) keyword[is] keyword[not] keyword[None] : keyword[return] identifier[__salt__] [ literal[string] ]( identifier[re] . identifier[search] ( identifier[NACL_REGEX] , identifier[obj] ). identifier[group] ( literal[int] ),** identifier[kwargs] ) keyword[else] : keyword[return] identifier[obj] keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[dict] ): keyword[for] identifier[key] , identifier[value] keyword[in] identifier[six] . identifier[iteritems] ( identifier[obj] ): identifier[obj] [ identifier[key] ]= identifier[_decrypt_object] ( identifier[value] ,** identifier[kwargs] ) keyword[return] identifier[obj] keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[list] ): keyword[for] identifier[key] , identifier[value] keyword[in] identifier[enumerate] ( identifier[obj] ): identifier[obj] [ identifier[key] ]= identifier[_decrypt_object] ( identifier[value] ,** identifier[kwargs] ) keyword[return] identifier[obj] keyword[else] : keyword[return] identifier[obj]
def _decrypt_object(obj, **kwargs): """ Recursively try to decrypt any object. If the object is a six.string_types (string or unicode), and it contains a valid NACLENC pretext, decrypt it, otherwise keep going until a string is found. """ if salt.utils.stringio.is_readable(obj): return _decrypt_object(obj.getvalue(), **kwargs) # depends on [control=['if'], data=[]] if isinstance(obj, six.string_types): if re.search(NACL_REGEX, obj) is not None: return __salt__['nacl.dec'](re.search(NACL_REGEX, obj).group(1), **kwargs) # depends on [control=['if'], data=[]] else: return obj # depends on [control=['if'], data=[]] elif isinstance(obj, dict): for (key, value) in six.iteritems(obj): obj[key] = _decrypt_object(value, **kwargs) # depends on [control=['for'], data=[]] return obj # depends on [control=['if'], data=[]] elif isinstance(obj, list): for (key, value) in enumerate(obj): obj[key] = _decrypt_object(value, **kwargs) # depends on [control=['for'], data=[]] return obj # depends on [control=['if'], data=[]] else: return obj
def geomap_rscale(xyin,xyref,center=None):
    """
    Set up the products used for computing the fit derived using the code from
    lib/geofit.x for the function 'geo_fmagnify()'. Comparisons with results from
    geomap (no additional clipping) were made and produced the same results
    out to 5 decimal places.

    Output
    ------
    fit: dict
        Dictionary containing full solution for fit.
    """
    # NOTE(review): xcen/ycen are computed in both branches but never used
    # below — presumably leftover from the geofit port; confirm before removal.
    if center is not None:
        xcen = center[0]
        ycen = center[1]
    else:
        xcen = xyref[:,0].mean()
        ycen = xyref[:,1].mean()

    # Work in extended precision for the accumulated sums.
    dx = xyref[:,0].astype(ndfloat128)
    dy = xyref[:,1].astype(ndfloat128)
    du = xyin[:,0].astype(ndfloat128)
    dv = xyin[:,1].astype(ndfloat128)

    # Centroids of the reference (xr0, yr0) and input (xi0, yi0) coordinates.
    n = xyref.shape[0]
    Sx = dx.sum()
    Sy = dy.sum()
    Su = du.sum()
    Sv = dv.sum()
    xr0 = Sx/n
    yr0 = Sy/n
    xi0 = Su/n
    yi0 = Sv/n

    # Second moments / cross products about the centroids, as in geo_fmagnify().
    Sxrxr = np.power((dx-xr0),2).sum()
    Syryr = np.power((dy-yr0),2).sum()
    Syrxi = ((dy-yr0)*(du-xi0)).sum()
    Sxryi = ((dx-xr0)*(dv-yi0)).sum()
    Sxrxi = ((dx-xr0)*(du-xi0)).sum()
    Syryi = ((dy-yr0)*(dv-yi0)).sum()

    # Sign of this determinant-like quantity decides whether the solution
    # includes a reflection (axis flip).
    rot_num = Sxrxi * Syryi
    rot_denom = Syrxi * Sxryi
    if rot_num == rot_denom:
        det = 0.0
    else:
        det = rot_num - rot_denom

    # Pick the rotation-angle numerator/denominator for the reflected vs.
    # non-reflected case.
    if (det < 0):
        rot_num = Syrxi + Sxryi
        rot_denom = Sxrxi - Syryi
    else:
        rot_num = Syrxi - Sxryi
        rot_denom = Sxrxi + Syryi

    # Rotation angle in degrees, normalized to [0, 360).
    if rot_num == rot_denom:
        theta = 0.0
    else:
        theta = np.rad2deg(np.arctan2(rot_num,rot_denom))
        if theta < 0:
            theta += 360.0

    ctheta = np.cos(np.deg2rad(theta))
    stheta = np.sin(np.deg2rad(theta))

    # Magnification (scale) from the ratio of projected cross moments to the
    # reference second moments; fall back to 1.0 when the denominator is
    # degenerate (negative here — should not occur for real sums of squares).
    s_num = rot_denom*ctheta + rot_num*stheta
    s_denom = Sxrxr + Syryr
    if s_denom < 0:
        mag = 1.0
    else:
        mag = s_num/s_denom

    if det < 0:
        # "flip" y-axis (reflection about x-axis *after* rotation)
        # NOTE: keep in mind that 'fit_matrix'
        #       is the transposed rotation matrix.
        sthetax = -mag*stheta
        cthetay = -mag*ctheta
    else:
        sthetax = mag*stheta
        cthetay = mag*ctheta

    cthetax = mag*ctheta
    sthetay = mag*stheta

    # Shift that maps the rotated/scaled reference centroid onto the input
    # centroid; sdet folds the reflection sign into the off-diagonal terms.
    sdet = np.sign(det)
    xshift = (xi0 - (xr0*cthetax + sdet*yr0*sthetax)).astype(np.float64)
    yshift = (yi0 - (-sdet*xr0*sthetay + yr0*cthetay)).astype(np.float64)

    # Rows of the (transposed) linear transform plus the shift terms.
    P = np.array([ cthetax, sthetay, xshift],dtype=np.float64)
    Q = np.array([ -sthetax, cthetay, yshift],dtype=np.float64)

    # Return the shift, rotation, and scale changes
    result = build_fit(P, Q, fitgeom='rscale')
    resids = xyin - np.dot((xyref), result['fit_matrix']) - result['offset']
    result['rms'] = resids.std(axis=0)
    result['resids'] = resids
    # NOTE(review): the factor of 2 makes this sqrt(mean over both residual
    # components summed per point), not a per-component RMSE — confirm this is
    # the intended definition.
    result['rmse'] = float(np.sqrt(np.mean(2 * resids**2)))
    result['mae'] = float(np.mean(np.linalg.norm(resids, axis=1)))
    return result
def function[geomap_rscale, parameter[xyin, xyref, center]]: constant[ Set up the products used for computing the fit derived using the code from lib/geofit.x for the function 'geo_fmagnify()'. Comparisons with results from geomap (no additional clipping) were made and produced the same results out to 5 decimal places. Output ------ fit: dict Dictionary containing full solution for fit. ] if compare[name[center] is_not constant[None]] begin[:] variable[xcen] assign[=] call[name[center]][constant[0]] variable[ycen] assign[=] call[name[center]][constant[1]] variable[dx] assign[=] call[call[name[xyref]][tuple[[<ast.Slice object at 0x7da204622d40>, <ast.Constant object at 0x7da204623250>]]].astype, parameter[name[ndfloat128]]] variable[dy] assign[=] call[call[name[xyref]][tuple[[<ast.Slice object at 0x7da204620130>, <ast.Constant object at 0x7da204623e20>]]].astype, parameter[name[ndfloat128]]] variable[du] assign[=] call[call[name[xyin]][tuple[[<ast.Slice object at 0x7da204622e90>, <ast.Constant object at 0x7da2046219f0>]]].astype, parameter[name[ndfloat128]]] variable[dv] assign[=] call[call[name[xyin]][tuple[[<ast.Slice object at 0x7da204623850>, <ast.Constant object at 0x7da204621d20>]]].astype, parameter[name[ndfloat128]]] variable[n] assign[=] call[name[xyref].shape][constant[0]] variable[Sx] assign[=] call[name[dx].sum, parameter[]] variable[Sy] assign[=] call[name[dy].sum, parameter[]] variable[Su] assign[=] call[name[du].sum, parameter[]] variable[Sv] assign[=] call[name[dv].sum, parameter[]] variable[xr0] assign[=] binary_operation[name[Sx] / name[n]] variable[yr0] assign[=] binary_operation[name[Sy] / name[n]] variable[xi0] assign[=] binary_operation[name[Su] / name[n]] variable[yi0] assign[=] binary_operation[name[Sv] / name[n]] variable[Sxrxr] assign[=] call[call[name[np].power, parameter[binary_operation[name[dx] - name[xr0]], constant[2]]].sum, parameter[]] variable[Syryr] assign[=] call[call[name[np].power, parameter[binary_operation[name[dy] - 
name[yr0]], constant[2]]].sum, parameter[]] variable[Syrxi] assign[=] call[binary_operation[binary_operation[name[dy] - name[yr0]] * binary_operation[name[du] - name[xi0]]].sum, parameter[]] variable[Sxryi] assign[=] call[binary_operation[binary_operation[name[dx] - name[xr0]] * binary_operation[name[dv] - name[yi0]]].sum, parameter[]] variable[Sxrxi] assign[=] call[binary_operation[binary_operation[name[dx] - name[xr0]] * binary_operation[name[du] - name[xi0]]].sum, parameter[]] variable[Syryi] assign[=] call[binary_operation[binary_operation[name[dy] - name[yr0]] * binary_operation[name[dv] - name[yi0]]].sum, parameter[]] variable[rot_num] assign[=] binary_operation[name[Sxrxi] * name[Syryi]] variable[rot_denom] assign[=] binary_operation[name[Syrxi] * name[Sxryi]] if compare[name[rot_num] equal[==] name[rot_denom]] begin[:] variable[det] assign[=] constant[0.0] if compare[name[det] less[<] constant[0]] begin[:] variable[rot_num] assign[=] binary_operation[name[Syrxi] + name[Sxryi]] variable[rot_denom] assign[=] binary_operation[name[Sxrxi] - name[Syryi]] if compare[name[rot_num] equal[==] name[rot_denom]] begin[:] variable[theta] assign[=] constant[0.0] variable[ctheta] assign[=] call[name[np].cos, parameter[call[name[np].deg2rad, parameter[name[theta]]]]] variable[stheta] assign[=] call[name[np].sin, parameter[call[name[np].deg2rad, parameter[name[theta]]]]] variable[s_num] assign[=] binary_operation[binary_operation[name[rot_denom] * name[ctheta]] + binary_operation[name[rot_num] * name[stheta]]] variable[s_denom] assign[=] binary_operation[name[Sxrxr] + name[Syryr]] if compare[name[s_denom] less[<] constant[0]] begin[:] variable[mag] assign[=] constant[1.0] if compare[name[det] less[<] constant[0]] begin[:] variable[sthetax] assign[=] binary_operation[<ast.UnaryOp object at 0x7da1b1a7d450> * name[stheta]] variable[cthetay] assign[=] binary_operation[<ast.UnaryOp object at 0x7da1b1a7cdf0> * name[ctheta]] variable[cthetax] assign[=] binary_operation[name[mag] * 
name[ctheta]] variable[sthetay] assign[=] binary_operation[name[mag] * name[stheta]] variable[sdet] assign[=] call[name[np].sign, parameter[name[det]]] variable[xshift] assign[=] call[binary_operation[name[xi0] - binary_operation[binary_operation[name[xr0] * name[cthetax]] + binary_operation[binary_operation[name[sdet] * name[yr0]] * name[sthetax]]]].astype, parameter[name[np].float64]] variable[yshift] assign[=] call[binary_operation[name[yi0] - binary_operation[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b1a7c2e0> * name[xr0]] * name[sthetay]] + binary_operation[name[yr0] * name[cthetay]]]].astype, parameter[name[np].float64]] variable[P] assign[=] call[name[np].array, parameter[list[[<ast.Name object at 0x7da1b1a7ce80>, <ast.Name object at 0x7da1b1a7c280>, <ast.Name object at 0x7da1b1a7d210>]]]] variable[Q] assign[=] call[name[np].array, parameter[list[[<ast.UnaryOp object at 0x7da1b1a7d690>, <ast.Name object at 0x7da1b1a7d000>, <ast.Name object at 0x7da1b1a7d4e0>]]]] variable[result] assign[=] call[name[build_fit], parameter[name[P], name[Q]]] variable[resids] assign[=] binary_operation[binary_operation[name[xyin] - call[name[np].dot, parameter[name[xyref], call[name[result]][constant[fit_matrix]]]]] - call[name[result]][constant[offset]]] call[name[result]][constant[rms]] assign[=] call[name[resids].std, parameter[]] call[name[result]][constant[resids]] assign[=] name[resids] call[name[result]][constant[rmse]] assign[=] call[name[float], parameter[call[name[np].sqrt, parameter[call[name[np].mean, parameter[binary_operation[constant[2] * binary_operation[name[resids] ** constant[2]]]]]]]]] call[name[result]][constant[mae]] assign[=] call[name[float], parameter[call[name[np].mean, parameter[call[name[np].linalg.norm, parameter[name[resids]]]]]]] return[name[result]]
keyword[def] identifier[geomap_rscale] ( identifier[xyin] , identifier[xyref] , identifier[center] = keyword[None] ): literal[string] keyword[if] identifier[center] keyword[is] keyword[not] keyword[None] : identifier[xcen] = identifier[center] [ literal[int] ] identifier[ycen] = identifier[center] [ literal[int] ] keyword[else] : identifier[xcen] = identifier[xyref] [:, literal[int] ]. identifier[mean] () identifier[ycen] = identifier[xyref] [:, literal[int] ]. identifier[mean] () identifier[dx] = identifier[xyref] [:, literal[int] ]. identifier[astype] ( identifier[ndfloat128] ) identifier[dy] = identifier[xyref] [:, literal[int] ]. identifier[astype] ( identifier[ndfloat128] ) identifier[du] = identifier[xyin] [:, literal[int] ]. identifier[astype] ( identifier[ndfloat128] ) identifier[dv] = identifier[xyin] [:, literal[int] ]. identifier[astype] ( identifier[ndfloat128] ) identifier[n] = identifier[xyref] . identifier[shape] [ literal[int] ] identifier[Sx] = identifier[dx] . identifier[sum] () identifier[Sy] = identifier[dy] . identifier[sum] () identifier[Su] = identifier[du] . identifier[sum] () identifier[Sv] = identifier[dv] . identifier[sum] () identifier[xr0] = identifier[Sx] / identifier[n] identifier[yr0] = identifier[Sy] / identifier[n] identifier[xi0] = identifier[Su] / identifier[n] identifier[yi0] = identifier[Sv] / identifier[n] identifier[Sxrxr] = identifier[np] . identifier[power] (( identifier[dx] - identifier[xr0] ), literal[int] ). identifier[sum] () identifier[Syryr] = identifier[np] . identifier[power] (( identifier[dy] - identifier[yr0] ), literal[int] ). identifier[sum] () identifier[Syrxi] =(( identifier[dy] - identifier[yr0] )*( identifier[du] - identifier[xi0] )). identifier[sum] () identifier[Sxryi] =(( identifier[dx] - identifier[xr0] )*( identifier[dv] - identifier[yi0] )). identifier[sum] () identifier[Sxrxi] =(( identifier[dx] - identifier[xr0] )*( identifier[du] - identifier[xi0] )). 
identifier[sum] () identifier[Syryi] =(( identifier[dy] - identifier[yr0] )*( identifier[dv] - identifier[yi0] )). identifier[sum] () identifier[rot_num] = identifier[Sxrxi] * identifier[Syryi] identifier[rot_denom] = identifier[Syrxi] * identifier[Sxryi] keyword[if] identifier[rot_num] == identifier[rot_denom] : identifier[det] = literal[int] keyword[else] : identifier[det] = identifier[rot_num] - identifier[rot_denom] keyword[if] ( identifier[det] < literal[int] ): identifier[rot_num] = identifier[Syrxi] + identifier[Sxryi] identifier[rot_denom] = identifier[Sxrxi] - identifier[Syryi] keyword[else] : identifier[rot_num] = identifier[Syrxi] - identifier[Sxryi] identifier[rot_denom] = identifier[Sxrxi] + identifier[Syryi] keyword[if] identifier[rot_num] == identifier[rot_denom] : identifier[theta] = literal[int] keyword[else] : identifier[theta] = identifier[np] . identifier[rad2deg] ( identifier[np] . identifier[arctan2] ( identifier[rot_num] , identifier[rot_denom] )) keyword[if] identifier[theta] < literal[int] : identifier[theta] += literal[int] identifier[ctheta] = identifier[np] . identifier[cos] ( identifier[np] . identifier[deg2rad] ( identifier[theta] )) identifier[stheta] = identifier[np] . identifier[sin] ( identifier[np] . 
identifier[deg2rad] ( identifier[theta] )) identifier[s_num] = identifier[rot_denom] * identifier[ctheta] + identifier[rot_num] * identifier[stheta] identifier[s_denom] = identifier[Sxrxr] + identifier[Syryr] keyword[if] identifier[s_denom] < literal[int] : identifier[mag] = literal[int] keyword[else] : identifier[mag] = identifier[s_num] / identifier[s_denom] keyword[if] identifier[det] < literal[int] : identifier[sthetax] =- identifier[mag] * identifier[stheta] identifier[cthetay] =- identifier[mag] * identifier[ctheta] keyword[else] : identifier[sthetax] = identifier[mag] * identifier[stheta] identifier[cthetay] = identifier[mag] * identifier[ctheta] identifier[cthetax] = identifier[mag] * identifier[ctheta] identifier[sthetay] = identifier[mag] * identifier[stheta] identifier[sdet] = identifier[np] . identifier[sign] ( identifier[det] ) identifier[xshift] =( identifier[xi0] -( identifier[xr0] * identifier[cthetax] + identifier[sdet] * identifier[yr0] * identifier[sthetax] )). identifier[astype] ( identifier[np] . identifier[float64] ) identifier[yshift] =( identifier[yi0] -(- identifier[sdet] * identifier[xr0] * identifier[sthetay] + identifier[yr0] * identifier[cthetay] )). identifier[astype] ( identifier[np] . identifier[float64] ) identifier[P] = identifier[np] . identifier[array] ([ identifier[cthetax] , identifier[sthetay] , identifier[xshift] ], identifier[dtype] = identifier[np] . identifier[float64] ) identifier[Q] = identifier[np] . identifier[array] ([- identifier[sthetax] , identifier[cthetay] , identifier[yshift] ], identifier[dtype] = identifier[np] . identifier[float64] ) identifier[result] = identifier[build_fit] ( identifier[P] , identifier[Q] , identifier[fitgeom] = literal[string] ) identifier[resids] = identifier[xyin] - identifier[np] . identifier[dot] (( identifier[xyref] ), identifier[result] [ literal[string] ])- identifier[result] [ literal[string] ] identifier[result] [ literal[string] ]= identifier[resids] . 
identifier[std] ( identifier[axis] = literal[int] ) identifier[result] [ literal[string] ]= identifier[resids] identifier[result] [ literal[string] ]= identifier[float] ( identifier[np] . identifier[sqrt] ( identifier[np] . identifier[mean] ( literal[int] * identifier[resids] ** literal[int] ))) identifier[result] [ literal[string] ]= identifier[float] ( identifier[np] . identifier[mean] ( identifier[np] . identifier[linalg] . identifier[norm] ( identifier[resids] , identifier[axis] = literal[int] ))) keyword[return] identifier[result]
def geomap_rscale(xyin, xyref, center=None): """ Set up the products used for computing the fit derived using the code from lib/geofit.x for the function 'geo_fmagnify()'. Comparisons with results from geomap (no additional clipping) were made and produced the same results out to 5 decimal places. Output ------ fit: dict Dictionary containing full solution for fit. """ if center is not None: xcen = center[0] ycen = center[1] # depends on [control=['if'], data=['center']] else: xcen = xyref[:, 0].mean() ycen = xyref[:, 1].mean() dx = xyref[:, 0].astype(ndfloat128) dy = xyref[:, 1].astype(ndfloat128) du = xyin[:, 0].astype(ndfloat128) dv = xyin[:, 1].astype(ndfloat128) n = xyref.shape[0] Sx = dx.sum() Sy = dy.sum() Su = du.sum() Sv = dv.sum() xr0 = Sx / n yr0 = Sy / n xi0 = Su / n yi0 = Sv / n Sxrxr = np.power(dx - xr0, 2).sum() Syryr = np.power(dy - yr0, 2).sum() Syrxi = ((dy - yr0) * (du - xi0)).sum() Sxryi = ((dx - xr0) * (dv - yi0)).sum() Sxrxi = ((dx - xr0) * (du - xi0)).sum() Syryi = ((dy - yr0) * (dv - yi0)).sum() rot_num = Sxrxi * Syryi rot_denom = Syrxi * Sxryi if rot_num == rot_denom: det = 0.0 # depends on [control=['if'], data=[]] else: det = rot_num - rot_denom if det < 0: rot_num = Syrxi + Sxryi rot_denom = Sxrxi - Syryi # depends on [control=['if'], data=[]] else: rot_num = Syrxi - Sxryi rot_denom = Sxrxi + Syryi if rot_num == rot_denom: theta = 0.0 # depends on [control=['if'], data=[]] else: theta = np.rad2deg(np.arctan2(rot_num, rot_denom)) if theta < 0: theta += 360.0 # depends on [control=['if'], data=['theta']] ctheta = np.cos(np.deg2rad(theta)) stheta = np.sin(np.deg2rad(theta)) s_num = rot_denom * ctheta + rot_num * stheta s_denom = Sxrxr + Syryr if s_denom < 0: mag = 1.0 # depends on [control=['if'], data=[]] else: mag = s_num / s_denom if det < 0: # "flip" y-axis (reflection about x-axis *after* rotation) # NOTE: keep in mind that 'fit_matrix' # is the transposed rotation matrix. 
sthetax = -mag * stheta cthetay = -mag * ctheta # depends on [control=['if'], data=[]] else: sthetax = mag * stheta cthetay = mag * ctheta cthetax = mag * ctheta sthetay = mag * stheta sdet = np.sign(det) xshift = (xi0 - (xr0 * cthetax + sdet * yr0 * sthetax)).astype(np.float64) yshift = (yi0 - (-sdet * xr0 * sthetay + yr0 * cthetay)).astype(np.float64) P = np.array([cthetax, sthetay, xshift], dtype=np.float64) Q = np.array([-sthetax, cthetay, yshift], dtype=np.float64) # Return the shift, rotation, and scale changes result = build_fit(P, Q, fitgeom='rscale') resids = xyin - np.dot(xyref, result['fit_matrix']) - result['offset'] result['rms'] = resids.std(axis=0) result['resids'] = resids result['rmse'] = float(np.sqrt(np.mean(2 * resids ** 2))) result['mae'] = float(np.mean(np.linalg.norm(resids, axis=1))) return result
def create_topology(self, topologyName, topology): """ crate topology """ if not topology or not topology.IsInitialized(): raise_(StateException("Topology protobuf not init properly", StateException.EX_TYPE_PROTOBUF_ERROR), sys.exc_info()[2]) path = self.get_topology_path(topologyName) LOG.info("Adding topology: {0} to path: {1}".format( topologyName, path)) topologyString = topology.SerializeToString() try: self.client.create(path, value=topologyString, makepath=True) return True except NoNodeError: raise_(StateException("NoNodeError while creating topology", StateException.EX_TYPE_NO_NODE_ERROR), sys.exc_info()[2]) except NodeExistsError: raise_(StateException("NodeExistsError while creating topology", StateException.EX_TYPE_NODE_EXISTS_ERROR), sys.exc_info()[2]) except ZookeeperError: raise_(StateException("Zookeeper while creating topology", StateException.EX_TYPE_ZOOKEEPER_ERROR), sys.exc_info()[2]) except Exception: # Just re raise the exception. raise
def function[create_topology, parameter[self, topologyName, topology]]: constant[ crate topology ] if <ast.BoolOp object at 0x7da18c4ccdc0> begin[:] call[name[raise_], parameter[call[name[StateException], parameter[constant[Topology protobuf not init properly], name[StateException].EX_TYPE_PROTOBUF_ERROR]], call[call[name[sys].exc_info, parameter[]]][constant[2]]]] variable[path] assign[=] call[name[self].get_topology_path, parameter[name[topologyName]]] call[name[LOG].info, parameter[call[constant[Adding topology: {0} to path: {1}].format, parameter[name[topologyName], name[path]]]]] variable[topologyString] assign[=] call[name[topology].SerializeToString, parameter[]] <ast.Try object at 0x7da18c4ccca0>
keyword[def] identifier[create_topology] ( identifier[self] , identifier[topologyName] , identifier[topology] ): literal[string] keyword[if] keyword[not] identifier[topology] keyword[or] keyword[not] identifier[topology] . identifier[IsInitialized] (): identifier[raise_] ( identifier[StateException] ( literal[string] , identifier[StateException] . identifier[EX_TYPE_PROTOBUF_ERROR] ), identifier[sys] . identifier[exc_info] ()[ literal[int] ]) identifier[path] = identifier[self] . identifier[get_topology_path] ( identifier[topologyName] ) identifier[LOG] . identifier[info] ( literal[string] . identifier[format] ( identifier[topologyName] , identifier[path] )) identifier[topologyString] = identifier[topology] . identifier[SerializeToString] () keyword[try] : identifier[self] . identifier[client] . identifier[create] ( identifier[path] , identifier[value] = identifier[topologyString] , identifier[makepath] = keyword[True] ) keyword[return] keyword[True] keyword[except] identifier[NoNodeError] : identifier[raise_] ( identifier[StateException] ( literal[string] , identifier[StateException] . identifier[EX_TYPE_NO_NODE_ERROR] ), identifier[sys] . identifier[exc_info] ()[ literal[int] ]) keyword[except] identifier[NodeExistsError] : identifier[raise_] ( identifier[StateException] ( literal[string] , identifier[StateException] . identifier[EX_TYPE_NODE_EXISTS_ERROR] ), identifier[sys] . identifier[exc_info] ()[ literal[int] ]) keyword[except] identifier[ZookeeperError] : identifier[raise_] ( identifier[StateException] ( literal[string] , identifier[StateException] . identifier[EX_TYPE_ZOOKEEPER_ERROR] ), identifier[sys] . identifier[exc_info] ()[ literal[int] ]) keyword[except] identifier[Exception] : keyword[raise]
def create_topology(self, topologyName, topology): """ crate topology """ if not topology or not topology.IsInitialized(): raise_(StateException('Topology protobuf not init properly', StateException.EX_TYPE_PROTOBUF_ERROR), sys.exc_info()[2]) # depends on [control=['if'], data=[]] path = self.get_topology_path(topologyName) LOG.info('Adding topology: {0} to path: {1}'.format(topologyName, path)) topologyString = topology.SerializeToString() try: self.client.create(path, value=topologyString, makepath=True) return True # depends on [control=['try'], data=[]] except NoNodeError: raise_(StateException('NoNodeError while creating topology', StateException.EX_TYPE_NO_NODE_ERROR), sys.exc_info()[2]) # depends on [control=['except'], data=[]] except NodeExistsError: raise_(StateException('NodeExistsError while creating topology', StateException.EX_TYPE_NODE_EXISTS_ERROR), sys.exc_info()[2]) # depends on [control=['except'], data=[]] except ZookeeperError: raise_(StateException('Zookeeper while creating topology', StateException.EX_TYPE_ZOOKEEPER_ERROR), sys.exc_info()[2]) # depends on [control=['except'], data=[]] except Exception: # Just re raise the exception. raise # depends on [control=['except'], data=[]]
def add_relations(self, relations): """Add multiple relations to a bijection""" for source, destination in relations: self.add_relation(source, destination)
def function[add_relations, parameter[self, relations]]: constant[Add multiple relations to a bijection] for taget[tuple[[<ast.Name object at 0x7da18ede7cd0>, <ast.Name object at 0x7da18ede4f70>]]] in starred[name[relations]] begin[:] call[name[self].add_relation, parameter[name[source], name[destination]]]
keyword[def] identifier[add_relations] ( identifier[self] , identifier[relations] ): literal[string] keyword[for] identifier[source] , identifier[destination] keyword[in] identifier[relations] : identifier[self] . identifier[add_relation] ( identifier[source] , identifier[destination] )
def add_relations(self, relations): """Add multiple relations to a bijection""" for (source, destination) in relations: self.add_relation(source, destination) # depends on [control=['for'], data=[]]
def _set_hardware(self, v, load=False): """ Setter method for hardware, mapped from YANG variable /hardware (container) If this variable is read-only (config: false) in the source YANG file, then _set_hardware is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_hardware() directly. YANG Description: This specifies the group of configuration/operational elements to manage the hardware chracteristics of this managed entity. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=hardware.hardware, is_container='container', presence=False, yang_name="hardware", rest_name="hardware", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Hardware Management configuration', u'cli-add-mode': None, u'cli-suppress-show-path': None, u'cli-suppress-no': None, u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-hardware', defining_module='brocade-hardware', yang_type='container', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """hardware must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=hardware.hardware, is_container='container', presence=False, yang_name="hardware", rest_name="hardware", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Hardware Management configuration', u'cli-add-mode': None, u'cli-suppress-show-path': None, u'cli-suppress-no': None, u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-hardware', defining_module='brocade-hardware', yang_type='container', is_config=True)""", }) self.__hardware = t if hasattr(self, '_set'): self._set()
def function[_set_hardware, parameter[self, v, load]]: constant[ Setter method for hardware, mapped from YANG variable /hardware (container) If this variable is read-only (config: false) in the source YANG file, then _set_hardware is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_hardware() directly. YANG Description: This specifies the group of configuration/operational elements to manage the hardware chracteristics of this managed entity. ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da18bcca140> name[self].__hardware assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_hardware] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[hardware] . identifier[hardware] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__hardware] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_hardware(self, v, load=False): """ Setter method for hardware, mapped from YANG variable /hardware (container) If this variable is read-only (config: false) in the source YANG file, then _set_hardware is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_hardware() directly. YANG Description: This specifies the group of configuration/operational elements to manage the hardware chracteristics of this managed entity. """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=hardware.hardware, is_container='container', presence=False, yang_name='hardware', rest_name='hardware', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Hardware Management configuration', u'cli-add-mode': None, u'cli-suppress-show-path': None, u'cli-suppress-no': None, u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-hardware', defining_module='brocade-hardware', yang_type='container', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'hardware must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=hardware.hardware, is_container=\'container\', presence=False, yang_name="hardware", rest_name="hardware", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Hardware Management configuration\', u\'cli-add-mode\': None, u\'cli-suppress-show-path\': None, u\'cli-suppress-no\': None, u\'cli-full-command\': None}}, namespace=\'urn:brocade.com:mgmt:brocade-hardware\', defining_module=\'brocade-hardware\', yang_type=\'container\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__hardware = t if hasattr(self, '_set'): self._set() # depends on 
[control=['if'], data=[]]
def _refresh_token_flow(self): """Given a refresh token, obtain a new access token.""" url = '%s%s/oauth2/token' % (self.scheme, self.host) options = { 'grant_type': 'refresh_token', 'client_id': self.options.get('client_id'), 'client_secret': self.options.get('client_secret'), 'refresh_token': self.options.get('refresh_token') } options.update({ 'verify_ssl': self.options.get('verify_ssl', True), 'proxies': self.options.get('proxies', None) }) self.token = wrapped_resource( make_request('post', url, options)) self.access_token = self.token.access_token
def function[_refresh_token_flow, parameter[self]]: constant[Given a refresh token, obtain a new access token.] variable[url] assign[=] binary_operation[constant[%s%s/oauth2/token] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b26ae8f0>, <ast.Attribute object at 0x7da1b26acc70>]]] variable[options] assign[=] dictionary[[<ast.Constant object at 0x7da1b26ae6b0>, <ast.Constant object at 0x7da1b26afee0>, <ast.Constant object at 0x7da1b26ac730>, <ast.Constant object at 0x7da1b26af550>], [<ast.Constant object at 0x7da1b26ac550>, <ast.Call object at 0x7da1b26ae8c0>, <ast.Call object at 0x7da1b26aebf0>, <ast.Call object at 0x7da1b26ae080>]] call[name[options].update, parameter[dictionary[[<ast.Constant object at 0x7da1b26af2b0>, <ast.Constant object at 0x7da1b26acf40>], [<ast.Call object at 0x7da1b26ac340>, <ast.Call object at 0x7da1b26ad510>]]]] name[self].token assign[=] call[name[wrapped_resource], parameter[call[name[make_request], parameter[constant[post], name[url], name[options]]]]] name[self].access_token assign[=] name[self].token.access_token
keyword[def] identifier[_refresh_token_flow] ( identifier[self] ): literal[string] identifier[url] = literal[string] %( identifier[self] . identifier[scheme] , identifier[self] . identifier[host] ) identifier[options] ={ literal[string] : literal[string] , literal[string] : identifier[self] . identifier[options] . identifier[get] ( literal[string] ), literal[string] : identifier[self] . identifier[options] . identifier[get] ( literal[string] ), literal[string] : identifier[self] . identifier[options] . identifier[get] ( literal[string] ) } identifier[options] . identifier[update] ({ literal[string] : identifier[self] . identifier[options] . identifier[get] ( literal[string] , keyword[True] ), literal[string] : identifier[self] . identifier[options] . identifier[get] ( literal[string] , keyword[None] ) }) identifier[self] . identifier[token] = identifier[wrapped_resource] ( identifier[make_request] ( literal[string] , identifier[url] , identifier[options] )) identifier[self] . identifier[access_token] = identifier[self] . identifier[token] . identifier[access_token]
def _refresh_token_flow(self): """Given a refresh token, obtain a new access token.""" url = '%s%s/oauth2/token' % (self.scheme, self.host) options = {'grant_type': 'refresh_token', 'client_id': self.options.get('client_id'), 'client_secret': self.options.get('client_secret'), 'refresh_token': self.options.get('refresh_token')} options.update({'verify_ssl': self.options.get('verify_ssl', True), 'proxies': self.options.get('proxies', None)}) self.token = wrapped_resource(make_request('post', url, options)) self.access_token = self.token.access_token
def generate_parameter_set_excel_file(self): """ Generate an excel file containing the parameter sets in a format you can import into SimaPro Developer. The file will be called "ParameterSet_<ModelName>_input_file.xlsx" """ parameter_sets = self.parameter_sets p_set = [] filename = "ParameterSet_{}_input_file.xlsx".format(self.name) if self.save_option == 'curdir': base_dir = os.getcwd() else: base_dir = os.path.join(storage.simapro_dir, self.name.replace(" ", "_")) if not os.path.isdir(base_dir): os.mkdir(base_dir) p_set_name = os.path.join(base_dir, filename) p = self.params for k in p.keys(): if p[k]['function'] is None: base_dict = {'id': k, 'name': p[k]['description'], 'unit': p[k]['unit']} for s in parameter_sets.keys(): base_dict[s] = parameter_sets[s][k] p_set.append(base_dict) else: pass #print("{} is determined by a function".format(p[k]['description'])) for e in self.ext_params: base_dict = {'id': '{}'.format(e['name']), 'type': 'external', 'name': e['description'], 'unit': ''} for s in parameter_sets.keys(): base_dict[s] = parameter_sets[s][e['name']] p_set.append(base_dict) df = pd.DataFrame(p_set) with pd.ExcelWriter(p_set_name, engine='xlsxwriter') as writer: ps_columns = [k for k in parameter_sets.keys()] #print (ps_columns) my_columns = ['name', 'unit', 'id'] my_columns.extend(ps_columns) #print (my_columns) #print(df) df.to_excel(writer, sheet_name=self.name, columns=my_columns, index=False, merge_cells=False) return p_set_name
def function[generate_parameter_set_excel_file, parameter[self]]: constant[ Generate an excel file containing the parameter sets in a format you can import into SimaPro Developer. The file will be called "ParameterSet_<ModelName>_input_file.xlsx" ] variable[parameter_sets] assign[=] name[self].parameter_sets variable[p_set] assign[=] list[[]] variable[filename] assign[=] call[constant[ParameterSet_{}_input_file.xlsx].format, parameter[name[self].name]] if compare[name[self].save_option equal[==] constant[curdir]] begin[:] variable[base_dir] assign[=] call[name[os].getcwd, parameter[]] if <ast.UnaryOp object at 0x7da1b2373460> begin[:] call[name[os].mkdir, parameter[name[base_dir]]] variable[p_set_name] assign[=] call[name[os].path.join, parameter[name[base_dir], name[filename]]] variable[p] assign[=] name[self].params for taget[name[k]] in starred[call[name[p].keys, parameter[]]] begin[:] if compare[call[call[name[p]][name[k]]][constant[function]] is constant[None]] begin[:] variable[base_dict] assign[=] dictionary[[<ast.Constant object at 0x7da1b244b610>, <ast.Constant object at 0x7da1b2448dc0>, <ast.Constant object at 0x7da1b244b160>], [<ast.Name object at 0x7da1b24489d0>, <ast.Subscript object at 0x7da1b24494e0>, <ast.Subscript object at 0x7da1b2448700>]] for taget[name[s]] in starred[call[name[parameter_sets].keys, parameter[]]] begin[:] call[name[base_dict]][name[s]] assign[=] call[call[name[parameter_sets]][name[s]]][name[k]] call[name[p_set].append, parameter[name[base_dict]]] for taget[name[e]] in starred[name[self].ext_params] begin[:] variable[base_dict] assign[=] dictionary[[<ast.Constant object at 0x7da1b24496c0>, <ast.Constant object at 0x7da1b244a8f0>, <ast.Constant object at 0x7da1b2448430>, <ast.Constant object at 0x7da1b2449060>], [<ast.Call object at 0x7da1b2449870>, <ast.Constant object at 0x7da1b244ae00>, <ast.Subscript object at 0x7da1b2449e10>, <ast.Constant object at 0x7da1b24482b0>]] for taget[name[s]] in 
starred[call[name[parameter_sets].keys, parameter[]]] begin[:] call[name[base_dict]][name[s]] assign[=] call[call[name[parameter_sets]][name[s]]][call[name[e]][constant[name]]] call[name[p_set].append, parameter[name[base_dict]]] variable[df] assign[=] call[name[pd].DataFrame, parameter[name[p_set]]] with call[name[pd].ExcelWriter, parameter[name[p_set_name]]] begin[:] variable[ps_columns] assign[=] <ast.ListComp object at 0x7da1b23e8a00> variable[my_columns] assign[=] list[[<ast.Constant object at 0x7da1b23e9c60>, <ast.Constant object at 0x7da1b23e8d60>, <ast.Constant object at 0x7da1b23ea350>]] call[name[my_columns].extend, parameter[name[ps_columns]]] call[name[df].to_excel, parameter[name[writer]]] return[name[p_set_name]]
keyword[def] identifier[generate_parameter_set_excel_file] ( identifier[self] ): literal[string] identifier[parameter_sets] = identifier[self] . identifier[parameter_sets] identifier[p_set] =[] identifier[filename] = literal[string] . identifier[format] ( identifier[self] . identifier[name] ) keyword[if] identifier[self] . identifier[save_option] == literal[string] : identifier[base_dir] = identifier[os] . identifier[getcwd] () keyword[else] : identifier[base_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[storage] . identifier[simapro_dir] , identifier[self] . identifier[name] . identifier[replace] ( literal[string] , literal[string] )) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[base_dir] ): identifier[os] . identifier[mkdir] ( identifier[base_dir] ) identifier[p_set_name] = identifier[os] . identifier[path] . identifier[join] ( identifier[base_dir] , identifier[filename] ) identifier[p] = identifier[self] . identifier[params] keyword[for] identifier[k] keyword[in] identifier[p] . identifier[keys] (): keyword[if] identifier[p] [ identifier[k] ][ literal[string] ] keyword[is] keyword[None] : identifier[base_dict] ={ literal[string] : identifier[k] , literal[string] : identifier[p] [ identifier[k] ][ literal[string] ], literal[string] : identifier[p] [ identifier[k] ][ literal[string] ]} keyword[for] identifier[s] keyword[in] identifier[parameter_sets] . identifier[keys] (): identifier[base_dict] [ identifier[s] ]= identifier[parameter_sets] [ identifier[s] ][ identifier[k] ] identifier[p_set] . identifier[append] ( identifier[base_dict] ) keyword[else] : keyword[pass] keyword[for] identifier[e] keyword[in] identifier[self] . identifier[ext_params] : identifier[base_dict] ={ literal[string] : literal[string] . 
identifier[format] ( identifier[e] [ literal[string] ]), literal[string] : literal[string] , literal[string] : identifier[e] [ literal[string] ], literal[string] : literal[string] } keyword[for] identifier[s] keyword[in] identifier[parameter_sets] . identifier[keys] (): identifier[base_dict] [ identifier[s] ]= identifier[parameter_sets] [ identifier[s] ][ identifier[e] [ literal[string] ]] identifier[p_set] . identifier[append] ( identifier[base_dict] ) identifier[df] = identifier[pd] . identifier[DataFrame] ( identifier[p_set] ) keyword[with] identifier[pd] . identifier[ExcelWriter] ( identifier[p_set_name] , identifier[engine] = literal[string] ) keyword[as] identifier[writer] : identifier[ps_columns] =[ identifier[k] keyword[for] identifier[k] keyword[in] identifier[parameter_sets] . identifier[keys] ()] identifier[my_columns] =[ literal[string] , literal[string] , literal[string] ] identifier[my_columns] . identifier[extend] ( identifier[ps_columns] ) identifier[df] . identifier[to_excel] ( identifier[writer] , identifier[sheet_name] = identifier[self] . identifier[name] , identifier[columns] = identifier[my_columns] , identifier[index] = keyword[False] , identifier[merge_cells] = keyword[False] ) keyword[return] identifier[p_set_name]
def generate_parameter_set_excel_file(self): """ Generate an excel file containing the parameter sets in a format you can import into SimaPro Developer. The file will be called "ParameterSet_<ModelName>_input_file.xlsx" """ parameter_sets = self.parameter_sets p_set = [] filename = 'ParameterSet_{}_input_file.xlsx'.format(self.name) if self.save_option == 'curdir': base_dir = os.getcwd() # depends on [control=['if'], data=[]] else: base_dir = os.path.join(storage.simapro_dir, self.name.replace(' ', '_')) if not os.path.isdir(base_dir): os.mkdir(base_dir) # depends on [control=['if'], data=[]] p_set_name = os.path.join(base_dir, filename) p = self.params for k in p.keys(): if p[k]['function'] is None: base_dict = {'id': k, 'name': p[k]['description'], 'unit': p[k]['unit']} for s in parameter_sets.keys(): base_dict[s] = parameter_sets[s][k] # depends on [control=['for'], data=['s']] p_set.append(base_dict) # depends on [control=['if'], data=[]] else: pass # depends on [control=['for'], data=['k']] #print("{} is determined by a function".format(p[k]['description'])) for e in self.ext_params: base_dict = {'id': '{}'.format(e['name']), 'type': 'external', 'name': e['description'], 'unit': ''} for s in parameter_sets.keys(): base_dict[s] = parameter_sets[s][e['name']] # depends on [control=['for'], data=['s']] p_set.append(base_dict) # depends on [control=['for'], data=['e']] df = pd.DataFrame(p_set) with pd.ExcelWriter(p_set_name, engine='xlsxwriter') as writer: ps_columns = [k for k in parameter_sets.keys()] #print (ps_columns) my_columns = ['name', 'unit', 'id'] my_columns.extend(ps_columns) #print (my_columns) #print(df) df.to_excel(writer, sheet_name=self.name, columns=my_columns, index=False, merge_cells=False) # depends on [control=['with'], data=['writer']] return p_set_name
def pdf(self, x_data, step=1e-7): """ Probability density function. If possible the density will be calculated analytically. If not possible, it will be approximated by approximating the one-dimensional derivative of the forward Rosenblatt transformation and multiplying the component parts. Note that even if the distribution is multivariate, each component of the Rosenblatt is one-dimensional. Args: x_data (numpy.ndarray): Location for the density function. ``x_data.shape`` must be compatible with distribution shape. step (float, numpy.ndarray): If approximation is used, the step length given in the approximation of the derivative. If array provided, elements are used along each axis. Returns: (numpy.ndarray): Evaluated density function values. Shapes are related through the identity ``x_data.shape == dist.shape+out.shape``. """ x_data = numpy.asfarray(x_data) shape = x_data.shape x_data = x_data.reshape(len(self), -1) lower, upper = evaluation.evaluate_bound(self, x_data) f_data = numpy.zeros(x_data.shape) indices = (x_data <= upper) & (x_data >= lower) f_data[indices] = evaluation.evaluate_density(self, x_data)[indices] f_data = f_data.reshape(shape) if len(self) > 1: f_data = numpy.prod(f_data, 0) return f_data
def function[pdf, parameter[self, x_data, step]]: constant[ Probability density function. If possible the density will be calculated analytically. If not possible, it will be approximated by approximating the one-dimensional derivative of the forward Rosenblatt transformation and multiplying the component parts. Note that even if the distribution is multivariate, each component of the Rosenblatt is one-dimensional. Args: x_data (numpy.ndarray): Location for the density function. ``x_data.shape`` must be compatible with distribution shape. step (float, numpy.ndarray): If approximation is used, the step length given in the approximation of the derivative. If array provided, elements are used along each axis. Returns: (numpy.ndarray): Evaluated density function values. Shapes are related through the identity ``x_data.shape == dist.shape+out.shape``. ] variable[x_data] assign[=] call[name[numpy].asfarray, parameter[name[x_data]]] variable[shape] assign[=] name[x_data].shape variable[x_data] assign[=] call[name[x_data].reshape, parameter[call[name[len], parameter[name[self]]], <ast.UnaryOp object at 0x7da2043467d0>]] <ast.Tuple object at 0x7da204346b00> assign[=] call[name[evaluation].evaluate_bound, parameter[name[self], name[x_data]]] variable[f_data] assign[=] call[name[numpy].zeros, parameter[name[x_data].shape]] variable[indices] assign[=] binary_operation[compare[name[x_data] less_or_equal[<=] name[upper]] <ast.BitAnd object at 0x7da2590d6b60> compare[name[x_data] greater_or_equal[>=] name[lower]]] call[name[f_data]][name[indices]] assign[=] call[call[name[evaluation].evaluate_density, parameter[name[self], name[x_data]]]][name[indices]] variable[f_data] assign[=] call[name[f_data].reshape, parameter[name[shape]]] if compare[call[name[len], parameter[name[self]]] greater[>] constant[1]] begin[:] variable[f_data] assign[=] call[name[numpy].prod, parameter[name[f_data], constant[0]]] return[name[f_data]]
keyword[def] identifier[pdf] ( identifier[self] , identifier[x_data] , identifier[step] = literal[int] ): literal[string] identifier[x_data] = identifier[numpy] . identifier[asfarray] ( identifier[x_data] ) identifier[shape] = identifier[x_data] . identifier[shape] identifier[x_data] = identifier[x_data] . identifier[reshape] ( identifier[len] ( identifier[self] ),- literal[int] ) identifier[lower] , identifier[upper] = identifier[evaluation] . identifier[evaluate_bound] ( identifier[self] , identifier[x_data] ) identifier[f_data] = identifier[numpy] . identifier[zeros] ( identifier[x_data] . identifier[shape] ) identifier[indices] =( identifier[x_data] <= identifier[upper] )&( identifier[x_data] >= identifier[lower] ) identifier[f_data] [ identifier[indices] ]= identifier[evaluation] . identifier[evaluate_density] ( identifier[self] , identifier[x_data] )[ identifier[indices] ] identifier[f_data] = identifier[f_data] . identifier[reshape] ( identifier[shape] ) keyword[if] identifier[len] ( identifier[self] )> literal[int] : identifier[f_data] = identifier[numpy] . identifier[prod] ( identifier[f_data] , literal[int] ) keyword[return] identifier[f_data]
def pdf(self, x_data, step=1e-07): """ Probability density function. If possible the density will be calculated analytically. If not possible, it will be approximated by approximating the one-dimensional derivative of the forward Rosenblatt transformation and multiplying the component parts. Note that even if the distribution is multivariate, each component of the Rosenblatt is one-dimensional. Args: x_data (numpy.ndarray): Location for the density function. ``x_data.shape`` must be compatible with distribution shape. step (float, numpy.ndarray): If approximation is used, the step length given in the approximation of the derivative. If array provided, elements are used along each axis. Returns: (numpy.ndarray): Evaluated density function values. Shapes are related through the identity ``x_data.shape == dist.shape+out.shape``. """ x_data = numpy.asfarray(x_data) shape = x_data.shape x_data = x_data.reshape(len(self), -1) (lower, upper) = evaluation.evaluate_bound(self, x_data) f_data = numpy.zeros(x_data.shape) indices = (x_data <= upper) & (x_data >= lower) f_data[indices] = evaluation.evaluate_density(self, x_data)[indices] f_data = f_data.reshape(shape) if len(self) > 1: f_data = numpy.prod(f_data, 0) # depends on [control=['if'], data=[]] return f_data
def create_contentkey_authorization_policy(access_token, content): '''Create Media Service Content Key Authorization Policy. Args: access_token (str): A valid Azure authentication token. content (str): Content Payload. Returns: HTTP response. JSON body. ''' path = '/ContentKeyAuthorizationPolicies' endpoint = ''.join([ams_rest_endpoint, path]) body = content return do_ams_post(endpoint, path, body, access_token)
def function[create_contentkey_authorization_policy, parameter[access_token, content]]: constant[Create Media Service Content Key Authorization Policy. Args: access_token (str): A valid Azure authentication token. content (str): Content Payload. Returns: HTTP response. JSON body. ] variable[path] assign[=] constant[/ContentKeyAuthorizationPolicies] variable[endpoint] assign[=] call[constant[].join, parameter[list[[<ast.Name object at 0x7da1b04ca4a0>, <ast.Name object at 0x7da1b04c8310>]]]] variable[body] assign[=] name[content] return[call[name[do_ams_post], parameter[name[endpoint], name[path], name[body], name[access_token]]]]
keyword[def] identifier[create_contentkey_authorization_policy] ( identifier[access_token] , identifier[content] ): literal[string] identifier[path] = literal[string] identifier[endpoint] = literal[string] . identifier[join] ([ identifier[ams_rest_endpoint] , identifier[path] ]) identifier[body] = identifier[content] keyword[return] identifier[do_ams_post] ( identifier[endpoint] , identifier[path] , identifier[body] , identifier[access_token] )
def create_contentkey_authorization_policy(access_token, content): """Create Media Service Content Key Authorization Policy. Args: access_token (str): A valid Azure authentication token. content (str): Content Payload. Returns: HTTP response. JSON body. """ path = '/ContentKeyAuthorizationPolicies' endpoint = ''.join([ams_rest_endpoint, path]) body = content return do_ams_post(endpoint, path, body, access_token)
def initialize_zones(self): """initialize receiver zones""" zone_list = self.location_info.get('zone_list', {'main': True}) for zone_id in zone_list: if zone_list[zone_id]: # Location setup is valid self.zones[zone_id] = Zone(self, zone_id=zone_id) else: # Location setup is not valid _LOGGER.debug("Ignoring zone: %s", zone_id)
def function[initialize_zones, parameter[self]]: constant[initialize receiver zones] variable[zone_list] assign[=] call[name[self].location_info.get, parameter[constant[zone_list], dictionary[[<ast.Constant object at 0x7da204566bc0>], [<ast.Constant object at 0x7da204566f20>]]]] for taget[name[zone_id]] in starred[name[zone_list]] begin[:] if call[name[zone_list]][name[zone_id]] begin[:] call[name[self].zones][name[zone_id]] assign[=] call[name[Zone], parameter[name[self]]]
keyword[def] identifier[initialize_zones] ( identifier[self] ): literal[string] identifier[zone_list] = identifier[self] . identifier[location_info] . identifier[get] ( literal[string] ,{ literal[string] : keyword[True] }) keyword[for] identifier[zone_id] keyword[in] identifier[zone_list] : keyword[if] identifier[zone_list] [ identifier[zone_id] ]: identifier[self] . identifier[zones] [ identifier[zone_id] ]= identifier[Zone] ( identifier[self] , identifier[zone_id] = identifier[zone_id] ) keyword[else] : identifier[_LOGGER] . identifier[debug] ( literal[string] , identifier[zone_id] )
def initialize_zones(self): """initialize receiver zones""" zone_list = self.location_info.get('zone_list', {'main': True}) for zone_id in zone_list: if zone_list[zone_id]: # Location setup is valid self.zones[zone_id] = Zone(self, zone_id=zone_id) # depends on [control=['if'], data=[]] else: # Location setup is not valid _LOGGER.debug('Ignoring zone: %s', zone_id) # depends on [control=['for'], data=['zone_id']]
def delete_selection(self, selection=None): """Deletes selection, marks content as changed If selection is None then the current grid selection is used. Parameters ---------- selection: Selection, defaults to None \tSelection that shall be deleted """ # Mark content as changed post_command_event(self.main_window, self.ContentChangedMsg) if selection is None: selection = self.get_selection() current_table = self.grid.current_table for row, col, tab in self.grid.code_array.dict_grid.keys(): if tab == current_table and (row, col) in selection: self.grid.actions.delete_cell((row, col, tab)) self.grid.code_array.result_cache.clear()
def function[delete_selection, parameter[self, selection]]: constant[Deletes selection, marks content as changed If selection is None then the current grid selection is used. Parameters ---------- selection: Selection, defaults to None Selection that shall be deleted ] call[name[post_command_event], parameter[name[self].main_window, name[self].ContentChangedMsg]] if compare[name[selection] is constant[None]] begin[:] variable[selection] assign[=] call[name[self].get_selection, parameter[]] variable[current_table] assign[=] name[self].grid.current_table for taget[tuple[[<ast.Name object at 0x7da1b15037f0>, <ast.Name object at 0x7da1b1501720>, <ast.Name object at 0x7da1b15013c0>]]] in starred[call[name[self].grid.code_array.dict_grid.keys, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da1b1503d90> begin[:] call[name[self].grid.actions.delete_cell, parameter[tuple[[<ast.Name object at 0x7da1b1502d70>, <ast.Name object at 0x7da1b1502ec0>, <ast.Name object at 0x7da1b1501f60>]]]] call[name[self].grid.code_array.result_cache.clear, parameter[]]
keyword[def] identifier[delete_selection] ( identifier[self] , identifier[selection] = keyword[None] ): literal[string] identifier[post_command_event] ( identifier[self] . identifier[main_window] , identifier[self] . identifier[ContentChangedMsg] ) keyword[if] identifier[selection] keyword[is] keyword[None] : identifier[selection] = identifier[self] . identifier[get_selection] () identifier[current_table] = identifier[self] . identifier[grid] . identifier[current_table] keyword[for] identifier[row] , identifier[col] , identifier[tab] keyword[in] identifier[self] . identifier[grid] . identifier[code_array] . identifier[dict_grid] . identifier[keys] (): keyword[if] identifier[tab] == identifier[current_table] keyword[and] ( identifier[row] , identifier[col] ) keyword[in] identifier[selection] : identifier[self] . identifier[grid] . identifier[actions] . identifier[delete_cell] (( identifier[row] , identifier[col] , identifier[tab] )) identifier[self] . identifier[grid] . identifier[code_array] . identifier[result_cache] . identifier[clear] ()
def delete_selection(self, selection=None): """Deletes selection, marks content as changed If selection is None then the current grid selection is used. Parameters ---------- selection: Selection, defaults to None Selection that shall be deleted """ # Mark content as changed post_command_event(self.main_window, self.ContentChangedMsg) if selection is None: selection = self.get_selection() # depends on [control=['if'], data=['selection']] current_table = self.grid.current_table for (row, col, tab) in self.grid.code_array.dict_grid.keys(): if tab == current_table and (row, col) in selection: self.grid.actions.delete_cell((row, col, tab)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] self.grid.code_array.result_cache.clear()
def _add_discovery_config(self): """Add the Discovery configuration to our list of configs. This should only be called with self._config_lock. The code here assumes the lock is held. """ lookup_key = (discovery_service.DiscoveryService.API_CONFIG['name'], discovery_service.DiscoveryService.API_CONFIG['version']) self._configs[lookup_key] = discovery_service.DiscoveryService.API_CONFIG
def function[_add_discovery_config, parameter[self]]: constant[Add the Discovery configuration to our list of configs. This should only be called with self._config_lock. The code here assumes the lock is held. ] variable[lookup_key] assign[=] tuple[[<ast.Subscript object at 0x7da1b0ec1bd0>, <ast.Subscript object at 0x7da1b0ec1c00>]] call[name[self]._configs][name[lookup_key]] assign[=] name[discovery_service].DiscoveryService.API_CONFIG
keyword[def] identifier[_add_discovery_config] ( identifier[self] ): literal[string] identifier[lookup_key] =( identifier[discovery_service] . identifier[DiscoveryService] . identifier[API_CONFIG] [ literal[string] ], identifier[discovery_service] . identifier[DiscoveryService] . identifier[API_CONFIG] [ literal[string] ]) identifier[self] . identifier[_configs] [ identifier[lookup_key] ]= identifier[discovery_service] . identifier[DiscoveryService] . identifier[API_CONFIG]
def _add_discovery_config(self): """Add the Discovery configuration to our list of configs. This should only be called with self._config_lock. The code here assumes the lock is held. """ lookup_key = (discovery_service.DiscoveryService.API_CONFIG['name'], discovery_service.DiscoveryService.API_CONFIG['version']) self._configs[lookup_key] = discovery_service.DiscoveryService.API_CONFIG
def _getDistances(self, inputPattern, partitionId=None): """Return the distances from inputPattern to all stored patterns. :param inputPattern The pattern from which distances to all other patterns are returned :param partitionId If provided, ignore all training vectors with this partitionId. """ if not self._finishedLearning: self.finishLearning() self._finishedLearning = True if self._vt is not None and len(self._vt) > 0: inputPattern = numpy.dot(self._vt, inputPattern - self._mean) sparseInput = self._sparsifyVector(inputPattern) # Compute distances dist = self._calcDistance(sparseInput) # Invalidate results where category is -1 if self._specificIndexTraining: dist[numpy.array(self._categoryList) == -1] = numpy.inf # Ignore vectors with this partition id by setting their distances to inf if partitionId is not None: dist[self._partitionIdMap.get(partitionId, [])] = numpy.inf return dist
def function[_getDistances, parameter[self, inputPattern, partitionId]]: constant[Return the distances from inputPattern to all stored patterns. :param inputPattern The pattern from which distances to all other patterns are returned :param partitionId If provided, ignore all training vectors with this partitionId. ] if <ast.UnaryOp object at 0x7da20c7cbeb0> begin[:] call[name[self].finishLearning, parameter[]] name[self]._finishedLearning assign[=] constant[True] if <ast.BoolOp object at 0x7da20c7c8d60> begin[:] variable[inputPattern] assign[=] call[name[numpy].dot, parameter[name[self]._vt, binary_operation[name[inputPattern] - name[self]._mean]]] variable[sparseInput] assign[=] call[name[self]._sparsifyVector, parameter[name[inputPattern]]] variable[dist] assign[=] call[name[self]._calcDistance, parameter[name[sparseInput]]] if name[self]._specificIndexTraining begin[:] call[name[dist]][compare[call[name[numpy].array, parameter[name[self]._categoryList]] equal[==] <ast.UnaryOp object at 0x7da18f09eb90>]] assign[=] name[numpy].inf if compare[name[partitionId] is_not constant[None]] begin[:] call[name[dist]][call[name[self]._partitionIdMap.get, parameter[name[partitionId], list[[]]]]] assign[=] name[numpy].inf return[name[dist]]
keyword[def] identifier[_getDistances] ( identifier[self] , identifier[inputPattern] , identifier[partitionId] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[_finishedLearning] : identifier[self] . identifier[finishLearning] () identifier[self] . identifier[_finishedLearning] = keyword[True] keyword[if] identifier[self] . identifier[_vt] keyword[is] keyword[not] keyword[None] keyword[and] identifier[len] ( identifier[self] . identifier[_vt] )> literal[int] : identifier[inputPattern] = identifier[numpy] . identifier[dot] ( identifier[self] . identifier[_vt] , identifier[inputPattern] - identifier[self] . identifier[_mean] ) identifier[sparseInput] = identifier[self] . identifier[_sparsifyVector] ( identifier[inputPattern] ) identifier[dist] = identifier[self] . identifier[_calcDistance] ( identifier[sparseInput] ) keyword[if] identifier[self] . identifier[_specificIndexTraining] : identifier[dist] [ identifier[numpy] . identifier[array] ( identifier[self] . identifier[_categoryList] )==- literal[int] ]= identifier[numpy] . identifier[inf] keyword[if] identifier[partitionId] keyword[is] keyword[not] keyword[None] : identifier[dist] [ identifier[self] . identifier[_partitionIdMap] . identifier[get] ( identifier[partitionId] ,[])]= identifier[numpy] . identifier[inf] keyword[return] identifier[dist]
def _getDistances(self, inputPattern, partitionId=None): """Return the distances from inputPattern to all stored patterns. :param inputPattern The pattern from which distances to all other patterns are returned :param partitionId If provided, ignore all training vectors with this partitionId. """ if not self._finishedLearning: self.finishLearning() self._finishedLearning = True # depends on [control=['if'], data=[]] if self._vt is not None and len(self._vt) > 0: inputPattern = numpy.dot(self._vt, inputPattern - self._mean) # depends on [control=['if'], data=[]] sparseInput = self._sparsifyVector(inputPattern) # Compute distances dist = self._calcDistance(sparseInput) # Invalidate results where category is -1 if self._specificIndexTraining: dist[numpy.array(self._categoryList) == -1] = numpy.inf # depends on [control=['if'], data=[]] # Ignore vectors with this partition id by setting their distances to inf if partitionId is not None: dist[self._partitionIdMap.get(partitionId, [])] = numpy.inf # depends on [control=['if'], data=['partitionId']] return dist
def OSLibraries(self): """ Microsoft Windows SDK Libraries """ if self.vc_ver <= 10.0: arch_subdir = self.pi.target_dir(hidex86=True, x64=True) return [os.path.join(self.si.WindowsSdkDir, 'Lib%s' % arch_subdir)] else: arch_subdir = self.pi.target_dir(x64=True) lib = os.path.join(self.si.WindowsSdkDir, 'lib') libver = self._sdk_subdir return [os.path.join(lib, '%sum%s' % (libver , arch_subdir))]
def function[OSLibraries, parameter[self]]: constant[ Microsoft Windows SDK Libraries ] if compare[name[self].vc_ver less_or_equal[<=] constant[10.0]] begin[:] variable[arch_subdir] assign[=] call[name[self].pi.target_dir, parameter[]] return[list[[<ast.Call object at 0x7da1b1bee590>]]]
keyword[def] identifier[OSLibraries] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[vc_ver] <= literal[int] : identifier[arch_subdir] = identifier[self] . identifier[pi] . identifier[target_dir] ( identifier[hidex86] = keyword[True] , identifier[x64] = keyword[True] ) keyword[return] [ identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[si] . identifier[WindowsSdkDir] , literal[string] % identifier[arch_subdir] )] keyword[else] : identifier[arch_subdir] = identifier[self] . identifier[pi] . identifier[target_dir] ( identifier[x64] = keyword[True] ) identifier[lib] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[si] . identifier[WindowsSdkDir] , literal[string] ) identifier[libver] = identifier[self] . identifier[_sdk_subdir] keyword[return] [ identifier[os] . identifier[path] . identifier[join] ( identifier[lib] , literal[string] %( identifier[libver] , identifier[arch_subdir] ))]
def OSLibraries(self): """ Microsoft Windows SDK Libraries """ if self.vc_ver <= 10.0: arch_subdir = self.pi.target_dir(hidex86=True, x64=True) return [os.path.join(self.si.WindowsSdkDir, 'Lib%s' % arch_subdir)] # depends on [control=['if'], data=[]] else: arch_subdir = self.pi.target_dir(x64=True) lib = os.path.join(self.si.WindowsSdkDir, 'lib') libver = self._sdk_subdir return [os.path.join(lib, '%sum%s' % (libver, arch_subdir))]
def check_temperature_sensors(): """ Check all temperature sensors of the server All sensors with the value or threshold is -99 or 0 are ignored """ # walk all temperature sensor values and thresholds env_temp = walk_data(sess, oid_env_temp, helper)[0] env_temp_thresh = walk_data(sess, oid_env_temp_thres, helper)[0] env_temp_zipped = zip(env_temp, env_temp_thresh) for x, data in enumerate(env_temp_zipped, 1): # skip the check if -99 or 0 is in the value or threshold, because these data we can not use if '-99' not in data and '0' not in data: #check if the value is over the treshold if int(data[0]) > int(data[1]): helper.add_summary('Temperature at sensor %d above threshold (%s / %s)' % (x, data[0], data[1])) helper.status(critical) # always add the sensor to the output helper.add_long_output('Temperature %d: %s Celsius (threshold: %s Celsius)' % (x, data[0], data[1])) # for the first sensor (envirnoment temperature, we add performance data) if x == 1: helper.add_metric("Environment Temperature", data[0], '', ":" + data[1], "", "", "Celsius")
def function[check_temperature_sensors, parameter[]]: constant[ Check all temperature sensors of the server All sensors with the value or threshold is -99 or 0 are ignored ] variable[env_temp] assign[=] call[call[name[walk_data], parameter[name[sess], name[oid_env_temp], name[helper]]]][constant[0]] variable[env_temp_thresh] assign[=] call[call[name[walk_data], parameter[name[sess], name[oid_env_temp_thres], name[helper]]]][constant[0]] variable[env_temp_zipped] assign[=] call[name[zip], parameter[name[env_temp], name[env_temp_thresh]]] for taget[tuple[[<ast.Name object at 0x7da1b1b026e0>, <ast.Name object at 0x7da1b1b00760>]]] in starred[call[name[enumerate], parameter[name[env_temp_zipped], constant[1]]]] begin[:] if <ast.BoolOp object at 0x7da1b1b02800> begin[:] if compare[call[name[int], parameter[call[name[data]][constant[0]]]] greater[>] call[name[int], parameter[call[name[data]][constant[1]]]]] begin[:] call[name[helper].add_summary, parameter[binary_operation[constant[Temperature at sensor %d above threshold (%s / %s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1b013f0>, <ast.Subscript object at 0x7da1b1b01240>, <ast.Subscript object at 0x7da1b1b019c0>]]]]] call[name[helper].status, parameter[name[critical]]] call[name[helper].add_long_output, parameter[binary_operation[constant[Temperature %d: %s Celsius (threshold: %s Celsius)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1b01c00>, <ast.Subscript object at 0x7da1b1b015d0>, <ast.Subscript object at 0x7da1b1b01e10>]]]]] if compare[name[x] equal[==] constant[1]] begin[:] call[name[helper].add_metric, parameter[constant[Environment Temperature], call[name[data]][constant[0]], constant[], binary_operation[constant[:] + call[name[data]][constant[1]]], constant[], constant[], constant[Celsius]]]
keyword[def] identifier[check_temperature_sensors] (): literal[string] identifier[env_temp] = identifier[walk_data] ( identifier[sess] , identifier[oid_env_temp] , identifier[helper] )[ literal[int] ] identifier[env_temp_thresh] = identifier[walk_data] ( identifier[sess] , identifier[oid_env_temp_thres] , identifier[helper] )[ literal[int] ] identifier[env_temp_zipped] = identifier[zip] ( identifier[env_temp] , identifier[env_temp_thresh] ) keyword[for] identifier[x] , identifier[data] keyword[in] identifier[enumerate] ( identifier[env_temp_zipped] , literal[int] ): keyword[if] literal[string] keyword[not] keyword[in] identifier[data] keyword[and] literal[string] keyword[not] keyword[in] identifier[data] : keyword[if] identifier[int] ( identifier[data] [ literal[int] ])> identifier[int] ( identifier[data] [ literal[int] ]): identifier[helper] . identifier[add_summary] ( literal[string] %( identifier[x] , identifier[data] [ literal[int] ], identifier[data] [ literal[int] ])) identifier[helper] . identifier[status] ( identifier[critical] ) identifier[helper] . identifier[add_long_output] ( literal[string] %( identifier[x] , identifier[data] [ literal[int] ], identifier[data] [ literal[int] ])) keyword[if] identifier[x] == literal[int] : identifier[helper] . identifier[add_metric] ( literal[string] , identifier[data] [ literal[int] ], literal[string] , literal[string] + identifier[data] [ literal[int] ], literal[string] , literal[string] , literal[string] )
def check_temperature_sensors(): """ Check all temperature sensors of the server All sensors with the value or threshold is -99 or 0 are ignored """ # walk all temperature sensor values and thresholds env_temp = walk_data(sess, oid_env_temp, helper)[0] env_temp_thresh = walk_data(sess, oid_env_temp_thres, helper)[0] env_temp_zipped = zip(env_temp, env_temp_thresh) for (x, data) in enumerate(env_temp_zipped, 1): # skip the check if -99 or 0 is in the value or threshold, because these data we can not use if '-99' not in data and '0' not in data: #check if the value is over the treshold if int(data[0]) > int(data[1]): helper.add_summary('Temperature at sensor %d above threshold (%s / %s)' % (x, data[0], data[1])) helper.status(critical) # depends on [control=['if'], data=[]] # always add the sensor to the output helper.add_long_output('Temperature %d: %s Celsius (threshold: %s Celsius)' % (x, data[0], data[1])) # for the first sensor (envirnoment temperature, we add performance data) if x == 1: helper.add_metric('Environment Temperature', data[0], '', ':' + data[1], '', '', 'Celsius') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
def send(self, stream=False): """Send the HTTP request via Python Requests modules. This method will send the request to the remote endpoint. It will try to handle temporary communications issues by retrying the request automatically. Args: stream (bool): Boolean to enable stream download. Returns: Requests.Response: The Request response """ # # api request (gracefully handle temporary communications issues with the API) # try: response = self.session.request( self._http_method, self._url, auth=self._basic_auth, data=self._body, files=self._files, headers=self._headers, params=self._payload, stream=stream, timeout=self._timeout, ) except Exception as e: err = 'Failed making HTTP request ({}).'.format(e) raise RuntimeError(err) # self.tcex.log.info(u'URL ({}): {}'.format(self._http_method, response.url)) self.tcex.log.info(u'Status Code: {}'.format(response.status_code)) return response
def function[send, parameter[self, stream]]: constant[Send the HTTP request via Python Requests modules. This method will send the request to the remote endpoint. It will try to handle temporary communications issues by retrying the request automatically. Args: stream (bool): Boolean to enable stream download. Returns: Requests.Response: The Request response ] <ast.Try object at 0x7da18fe91ab0> call[name[self].tcex.log.info, parameter[call[constant[Status Code: {}].format, parameter[name[response].status_code]]]] return[name[response]]
keyword[def] identifier[send] ( identifier[self] , identifier[stream] = keyword[False] ): literal[string] keyword[try] : identifier[response] = identifier[self] . identifier[session] . identifier[request] ( identifier[self] . identifier[_http_method] , identifier[self] . identifier[_url] , identifier[auth] = identifier[self] . identifier[_basic_auth] , identifier[data] = identifier[self] . identifier[_body] , identifier[files] = identifier[self] . identifier[_files] , identifier[headers] = identifier[self] . identifier[_headers] , identifier[params] = identifier[self] . identifier[_payload] , identifier[stream] = identifier[stream] , identifier[timeout] = identifier[self] . identifier[_timeout] , ) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[err] = literal[string] . identifier[format] ( identifier[e] ) keyword[raise] identifier[RuntimeError] ( identifier[err] ) identifier[self] . identifier[tcex] . identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[response] . identifier[status_code] )) keyword[return] identifier[response]
def send(self, stream=False): """Send the HTTP request via Python Requests modules. This method will send the request to the remote endpoint. It will try to handle temporary communications issues by retrying the request automatically. Args: stream (bool): Boolean to enable stream download. Returns: Requests.Response: The Request response """ # # api request (gracefully handle temporary communications issues with the API) # try: response = self.session.request(self._http_method, self._url, auth=self._basic_auth, data=self._body, files=self._files, headers=self._headers, params=self._payload, stream=stream, timeout=self._timeout) # depends on [control=['try'], data=[]] except Exception as e: err = 'Failed making HTTP request ({}).'.format(e) raise RuntimeError(err) # depends on [control=['except'], data=['e']] # self.tcex.log.info(u'URL ({}): {}'.format(self._http_method, response.url)) self.tcex.log.info(u'Status Code: {}'.format(response.status_code)) return response
def formatted(self, include_server_time): """ Return a formatted one-line string with the statistics values for the operation for which this statistics object maintains data. This is a low-level method that is called by :meth:`pywbem.Statistics.formatted`. """ if include_server_time: # pylint: disable=no-else-return return ('{0:5d} {1:5d} ' '{2:7.3f} {3:7.3f} {4:7.3f} ' '{5:7.3f} {6:7.3f} {7:7.3f} ' '{8:6.0f} {9:6.0f} {10:6.0f} ' '{11:8.0f} {12:8.0f} {13:8.0f} {14}\n'. format(self.count, self.exception_count, self.avg_time, self.min_time, self.max_time, self.avg_server_time, self.min_server_time, self.max_server_time, self.avg_request_len, self.min_request_len, self.max_request_len, self.avg_reply_len, self.min_reply_len, self.max_reply_len, self.name)) else: return ('{0:5d} {1:5d} ' '{2:7.3f} {3:7.3f} {4:7.3f} ' '{5:6.0f} {6:6.0f} {7:6.0f} ' '{8:6.0f} {9:8.0f} {10:8.0f} {11}\n'. format(self.count, self.exception_count, self.avg_time, self.min_time, self.max_time, self.avg_request_len, self.min_request_len, self.max_request_len, self.avg_reply_len, self.min_reply_len, self.max_reply_len, self.name))
def function[formatted, parameter[self, include_server_time]]: constant[ Return a formatted one-line string with the statistics values for the operation for which this statistics object maintains data. This is a low-level method that is called by :meth:`pywbem.Statistics.formatted`. ] if name[include_server_time] begin[:] return[call[constant[{0:5d} {1:5d} {2:7.3f} {3:7.3f} {4:7.3f} {5:7.3f} {6:7.3f} {7:7.3f} {8:6.0f} {9:6.0f} {10:6.0f} {11:8.0f} {12:8.0f} {13:8.0f} {14} ].format, parameter[name[self].count, name[self].exception_count, name[self].avg_time, name[self].min_time, name[self].max_time, name[self].avg_server_time, name[self].min_server_time, name[self].max_server_time, name[self].avg_request_len, name[self].min_request_len, name[self].max_request_len, name[self].avg_reply_len, name[self].min_reply_len, name[self].max_reply_len, name[self].name]]]
keyword[def] identifier[formatted] ( identifier[self] , identifier[include_server_time] ): literal[string] keyword[if] identifier[include_server_time] : keyword[return] ( literal[string] literal[string] literal[string] literal[string] literal[string] . identifier[format] ( identifier[self] . identifier[count] , identifier[self] . identifier[exception_count] , identifier[self] . identifier[avg_time] , identifier[self] . identifier[min_time] , identifier[self] . identifier[max_time] , identifier[self] . identifier[avg_server_time] , identifier[self] . identifier[min_server_time] , identifier[self] . identifier[max_server_time] , identifier[self] . identifier[avg_request_len] , identifier[self] . identifier[min_request_len] , identifier[self] . identifier[max_request_len] , identifier[self] . identifier[avg_reply_len] , identifier[self] . identifier[min_reply_len] , identifier[self] . identifier[max_reply_len] , identifier[self] . identifier[name] )) keyword[else] : keyword[return] ( literal[string] literal[string] literal[string] literal[string] . identifier[format] ( identifier[self] . identifier[count] , identifier[self] . identifier[exception_count] , identifier[self] . identifier[avg_time] , identifier[self] . identifier[min_time] , identifier[self] . identifier[max_time] , identifier[self] . identifier[avg_request_len] , identifier[self] . identifier[min_request_len] , identifier[self] . identifier[max_request_len] , identifier[self] . identifier[avg_reply_len] , identifier[self] . identifier[min_reply_len] , identifier[self] . identifier[max_reply_len] , identifier[self] . identifier[name] ))
def formatted(self, include_server_time): """ Return a formatted one-line string with the statistics values for the operation for which this statistics object maintains data. This is a low-level method that is called by :meth:`pywbem.Statistics.formatted`. """ if include_server_time: # pylint: disable=no-else-return return '{0:5d} {1:5d} {2:7.3f} {3:7.3f} {4:7.3f} {5:7.3f} {6:7.3f} {7:7.3f} {8:6.0f} {9:6.0f} {10:6.0f} {11:8.0f} {12:8.0f} {13:8.0f} {14}\n'.format(self.count, self.exception_count, self.avg_time, self.min_time, self.max_time, self.avg_server_time, self.min_server_time, self.max_server_time, self.avg_request_len, self.min_request_len, self.max_request_len, self.avg_reply_len, self.min_reply_len, self.max_reply_len, self.name) # depends on [control=['if'], data=[]] else: return '{0:5d} {1:5d} {2:7.3f} {3:7.3f} {4:7.3f} {5:6.0f} {6:6.0f} {7:6.0f} {8:6.0f} {9:8.0f} {10:8.0f} {11}\n'.format(self.count, self.exception_count, self.avg_time, self.min_time, self.max_time, self.avg_request_len, self.min_request_len, self.max_request_len, self.avg_reply_len, self.min_reply_len, self.max_reply_len, self.name)
def check(text): """Check the text.""" err = "misc.currency" msg = u"Incorrect use of symbols in {}." symbols = [ "\$[\d]* ?(?:dollars|usd|us dollars)" ] return existence_check(text, symbols, err, msg)
def function[check, parameter[text]]: constant[Check the text.] variable[err] assign[=] constant[misc.currency] variable[msg] assign[=] constant[Incorrect use of symbols in {}.] variable[symbols] assign[=] list[[<ast.Constant object at 0x7da207f00af0>]] return[call[name[existence_check], parameter[name[text], name[symbols], name[err], name[msg]]]]
keyword[def] identifier[check] ( identifier[text] ): literal[string] identifier[err] = literal[string] identifier[msg] = literal[string] identifier[symbols] =[ literal[string] ] keyword[return] identifier[existence_check] ( identifier[text] , identifier[symbols] , identifier[err] , identifier[msg] )
def check(text): """Check the text.""" err = 'misc.currency' msg = u'Incorrect use of symbols in {}.' symbols = ['\\$[\\d]* ?(?:dollars|usd|us dollars)'] return existence_check(text, symbols, err, msg)
def main(): """ Main function """ ctx = {} def pretty_json(data): return json.dumps(data, indent=2, sort_keys=True) client = server.create_app().test_client() host = 'example.com:9984' # HTTP Index res = client.get('/', environ_overrides={'HTTP_HOST': host}) res_data = json.loads(res.data.decode()) ctx['index'] = pretty_json(res_data) # API index res = client.get('/api/v1/', environ_overrides={'HTTP_HOST': host}) ctx['api_index'] = pretty_json(json.loads(res.data.decode())) # tx create privkey = 'CfdqtD7sS7FgkMoGPXw55MVGGFwQLAoHYTcBhZDtF99Z' pubkey = '4K9sWUMFwTgaDGPfdynrbxWqWS6sWmKbZoTjxLtVUibD' asset = {'msg': 'Hello BigchainDB!'} tx = Transaction.create([pubkey], [([pubkey], 1)], asset=asset, metadata={'sequence': 0}) tx = tx.sign([privkey]) ctx['tx'] = pretty_json(tx.to_dict()) ctx['public_keys'] = tx.outputs[0].public_keys[0] ctx['txid'] = tx.id # tx transfer privkey_transfer = '3AeWpPdhEZzWLYfkfYHBfMFC2r1f8HEaGS9NtbbKssya' pubkey_transfer = '3yfQPHeWAa1MxTX9Zf9176QqcpcnWcanVZZbaHb8B3h9' cid = 0 input_ = Input(fulfillment=tx.outputs[cid].fulfillment, fulfills=TransactionLink(txid=tx.id, output=cid), owners_before=tx.outputs[cid].public_keys) tx_transfer = Transaction.transfer([input_], [([pubkey_transfer], 1)], asset_id=tx.id, metadata={'sequence': 1}) tx_transfer = tx_transfer.sign([privkey]) ctx['tx_transfer'] = pretty_json(tx_transfer.to_dict()) ctx['public_keys_transfer'] = tx_transfer.outputs[0].public_keys[0] ctx['tx_transfer_id'] = tx_transfer.id # privkey_transfer_last = 'sG3jWDtdTXUidBJK53ucSTrosktG616U3tQHBk81eQe' pubkey_transfer_last = '3Af3fhhjU6d9WecEM9Uw5hfom9kNEwE7YuDWdqAUssqm' cid = 0 input_ = Input(fulfillment=tx_transfer.outputs[cid].fulfillment, fulfills=TransactionLink(txid=tx_transfer.id, output=cid), owners_before=tx_transfer.outputs[cid].public_keys) tx_transfer_last = Transaction.transfer([input_], [([pubkey_transfer_last], 1)], asset_id=tx.id, metadata={'sequence': 2}) tx_transfer_last = tx_transfer_last.sign([privkey_transfer]) 
ctx['tx_transfer_last'] = pretty_json(tx_transfer_last.to_dict()) ctx['tx_transfer_last_id'] = tx_transfer_last.id ctx['public_keys_transfer_last'] = tx_transfer_last.outputs[0].public_keys[0] # block node_private = "5G2kE1zJAgTajkVSbPAQWo4c2izvtwqaNHYsaNpbbvxX" node_public = "DngBurxfeNVKZWCEcDnLj1eMPAS7focUZTE5FndFGuHT" signature = "53wxrEQDYk1dXzmvNSytbCfmNVnPqPkDQaTnAe8Jf43s6ssejPxezkCvUnGTnduNUmaLjhaan1iRLi3peu6s5DzA" app_hash = 'f6e0c49c6d94d6924351f25bb334cf2a99af4206339bf784e741d1a5ab599056' block = lib.Block(height=1, transactions=[tx.to_dict()], app_hash=app_hash) block_dict = block._asdict() block_dict.pop('app_hash') ctx['block'] = pretty_json(block_dict) ctx['blockid'] = block.height # block status block_list = [ block.height ] ctx['block_list'] = pretty_json(block_list) base_path = os.path.join(os.path.dirname(__file__), 'source/http-samples') if not os.path.exists(base_path): os.makedirs(base_path) for name, tpl in TPLS.items(): path = os.path.join(base_path, name + '.http') code = tpl % ctx with open(path, 'w') as handle: handle.write(code)
def function[main, parameter[]]: constant[ Main function ] variable[ctx] assign[=] dictionary[[], []] def function[pretty_json, parameter[data]]: return[call[name[json].dumps, parameter[name[data]]]] variable[client] assign[=] call[call[name[server].create_app, parameter[]].test_client, parameter[]] variable[host] assign[=] constant[example.com:9984] variable[res] assign[=] call[name[client].get, parameter[constant[/]]] variable[res_data] assign[=] call[name[json].loads, parameter[call[name[res].data.decode, parameter[]]]] call[name[ctx]][constant[index]] assign[=] call[name[pretty_json], parameter[name[res_data]]] variable[res] assign[=] call[name[client].get, parameter[constant[/api/v1/]]] call[name[ctx]][constant[api_index]] assign[=] call[name[pretty_json], parameter[call[name[json].loads, parameter[call[name[res].data.decode, parameter[]]]]]] variable[privkey] assign[=] constant[CfdqtD7sS7FgkMoGPXw55MVGGFwQLAoHYTcBhZDtF99Z] variable[pubkey] assign[=] constant[4K9sWUMFwTgaDGPfdynrbxWqWS6sWmKbZoTjxLtVUibD] variable[asset] assign[=] dictionary[[<ast.Constant object at 0x7da1b1bed7e0>], [<ast.Constant object at 0x7da1b1bedff0>]] variable[tx] assign[=] call[name[Transaction].create, parameter[list[[<ast.Name object at 0x7da1b1bee0b0>]], list[[<ast.Tuple object at 0x7da1b1bed120>]]]] variable[tx] assign[=] call[name[tx].sign, parameter[list[[<ast.Name object at 0x7da1b1bee4a0>]]]] call[name[ctx]][constant[tx]] assign[=] call[name[pretty_json], parameter[call[name[tx].to_dict, parameter[]]]] call[name[ctx]][constant[public_keys]] assign[=] call[call[name[tx].outputs][constant[0]].public_keys][constant[0]] call[name[ctx]][constant[txid]] assign[=] name[tx].id variable[privkey_transfer] assign[=] constant[3AeWpPdhEZzWLYfkfYHBfMFC2r1f8HEaGS9NtbbKssya] variable[pubkey_transfer] assign[=] constant[3yfQPHeWAa1MxTX9Zf9176QqcpcnWcanVZZbaHb8B3h9] variable[cid] assign[=] constant[0] variable[input_] assign[=] call[name[Input], parameter[]] variable[tx_transfer] assign[=] 
call[name[Transaction].transfer, parameter[list[[<ast.Name object at 0x7da1b1bef670>]], list[[<ast.Tuple object at 0x7da1b1becdf0>]]]] variable[tx_transfer] assign[=] call[name[tx_transfer].sign, parameter[list[[<ast.Name object at 0x7da1b1bef4f0>]]]] call[name[ctx]][constant[tx_transfer]] assign[=] call[name[pretty_json], parameter[call[name[tx_transfer].to_dict, parameter[]]]] call[name[ctx]][constant[public_keys_transfer]] assign[=] call[call[name[tx_transfer].outputs][constant[0]].public_keys][constant[0]] call[name[ctx]][constant[tx_transfer_id]] assign[=] name[tx_transfer].id variable[pubkey_transfer_last] assign[=] constant[3Af3fhhjU6d9WecEM9Uw5hfom9kNEwE7YuDWdqAUssqm] variable[cid] assign[=] constant[0] variable[input_] assign[=] call[name[Input], parameter[]] variable[tx_transfer_last] assign[=] call[name[Transaction].transfer, parameter[list[[<ast.Name object at 0x7da1b1beedd0>]], list[[<ast.Tuple object at 0x7da1b1befc70>]]]] variable[tx_transfer_last] assign[=] call[name[tx_transfer_last].sign, parameter[list[[<ast.Name object at 0x7da1b1becf70>]]]] call[name[ctx]][constant[tx_transfer_last]] assign[=] call[name[pretty_json], parameter[call[name[tx_transfer_last].to_dict, parameter[]]]] call[name[ctx]][constant[tx_transfer_last_id]] assign[=] name[tx_transfer_last].id call[name[ctx]][constant[public_keys_transfer_last]] assign[=] call[call[name[tx_transfer_last].outputs][constant[0]].public_keys][constant[0]] variable[node_private] assign[=] constant[5G2kE1zJAgTajkVSbPAQWo4c2izvtwqaNHYsaNpbbvxX] variable[node_public] assign[=] constant[DngBurxfeNVKZWCEcDnLj1eMPAS7focUZTE5FndFGuHT] variable[signature] assign[=] constant[53wxrEQDYk1dXzmvNSytbCfmNVnPqPkDQaTnAe8Jf43s6ssejPxezkCvUnGTnduNUmaLjhaan1iRLi3peu6s5DzA] variable[app_hash] assign[=] constant[f6e0c49c6d94d6924351f25bb334cf2a99af4206339bf784e741d1a5ab599056] variable[block] assign[=] call[name[lib].Block, parameter[]] variable[block_dict] assign[=] call[name[block]._asdict, parameter[]] 
call[name[block_dict].pop, parameter[constant[app_hash]]] call[name[ctx]][constant[block]] assign[=] call[name[pretty_json], parameter[name[block_dict]]] call[name[ctx]][constant[blockid]] assign[=] name[block].height variable[block_list] assign[=] list[[<ast.Attribute object at 0x7da1b1bfa020>]] call[name[ctx]][constant[block_list]] assign[=] call[name[pretty_json], parameter[name[block_list]]] variable[base_path] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[__file__]]], constant[source/http-samples]]] if <ast.UnaryOp object at 0x7da1b1bf8220> begin[:] call[name[os].makedirs, parameter[name[base_path]]] for taget[tuple[[<ast.Name object at 0x7da1b1bfaad0>, <ast.Name object at 0x7da1b1bf98a0>]]] in starred[call[name[TPLS].items, parameter[]]] begin[:] variable[path] assign[=] call[name[os].path.join, parameter[name[base_path], binary_operation[name[name] + constant[.http]]]] variable[code] assign[=] binary_operation[name[tpl] <ast.Mod object at 0x7da2590d6920> name[ctx]] with call[name[open], parameter[name[path], constant[w]]] begin[:] call[name[handle].write, parameter[name[code]]]
keyword[def] identifier[main] (): literal[string] identifier[ctx] ={} keyword[def] identifier[pretty_json] ( identifier[data] ): keyword[return] identifier[json] . identifier[dumps] ( identifier[data] , identifier[indent] = literal[int] , identifier[sort_keys] = keyword[True] ) identifier[client] = identifier[server] . identifier[create_app] (). identifier[test_client] () identifier[host] = literal[string] identifier[res] = identifier[client] . identifier[get] ( literal[string] , identifier[environ_overrides] ={ literal[string] : identifier[host] }) identifier[res_data] = identifier[json] . identifier[loads] ( identifier[res] . identifier[data] . identifier[decode] ()) identifier[ctx] [ literal[string] ]= identifier[pretty_json] ( identifier[res_data] ) identifier[res] = identifier[client] . identifier[get] ( literal[string] , identifier[environ_overrides] ={ literal[string] : identifier[host] }) identifier[ctx] [ literal[string] ]= identifier[pretty_json] ( identifier[json] . identifier[loads] ( identifier[res] . identifier[data] . identifier[decode] ())) identifier[privkey] = literal[string] identifier[pubkey] = literal[string] identifier[asset] ={ literal[string] : literal[string] } identifier[tx] = identifier[Transaction] . identifier[create] ([ identifier[pubkey] ],[([ identifier[pubkey] ], literal[int] )], identifier[asset] = identifier[asset] , identifier[metadata] ={ literal[string] : literal[int] }) identifier[tx] = identifier[tx] . identifier[sign] ([ identifier[privkey] ]) identifier[ctx] [ literal[string] ]= identifier[pretty_json] ( identifier[tx] . identifier[to_dict] ()) identifier[ctx] [ literal[string] ]= identifier[tx] . identifier[outputs] [ literal[int] ]. identifier[public_keys] [ literal[int] ] identifier[ctx] [ literal[string] ]= identifier[tx] . 
identifier[id] identifier[privkey_transfer] = literal[string] identifier[pubkey_transfer] = literal[string] identifier[cid] = literal[int] identifier[input_] = identifier[Input] ( identifier[fulfillment] = identifier[tx] . identifier[outputs] [ identifier[cid] ]. identifier[fulfillment] , identifier[fulfills] = identifier[TransactionLink] ( identifier[txid] = identifier[tx] . identifier[id] , identifier[output] = identifier[cid] ), identifier[owners_before] = identifier[tx] . identifier[outputs] [ identifier[cid] ]. identifier[public_keys] ) identifier[tx_transfer] = identifier[Transaction] . identifier[transfer] ([ identifier[input_] ],[([ identifier[pubkey_transfer] ], literal[int] )], identifier[asset_id] = identifier[tx] . identifier[id] , identifier[metadata] ={ literal[string] : literal[int] }) identifier[tx_transfer] = identifier[tx_transfer] . identifier[sign] ([ identifier[privkey] ]) identifier[ctx] [ literal[string] ]= identifier[pretty_json] ( identifier[tx_transfer] . identifier[to_dict] ()) identifier[ctx] [ literal[string] ]= identifier[tx_transfer] . identifier[outputs] [ literal[int] ]. identifier[public_keys] [ literal[int] ] identifier[ctx] [ literal[string] ]= identifier[tx_transfer] . identifier[id] identifier[pubkey_transfer_last] = literal[string] identifier[cid] = literal[int] identifier[input_] = identifier[Input] ( identifier[fulfillment] = identifier[tx_transfer] . identifier[outputs] [ identifier[cid] ]. identifier[fulfillment] , identifier[fulfills] = identifier[TransactionLink] ( identifier[txid] = identifier[tx_transfer] . identifier[id] , identifier[output] = identifier[cid] ), identifier[owners_before] = identifier[tx_transfer] . identifier[outputs] [ identifier[cid] ]. identifier[public_keys] ) identifier[tx_transfer_last] = identifier[Transaction] . identifier[transfer] ([ identifier[input_] ],[([ identifier[pubkey_transfer_last] ], literal[int] )], identifier[asset_id] = identifier[tx] . 
identifier[id] , identifier[metadata] ={ literal[string] : literal[int] }) identifier[tx_transfer_last] = identifier[tx_transfer_last] . identifier[sign] ([ identifier[privkey_transfer] ]) identifier[ctx] [ literal[string] ]= identifier[pretty_json] ( identifier[tx_transfer_last] . identifier[to_dict] ()) identifier[ctx] [ literal[string] ]= identifier[tx_transfer_last] . identifier[id] identifier[ctx] [ literal[string] ]= identifier[tx_transfer_last] . identifier[outputs] [ literal[int] ]. identifier[public_keys] [ literal[int] ] identifier[node_private] = literal[string] identifier[node_public] = literal[string] identifier[signature] = literal[string] identifier[app_hash] = literal[string] identifier[block] = identifier[lib] . identifier[Block] ( identifier[height] = literal[int] , identifier[transactions] =[ identifier[tx] . identifier[to_dict] ()], identifier[app_hash] = identifier[app_hash] ) identifier[block_dict] = identifier[block] . identifier[_asdict] () identifier[block_dict] . identifier[pop] ( literal[string] ) identifier[ctx] [ literal[string] ]= identifier[pretty_json] ( identifier[block_dict] ) identifier[ctx] [ literal[string] ]= identifier[block] . identifier[height] identifier[block_list] =[ identifier[block] . identifier[height] ] identifier[ctx] [ literal[string] ]= identifier[pretty_json] ( identifier[block_list] ) identifier[base_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] ), literal[string] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[base_path] ): identifier[os] . identifier[makedirs] ( identifier[base_path] ) keyword[for] identifier[name] , identifier[tpl] keyword[in] identifier[TPLS] . identifier[items] (): identifier[path] = identifier[os] . identifier[path] . 
identifier[join] ( identifier[base_path] , identifier[name] + literal[string] ) identifier[code] = identifier[tpl] % identifier[ctx] keyword[with] identifier[open] ( identifier[path] , literal[string] ) keyword[as] identifier[handle] : identifier[handle] . identifier[write] ( identifier[code] )
def main(): """ Main function """ ctx = {} def pretty_json(data): return json.dumps(data, indent=2, sort_keys=True) client = server.create_app().test_client() host = 'example.com:9984' # HTTP Index res = client.get('/', environ_overrides={'HTTP_HOST': host}) res_data = json.loads(res.data.decode()) ctx['index'] = pretty_json(res_data) # API index res = client.get('/api/v1/', environ_overrides={'HTTP_HOST': host}) ctx['api_index'] = pretty_json(json.loads(res.data.decode())) # tx create privkey = 'CfdqtD7sS7FgkMoGPXw55MVGGFwQLAoHYTcBhZDtF99Z' pubkey = '4K9sWUMFwTgaDGPfdynrbxWqWS6sWmKbZoTjxLtVUibD' asset = {'msg': 'Hello BigchainDB!'} tx = Transaction.create([pubkey], [([pubkey], 1)], asset=asset, metadata={'sequence': 0}) tx = tx.sign([privkey]) ctx['tx'] = pretty_json(tx.to_dict()) ctx['public_keys'] = tx.outputs[0].public_keys[0] ctx['txid'] = tx.id # tx transfer privkey_transfer = '3AeWpPdhEZzWLYfkfYHBfMFC2r1f8HEaGS9NtbbKssya' pubkey_transfer = '3yfQPHeWAa1MxTX9Zf9176QqcpcnWcanVZZbaHb8B3h9' cid = 0 input_ = Input(fulfillment=tx.outputs[cid].fulfillment, fulfills=TransactionLink(txid=tx.id, output=cid), owners_before=tx.outputs[cid].public_keys) tx_transfer = Transaction.transfer([input_], [([pubkey_transfer], 1)], asset_id=tx.id, metadata={'sequence': 1}) tx_transfer = tx_transfer.sign([privkey]) ctx['tx_transfer'] = pretty_json(tx_transfer.to_dict()) ctx['public_keys_transfer'] = tx_transfer.outputs[0].public_keys[0] ctx['tx_transfer_id'] = tx_transfer.id # privkey_transfer_last = 'sG3jWDtdTXUidBJK53ucSTrosktG616U3tQHBk81eQe' pubkey_transfer_last = '3Af3fhhjU6d9WecEM9Uw5hfom9kNEwE7YuDWdqAUssqm' cid = 0 input_ = Input(fulfillment=tx_transfer.outputs[cid].fulfillment, fulfills=TransactionLink(txid=tx_transfer.id, output=cid), owners_before=tx_transfer.outputs[cid].public_keys) tx_transfer_last = Transaction.transfer([input_], [([pubkey_transfer_last], 1)], asset_id=tx.id, metadata={'sequence': 2}) tx_transfer_last = tx_transfer_last.sign([privkey_transfer]) 
ctx['tx_transfer_last'] = pretty_json(tx_transfer_last.to_dict()) ctx['tx_transfer_last_id'] = tx_transfer_last.id ctx['public_keys_transfer_last'] = tx_transfer_last.outputs[0].public_keys[0] # block node_private = '5G2kE1zJAgTajkVSbPAQWo4c2izvtwqaNHYsaNpbbvxX' node_public = 'DngBurxfeNVKZWCEcDnLj1eMPAS7focUZTE5FndFGuHT' signature = '53wxrEQDYk1dXzmvNSytbCfmNVnPqPkDQaTnAe8Jf43s6ssejPxezkCvUnGTnduNUmaLjhaan1iRLi3peu6s5DzA' app_hash = 'f6e0c49c6d94d6924351f25bb334cf2a99af4206339bf784e741d1a5ab599056' block = lib.Block(height=1, transactions=[tx.to_dict()], app_hash=app_hash) block_dict = block._asdict() block_dict.pop('app_hash') ctx['block'] = pretty_json(block_dict) ctx['blockid'] = block.height # block status block_list = [block.height] ctx['block_list'] = pretty_json(block_list) base_path = os.path.join(os.path.dirname(__file__), 'source/http-samples') if not os.path.exists(base_path): os.makedirs(base_path) # depends on [control=['if'], data=[]] for (name, tpl) in TPLS.items(): path = os.path.join(base_path, name + '.http') code = tpl % ctx with open(path, 'w') as handle: handle.write(code) # depends on [control=['with'], data=['handle']] # depends on [control=['for'], data=[]]
def repr2(obj_, **kwargs): """ Attempt to replace repr more configurable pretty version that works the same in both 2 and 3 """ kwargs['nl'] = kwargs.pop('nl', kwargs.pop('newlines', False)) val_str = _make_valstr(**kwargs) return val_str(obj_)
def function[repr2, parameter[obj_]]: constant[ Attempt to replace repr more configurable pretty version that works the same in both 2 and 3 ] call[name[kwargs]][constant[nl]] assign[=] call[name[kwargs].pop, parameter[constant[nl], call[name[kwargs].pop, parameter[constant[newlines], constant[False]]]]] variable[val_str] assign[=] call[name[_make_valstr], parameter[]] return[call[name[val_str], parameter[name[obj_]]]]
keyword[def] identifier[repr2] ( identifier[obj_] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= identifier[kwargs] . identifier[pop] ( literal[string] , identifier[kwargs] . identifier[pop] ( literal[string] , keyword[False] )) identifier[val_str] = identifier[_make_valstr] (** identifier[kwargs] ) keyword[return] identifier[val_str] ( identifier[obj_] )
def repr2(obj_, **kwargs): """ Attempt to replace repr more configurable pretty version that works the same in both 2 and 3 """ kwargs['nl'] = kwargs.pop('nl', kwargs.pop('newlines', False)) val_str = _make_valstr(**kwargs) return val_str(obj_)
def _set_up_savefolder(self): """ Create catalogs for different file output to clean up savefolder. """ if not os.path.isdir(self.cells_path): os.mkdir(self.cells_path) if not os.path.isdir(self.figures_path): os.mkdir(self.figures_path) if not os.path.isdir(self.populations_path): os.mkdir(self.populations_path)
def function[_set_up_savefolder, parameter[self]]: constant[ Create catalogs for different file output to clean up savefolder. ] if <ast.UnaryOp object at 0x7da1b0b72d70> begin[:] call[name[os].mkdir, parameter[name[self].cells_path]] if <ast.UnaryOp object at 0x7da1b0b71cc0> begin[:] call[name[os].mkdir, parameter[name[self].figures_path]] if <ast.UnaryOp object at 0x7da1b0b71f00> begin[:] call[name[os].mkdir, parameter[name[self].populations_path]]
keyword[def] identifier[_set_up_savefolder] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[self] . identifier[cells_path] ): identifier[os] . identifier[mkdir] ( identifier[self] . identifier[cells_path] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[self] . identifier[figures_path] ): identifier[os] . identifier[mkdir] ( identifier[self] . identifier[figures_path] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[self] . identifier[populations_path] ): identifier[os] . identifier[mkdir] ( identifier[self] . identifier[populations_path] )
def _set_up_savefolder(self): """ Create catalogs for different file output to clean up savefolder. """ if not os.path.isdir(self.cells_path): os.mkdir(self.cells_path) # depends on [control=['if'], data=[]] if not os.path.isdir(self.figures_path): os.mkdir(self.figures_path) # depends on [control=['if'], data=[]] if not os.path.isdir(self.populations_path): os.mkdir(self.populations_path) # depends on [control=['if'], data=[]]
def matrix_iter(self, scale=1, border=None): """\ Returns an iterator over the matrix which includes the border. The border is returned as sequence of light modules. Dark modules are reported as ``0x1``, light modules have the value ``0x0``. The following example converts the QR Code matrix into a list of lists which use boolean values for the modules (True = dark module, False = light module):: >>> import segno >>> qr = segno.make('The Beatles') >>> size = qr.symbol_size()[0] >>> res = [] >>> # Scaling factor 2, default border >>> for row in qr.matrix_iter(scale=2): >>> res.append([col == 0x1 for col in row]) >>> size * 2 == len(res[0]) True :param int scale: The scaling factor (default: ``1``). :param int border: The size of border / quiet zone or ``None`` to indicate the default border. :raises: :py:exc:`ValueError` if the scaling factor or the border is invalid (i.e. negative). """ return utils.matrix_iter(self.matrix, self._version, scale, border)
def function[matrix_iter, parameter[self, scale, border]]: constant[ Returns an iterator over the matrix which includes the border. The border is returned as sequence of light modules. Dark modules are reported as ``0x1``, light modules have the value ``0x0``. The following example converts the QR Code matrix into a list of lists which use boolean values for the modules (True = dark module, False = light module):: >>> import segno >>> qr = segno.make('The Beatles') >>> size = qr.symbol_size()[0] >>> res = [] >>> # Scaling factor 2, default border >>> for row in qr.matrix_iter(scale=2): >>> res.append([col == 0x1 for col in row]) >>> size * 2 == len(res[0]) True :param int scale: The scaling factor (default: ``1``). :param int border: The size of border / quiet zone or ``None`` to indicate the default border. :raises: :py:exc:`ValueError` if the scaling factor or the border is invalid (i.e. negative). ] return[call[name[utils].matrix_iter, parameter[name[self].matrix, name[self]._version, name[scale], name[border]]]]
keyword[def] identifier[matrix_iter] ( identifier[self] , identifier[scale] = literal[int] , identifier[border] = keyword[None] ): literal[string] keyword[return] identifier[utils] . identifier[matrix_iter] ( identifier[self] . identifier[matrix] , identifier[self] . identifier[_version] , identifier[scale] , identifier[border] )
def matrix_iter(self, scale=1, border=None): """ Returns an iterator over the matrix which includes the border. The border is returned as sequence of light modules. Dark modules are reported as ``0x1``, light modules have the value ``0x0``. The following example converts the QR Code matrix into a list of lists which use boolean values for the modules (True = dark module, False = light module):: >>> import segno >>> qr = segno.make('The Beatles') >>> size = qr.symbol_size()[0] >>> res = [] >>> # Scaling factor 2, default border >>> for row in qr.matrix_iter(scale=2): >>> res.append([col == 0x1 for col in row]) >>> size * 2 == len(res[0]) True :param int scale: The scaling factor (default: ``1``). :param int border: The size of border / quiet zone or ``None`` to indicate the default border. :raises: :py:exc:`ValueError` if the scaling factor or the border is invalid (i.e. negative). """ return utils.matrix_iter(self.matrix, self._version, scale, border)
def transfer_state_data(cls, source_entity, target_entity): """ Transfers instance state data from the given source entity to the given target entity. """ state_data = cls.get_state_data(source_entity) cls.set_state_data(target_entity, state_data)
def function[transfer_state_data, parameter[cls, source_entity, target_entity]]: constant[ Transfers instance state data from the given source entity to the given target entity. ] variable[state_data] assign[=] call[name[cls].get_state_data, parameter[name[source_entity]]] call[name[cls].set_state_data, parameter[name[target_entity], name[state_data]]]
keyword[def] identifier[transfer_state_data] ( identifier[cls] , identifier[source_entity] , identifier[target_entity] ): literal[string] identifier[state_data] = identifier[cls] . identifier[get_state_data] ( identifier[source_entity] ) identifier[cls] . identifier[set_state_data] ( identifier[target_entity] , identifier[state_data] )
def transfer_state_data(cls, source_entity, target_entity): """ Transfers instance state data from the given source entity to the given target entity. """ state_data = cls.get_state_data(source_entity) cls.set_state_data(target_entity, state_data)
def get_limit(self, criticity, stat_name=""): """Return the limit value for the alert.""" # Get the limit for stat + header # Exemple: network_wlan0_rx_careful try: limit = self._limits[stat_name + '_' + criticity] except KeyError: # Try fallback to plugin default limit # Exemple: network_careful limit = self._limits[self.plugin_name + '_' + criticity] # logger.debug("{} {} value is {}".format(stat_name, criticity, limit)) # Return the limiter return limit
def function[get_limit, parameter[self, criticity, stat_name]]: constant[Return the limit value for the alert.] <ast.Try object at 0x7da1b1c3d4e0> return[name[limit]]
keyword[def] identifier[get_limit] ( identifier[self] , identifier[criticity] , identifier[stat_name] = literal[string] ): literal[string] keyword[try] : identifier[limit] = identifier[self] . identifier[_limits] [ identifier[stat_name] + literal[string] + identifier[criticity] ] keyword[except] identifier[KeyError] : identifier[limit] = identifier[self] . identifier[_limits] [ identifier[self] . identifier[plugin_name] + literal[string] + identifier[criticity] ] keyword[return] identifier[limit]
def get_limit(self, criticity, stat_name=''): """Return the limit value for the alert.""" # Get the limit for stat + header # Exemple: network_wlan0_rx_careful try: limit = self._limits[stat_name + '_' + criticity] # depends on [control=['try'], data=[]] except KeyError: # Try fallback to plugin default limit # Exemple: network_careful limit = self._limits[self.plugin_name + '_' + criticity] # depends on [control=['except'], data=[]] # logger.debug("{} {} value is {}".format(stat_name, criticity, limit)) # Return the limiter return limit
def future_check_sensor(self, name, update=None): """Check if the sensor exists. Used internally by future_get_sensor. This method is aware of synchronisation in progress and if inspection of the server is allowed. Parameters ---------- name : str Name of the sensor to verify. update : bool or None, optional If a katcp request to the server should be made to check if the sensor is on the server now. Notes ----- Ensure that self.state.data_synced == True if yielding to future_check_sensor from a state-change callback, or a deadlock will occur. """ exist = False yield self.until_data_synced() if name in self._sensors_index: exist = True else: if update or (update is None and self._update_on_lookup): yield self.inspect_sensors(name) exist = yield self.future_check_sensor(name, False) raise tornado.gen.Return(exist)
def function[future_check_sensor, parameter[self, name, update]]: constant[Check if the sensor exists. Used internally by future_get_sensor. This method is aware of synchronisation in progress and if inspection of the server is allowed. Parameters ---------- name : str Name of the sensor to verify. update : bool or None, optional If a katcp request to the server should be made to check if the sensor is on the server now. Notes ----- Ensure that self.state.data_synced == True if yielding to future_check_sensor from a state-change callback, or a deadlock will occur. ] variable[exist] assign[=] constant[False] <ast.Yield object at 0x7da20c6a9b40> if compare[name[name] in name[self]._sensors_index] begin[:] variable[exist] assign[=] constant[True] <ast.Raise object at 0x7da1b0529240>
keyword[def] identifier[future_check_sensor] ( identifier[self] , identifier[name] , identifier[update] = keyword[None] ): literal[string] identifier[exist] = keyword[False] keyword[yield] identifier[self] . identifier[until_data_synced] () keyword[if] identifier[name] keyword[in] identifier[self] . identifier[_sensors_index] : identifier[exist] = keyword[True] keyword[else] : keyword[if] identifier[update] keyword[or] ( identifier[update] keyword[is] keyword[None] keyword[and] identifier[self] . identifier[_update_on_lookup] ): keyword[yield] identifier[self] . identifier[inspect_sensors] ( identifier[name] ) identifier[exist] = keyword[yield] identifier[self] . identifier[future_check_sensor] ( identifier[name] , keyword[False] ) keyword[raise] identifier[tornado] . identifier[gen] . identifier[Return] ( identifier[exist] )
def future_check_sensor(self, name, update=None): """Check if the sensor exists. Used internally by future_get_sensor. This method is aware of synchronisation in progress and if inspection of the server is allowed. Parameters ---------- name : str Name of the sensor to verify. update : bool or None, optional If a katcp request to the server should be made to check if the sensor is on the server now. Notes ----- Ensure that self.state.data_synced == True if yielding to future_check_sensor from a state-change callback, or a deadlock will occur. """ exist = False yield self.until_data_synced() if name in self._sensors_index: exist = True # depends on [control=['if'], data=[]] elif update or (update is None and self._update_on_lookup): yield self.inspect_sensors(name) exist = (yield self.future_check_sensor(name, False)) # depends on [control=['if'], data=[]] raise tornado.gen.Return(exist)
def hla_choices(orig_hla, min_parts=2): """Provide a range of options for HLA type, with decreasing resolution. """ yield orig_hla try: int(orig_hla[-1]) except ValueError: yield orig_hla[:-1] hla_parts = orig_hla.split(":") for sub_i in range(len(hla_parts) - min_parts + 1): yield ":".join(hla_parts[:len(hla_parts) - sub_i])
def function[hla_choices, parameter[orig_hla, min_parts]]: constant[Provide a range of options for HLA type, with decreasing resolution. ] <ast.Yield object at 0x7da1b23450f0> <ast.Try object at 0x7da1b2346470> variable[hla_parts] assign[=] call[name[orig_hla].split, parameter[constant[:]]] for taget[name[sub_i]] in starred[call[name[range], parameter[binary_operation[binary_operation[call[name[len], parameter[name[hla_parts]]] - name[min_parts]] + constant[1]]]]] begin[:] <ast.Yield object at 0x7da20c76dde0>
keyword[def] identifier[hla_choices] ( identifier[orig_hla] , identifier[min_parts] = literal[int] ): literal[string] keyword[yield] identifier[orig_hla] keyword[try] : identifier[int] ( identifier[orig_hla] [- literal[int] ]) keyword[except] identifier[ValueError] : keyword[yield] identifier[orig_hla] [:- literal[int] ] identifier[hla_parts] = identifier[orig_hla] . identifier[split] ( literal[string] ) keyword[for] identifier[sub_i] keyword[in] identifier[range] ( identifier[len] ( identifier[hla_parts] )- identifier[min_parts] + literal[int] ): keyword[yield] literal[string] . identifier[join] ( identifier[hla_parts] [: identifier[len] ( identifier[hla_parts] )- identifier[sub_i] ])
def hla_choices(orig_hla, min_parts=2): """Provide a range of options for HLA type, with decreasing resolution. """ yield orig_hla try: int(orig_hla[-1]) # depends on [control=['try'], data=[]] except ValueError: yield orig_hla[:-1] # depends on [control=['except'], data=[]] hla_parts = orig_hla.split(':') for sub_i in range(len(hla_parts) - min_parts + 1): yield ':'.join(hla_parts[:len(hla_parts) - sub_i]) # depends on [control=['for'], data=['sub_i']]
def hasFeature(self, prop, check_softs=False):
    """Return whether a property with the given name exists.

    When ``check_softs`` is true, the soft features stored under
    ``SoftFeatures.SOFT`` are searched recursively as well.
    """
    if prop in self.props:
        return True
    if not check_softs:
        # Preserve the original short-circuit value for falsy check_softs.
        return check_softs
    return any([fs.hasFeature(prop)
                for fs in self.props.get(SoftFeatures.SOFT, [])])
def function[hasFeature, parameter[self, prop, check_softs]]: constant[Return if there is a property with that name.] return[<ast.BoolOp object at 0x7da1b0aba4a0>]
keyword[def] identifier[hasFeature] ( identifier[self] , identifier[prop] , identifier[check_softs] = keyword[False] ): literal[string] keyword[return] identifier[prop] keyword[in] identifier[self] . identifier[props] keyword[or] ( identifier[check_softs] keyword[and] identifier[any] ([ identifier[fs] . identifier[hasFeature] ( identifier[prop] ) keyword[for] identifier[fs] keyword[in] identifier[self] . identifier[props] . identifier[get] ( identifier[SoftFeatures] . identifier[SOFT] ,[])]))
def hasFeature(self, prop, check_softs=False): """Return if there is a property with that name.""" return prop in self.props or (check_softs and any([fs.hasFeature(prop) for fs in self.props.get(SoftFeatures.SOFT, [])]))
def _get_credentials(username=None, password=None, dbhost=None): """Obtain user credentials by arguments or asking the user""" # Database salt system_config = dbhost.objectmodels['systemconfig'].find_one({ 'active': True }) try: salt = system_config.salt.encode('ascii') except (KeyError, AttributeError): log('No systemconfig or it is without a salt! ' 'Reinstall the system provisioning with' 'hfos_manage.py install provisions -p system') sys.exit(3) if username is None: username = _ask("Please enter username: ") else: username = username if password is None: password = _ask_password() else: password = password try: password = password.encode('utf-8') except UnicodeDecodeError: password = password passhash = hashlib.sha512(password) passhash.update(salt) return username, passhash.hexdigest()
def function[_get_credentials, parameter[username, password, dbhost]]: constant[Obtain user credentials by arguments or asking the user] variable[system_config] assign[=] call[call[name[dbhost].objectmodels][constant[systemconfig]].find_one, parameter[dictionary[[<ast.Constant object at 0x7da1b0ff8c10>], [<ast.Constant object at 0x7da1b0ffbdf0>]]]] <ast.Try object at 0x7da1b0ff90f0> if compare[name[username] is constant[None]] begin[:] variable[username] assign[=] call[name[_ask], parameter[constant[Please enter username: ]]] if compare[name[password] is constant[None]] begin[:] variable[password] assign[=] call[name[_ask_password], parameter[]] <ast.Try object at 0x7da1b0ff8070> variable[passhash] assign[=] call[name[hashlib].sha512, parameter[name[password]]] call[name[passhash].update, parameter[name[salt]]] return[tuple[[<ast.Name object at 0x7da1b0faef20>, <ast.Call object at 0x7da1b0fad1b0>]]]
keyword[def] identifier[_get_credentials] ( identifier[username] = keyword[None] , identifier[password] = keyword[None] , identifier[dbhost] = keyword[None] ): literal[string] identifier[system_config] = identifier[dbhost] . identifier[objectmodels] [ literal[string] ]. identifier[find_one] ({ literal[string] : keyword[True] }) keyword[try] : identifier[salt] = identifier[system_config] . identifier[salt] . identifier[encode] ( literal[string] ) keyword[except] ( identifier[KeyError] , identifier[AttributeError] ): identifier[log] ( literal[string] literal[string] literal[string] ) identifier[sys] . identifier[exit] ( literal[int] ) keyword[if] identifier[username] keyword[is] keyword[None] : identifier[username] = identifier[_ask] ( literal[string] ) keyword[else] : identifier[username] = identifier[username] keyword[if] identifier[password] keyword[is] keyword[None] : identifier[password] = identifier[_ask_password] () keyword[else] : identifier[password] = identifier[password] keyword[try] : identifier[password] = identifier[password] . identifier[encode] ( literal[string] ) keyword[except] identifier[UnicodeDecodeError] : identifier[password] = identifier[password] identifier[passhash] = identifier[hashlib] . identifier[sha512] ( identifier[password] ) identifier[passhash] . identifier[update] ( identifier[salt] ) keyword[return] identifier[username] , identifier[passhash] . identifier[hexdigest] ()
def _get_credentials(username=None, password=None, dbhost=None): """Obtain user credentials by arguments or asking the user""" # Database salt system_config = dbhost.objectmodels['systemconfig'].find_one({'active': True}) try: salt = system_config.salt.encode('ascii') # depends on [control=['try'], data=[]] except (KeyError, AttributeError): log('No systemconfig or it is without a salt! Reinstall the system provisioning withhfos_manage.py install provisions -p system') sys.exit(3) # depends on [control=['except'], data=[]] if username is None: username = _ask('Please enter username: ') # depends on [control=['if'], data=['username']] else: username = username if password is None: password = _ask_password() # depends on [control=['if'], data=['password']] else: password = password try: password = password.encode('utf-8') # depends on [control=['try'], data=[]] except UnicodeDecodeError: password = password # depends on [control=['except'], data=[]] passhash = hashlib.sha512(password) passhash.update(salt) return (username, passhash.hexdigest())
def get_device(self, rid):
    """Retrieve the device object for a given RID.

    http://docs.exosite.com/portals/#get-device
    """
    headers = {
        'User-Agent': self.user_agent(),
        'Content-Type': self.content_type(),
    }
    # Caller-supplied headers take precedence over the defaults above.
    headers.update(self.headers())
    url = '{0}/devices/{1}'.format(self.portals_url(), rid)
    response = requests.get(url, headers=headers, auth=self.auth())
    if response.status_code == HTTP_STATUS.OK:
        return response.json()
    print("get_device: Something went wrong: <{0}>: {1}".format(
        response.status_code, response.reason))
    # Raises for 4xx/5xx; other non-OK codes fall through and return None.
    response.raise_for_status()
def function[get_device, parameter[self, rid]]: constant[ Retrieve the device object for a given RID. http://docs.exosite.com/portals/#get-device ] variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da18bc71810>, <ast.Constant object at 0x7da18bc70bb0>], [<ast.Call object at 0x7da18bc72d10>, <ast.Call object at 0x7da18bc70c40>]] call[name[headers].update, parameter[call[name[self].headers, parameter[]]]] variable[url] assign[=] binary_operation[binary_operation[call[name[self].portals_url, parameter[]] + constant[/devices/]] + name[rid]] variable[r] assign[=] call[name[requests].get, parameter[name[url]]] if compare[name[HTTP_STATUS].OK equal[==] name[r].status_code] begin[:] variable[device_obj] assign[=] call[name[r].json, parameter[]] return[name[device_obj]]
keyword[def] identifier[get_device] ( identifier[self] , identifier[rid] ): literal[string] identifier[headers] ={ literal[string] : identifier[self] . identifier[user_agent] (), literal[string] : identifier[self] . identifier[content_type] () } identifier[headers] . identifier[update] ( identifier[self] . identifier[headers] ()) identifier[url] = identifier[self] . identifier[portals_url] ()+ literal[string] + identifier[rid] identifier[r] = identifier[requests] . identifier[get] ( identifier[url] , identifier[headers] = identifier[headers] , identifier[auth] = identifier[self] . identifier[auth] ()) keyword[if] identifier[HTTP_STATUS] . identifier[OK] == identifier[r] . identifier[status_code] : identifier[device_obj] = identifier[r] . identifier[json] () keyword[return] identifier[device_obj] keyword[else] : identifier[print] ( literal[string] . identifier[format] ( identifier[r] . identifier[status_code] , identifier[r] . identifier[reason] )) identifier[r] . identifier[raise_for_status] ()
def get_device(self, rid): """ Retrieve the device object for a given RID. http://docs.exosite.com/portals/#get-device """ headers = {'User-Agent': self.user_agent(), 'Content-Type': self.content_type()} headers.update(self.headers()) url = self.portals_url() + '/devices/' + rid # print("URL: {0}".format(url)) r = requests.get(url, headers=headers, auth=self.auth()) if HTTP_STATUS.OK == r.status_code: # fix the 'meta' to be dictionary instead of string device_obj = r.json() # device_obj['info']['description']['meta'] = \ # json.loads(device_obj['info']['description']['meta']) return device_obj # depends on [control=['if'], data=[]] else: print('get_device: Something went wrong: <{0}>: {1}'.format(r.status_code, r.reason)) r.raise_for_status()
def _fn_to_py_ast(
    ctx: GeneratorContext,
    node: Fn,
    def_name: Optional[str] = None,
    meta_node: Optional[MetaNode] = None,
) -> GeneratedPyAST:
    """Return a Python AST Node for a `fn` expression.

    Single-arity functions are lowered directly; any other count of
    methods goes through the multi-arity dispatch code path.
    """
    assert node.op == NodeOp.FN
    arities = node.methods
    if len(arities) != 1:
        return __multi_arity_fn_to_py_ast(
            ctx, node, arities, def_name=def_name, meta_node=meta_node
        )
    return __single_arity_fn_to_py_ast(
        ctx, node, next(iter(arities)), def_name=def_name, meta_node=meta_node
    )
def function[_fn_to_py_ast, parameter[ctx, node, def_name, meta_node]]: constant[Return a Python AST Node for a `fn` expression.] assert[compare[name[node].op equal[==] name[NodeOp].FN]] if compare[call[name[len], parameter[name[node].methods]] equal[==] constant[1]] begin[:] return[call[name[__single_arity_fn_to_py_ast], parameter[name[ctx], name[node], call[name[next], parameter[call[name[iter], parameter[name[node].methods]]]]]]]
keyword[def] identifier[_fn_to_py_ast] ( identifier[ctx] : identifier[GeneratorContext] , identifier[node] : identifier[Fn] , identifier[def_name] : identifier[Optional] [ identifier[str] ]= keyword[None] , identifier[meta_node] : identifier[Optional] [ identifier[MetaNode] ]= keyword[None] , )-> identifier[GeneratedPyAST] : literal[string] keyword[assert] identifier[node] . identifier[op] == identifier[NodeOp] . identifier[FN] keyword[if] identifier[len] ( identifier[node] . identifier[methods] )== literal[int] : keyword[return] identifier[__single_arity_fn_to_py_ast] ( identifier[ctx] , identifier[node] , identifier[next] ( identifier[iter] ( identifier[node] . identifier[methods] )), identifier[def_name] = identifier[def_name] , identifier[meta_node] = identifier[meta_node] ) keyword[else] : keyword[return] identifier[__multi_arity_fn_to_py_ast] ( identifier[ctx] , identifier[node] , identifier[node] . identifier[methods] , identifier[def_name] = identifier[def_name] , identifier[meta_node] = identifier[meta_node] )
def _fn_to_py_ast(ctx: GeneratorContext, node: Fn, def_name: Optional[str]=None, meta_node: Optional[MetaNode]=None) -> GeneratedPyAST: """Return a Python AST Node for a `fn` expression.""" assert node.op == NodeOp.FN if len(node.methods) == 1: return __single_arity_fn_to_py_ast(ctx, node, next(iter(node.methods)), def_name=def_name, meta_node=meta_node) # depends on [control=['if'], data=[]] else: return __multi_arity_fn_to_py_ast(ctx, node, node.methods, def_name=def_name, meta_node=meta_node)
def on_click_dispatcher(self, module_name, event, command):
    """
    Dispatch on_click config parameters to either: - Our own methods for special py3status commands (listed below) - The i3-msg program which is part of i3wm
    """
    if command is None:
        return
    if command == "refresh_all":
        self.py3_wrapper.refresh_modules()
        return
    if command == "refresh":
        self.py3_wrapper.refresh_modules(module_name)
        return
    # Substitute the module's text output into the command if requested.
    if "$OUTPUT" in command or "$OUTPUT_PART" in command:
        full_text, partial_text = self.get_module_text(module_name, event)
        command = command.replace("$OUTPUT_PART", shell_quote(partial_text))
        command = command.replace("$OUTPUT", shell_quote(full_text))
    # this is a i3 message
    self.wm_msg(module_name, command)
    # refresh the module (or i3status if it is an i3status module) so the
    # bar feels responsive after the click
    self.py3_wrapper.refresh_modules(module_name)
def function[on_click_dispatcher, parameter[self, module_name, event, command]]: constant[ Dispatch on_click config parameters to either: - Our own methods for special py3status commands (listed below) - The i3-msg program which is part of i3wm ] if compare[name[command] is constant[None]] begin[:] return[None]
keyword[def] identifier[on_click_dispatcher] ( identifier[self] , identifier[module_name] , identifier[event] , identifier[command] ): literal[string] keyword[if] identifier[command] keyword[is] keyword[None] : keyword[return] keyword[elif] identifier[command] == literal[string] : identifier[self] . identifier[py3_wrapper] . identifier[refresh_modules] () keyword[elif] identifier[command] == literal[string] : identifier[self] . identifier[py3_wrapper] . identifier[refresh_modules] ( identifier[module_name] ) keyword[else] : keyword[if] literal[string] keyword[in] identifier[command] keyword[or] literal[string] keyword[in] identifier[command] : identifier[full_text] , identifier[partial_text] = identifier[self] . identifier[get_module_text] ( identifier[module_name] , identifier[event] ) identifier[command] = identifier[command] . identifier[replace] ( literal[string] , identifier[shell_quote] ( identifier[partial_text] )) identifier[command] = identifier[command] . identifier[replace] ( literal[string] , identifier[shell_quote] ( identifier[full_text] )) identifier[self] . identifier[wm_msg] ( identifier[module_name] , identifier[command] ) identifier[self] . identifier[py3_wrapper] . identifier[refresh_modules] ( identifier[module_name] )
def on_click_dispatcher(self, module_name, event, command): """ Dispatch on_click config parameters to either: - Our own methods for special py3status commands (listed below) - The i3-msg program which is part of i3wm """ if command is None: return # depends on [control=['if'], data=[]] elif command == 'refresh_all': self.py3_wrapper.refresh_modules() # depends on [control=['if'], data=[]] elif command == 'refresh': self.py3_wrapper.refresh_modules(module_name) # depends on [control=['if'], data=[]] else: # In commands we are able to use substitutions for the text output # of a module if '$OUTPUT' in command or '$OUTPUT_PART' in command: (full_text, partial_text) = self.get_module_text(module_name, event) command = command.replace('$OUTPUT_PART', shell_quote(partial_text)) command = command.replace('$OUTPUT', shell_quote(full_text)) # depends on [control=['if'], data=[]] # this is a i3 message self.wm_msg(module_name, command) # to make the bar more responsive to users we ask for a refresh # of the module or of i3status if the module is an i3status one self.py3_wrapper.refresh_modules(module_name)
def to_json(self):
    """Returns an input shard state for the remaining inputs.

    Returns:
      A json-izable version of the remaining InputReader.
    """
    keys = (self.BLOB_KEY_PARAM, self.START_INDEX_PARAM,
            self.END_INDEX_PARAM)
    values = (self._blob_key, self._start_index, self._end_index)
    return dict(zip(keys, values))
def function[to_json, parameter[self]]: constant[Returns an input shard state for the remaining inputs. Returns: A json-izable version of the remaining InputReader. ] return[dictionary[[<ast.Attribute object at 0x7da18eb54d00>, <ast.Attribute object at 0x7da18eb57f40>, <ast.Attribute object at 0x7da18eb546a0>], [<ast.Attribute object at 0x7da18eb566e0>, <ast.Attribute object at 0x7da18eb57eb0>, <ast.Attribute object at 0x7da18eb54af0>]]]
keyword[def] identifier[to_json] ( identifier[self] ): literal[string] keyword[return] { identifier[self] . identifier[BLOB_KEY_PARAM] : identifier[self] . identifier[_blob_key] , identifier[self] . identifier[START_INDEX_PARAM] : identifier[self] . identifier[_start_index] , identifier[self] . identifier[END_INDEX_PARAM] : identifier[self] . identifier[_end_index] }
def to_json(self): """Returns an input shard state for the remaining inputs. Returns: A json-izable version of the remaining InputReader. """ return {self.BLOB_KEY_PARAM: self._blob_key, self.START_INDEX_PARAM: self._start_index, self.END_INDEX_PARAM: self._end_index}
def _next_record(self, next_line): """ Use loader to parse the record from the reader stream Supporting warc and arc records """ record = self.loader.parse_record_stream(self.reader, next_line, self.known_format, self.no_record_parse, self.ensure_http_headers) self.member_info = None # Track known format for faster parsing of other records if not self.mixed_arc_warc: self.known_format = record.format return record
def function[_next_record, parameter[self, next_line]]: constant[ Use loader to parse the record from the reader stream Supporting warc and arc records ] variable[record] assign[=] call[name[self].loader.parse_record_stream, parameter[name[self].reader, name[next_line], name[self].known_format, name[self].no_record_parse, name[self].ensure_http_headers]] name[self].member_info assign[=] constant[None] if <ast.UnaryOp object at 0x7da1b0b61510> begin[:] name[self].known_format assign[=] name[record].format return[name[record]]
keyword[def] identifier[_next_record] ( identifier[self] , identifier[next_line] ): literal[string] identifier[record] = identifier[self] . identifier[loader] . identifier[parse_record_stream] ( identifier[self] . identifier[reader] , identifier[next_line] , identifier[self] . identifier[known_format] , identifier[self] . identifier[no_record_parse] , identifier[self] . identifier[ensure_http_headers] ) identifier[self] . identifier[member_info] = keyword[None] keyword[if] keyword[not] identifier[self] . identifier[mixed_arc_warc] : identifier[self] . identifier[known_format] = identifier[record] . identifier[format] keyword[return] identifier[record]
def _next_record(self, next_line): """ Use loader to parse the record from the reader stream Supporting warc and arc records """ record = self.loader.parse_record_stream(self.reader, next_line, self.known_format, self.no_record_parse, self.ensure_http_headers) self.member_info = None # Track known format for faster parsing of other records if not self.mixed_arc_warc: self.known_format = record.format # depends on [control=['if'], data=[]] return record
def _get_node_by_key(self, key, path=None):
    """Returns the 2-tuple (prefix, node) where node either contains the value
    corresponding to the key, or is the most specific prefix on the path
    which would contain the key if it were there. The key was found if
    prefix==key and the node.value is not None.

    :param key: Bits-like key to look up.
    :param path: optional list; when given, each traversal step is appended
        as a (parent_node, child_index, edge_prefix) tuple.
    """
    # Walk edge by edge: `prefix` is the part of the key matched so far,
    # `subkey` is the remainder still to match, `node` is the current node.
    prefix, subkey, node = Bits(), key, self
    while prefix != key:
        # Look for an outgoing edge whose label is a prefix of the
        # remaining key; only the first match is followed.
        for idx,link in enumerate(node.children):
            if subkey.startswith(link.prefix):
                if link.pruned:
                    # Pruned subtree: stop at the last reachable node.
                    return (prefix, node)
                # Consume the edge label, record the step, and descend.
                subkey = subkey[len(link.prefix):]
                prefix += link.prefix
                if path is not None:
                    path.append((node, idx, link.prefix))
                node = link.node
                break
        else:
            # No child matched: `node` is the most specific prefix present.
            break
    return (prefix, node)
def function[_get_node_by_key, parameter[self, key, path]]: constant[Returns the 2-tuple (prefix, node) where node either contains the value corresponding to the key, or is the most specific prefix on the path which would contain the key if it were there. The key was found if prefix==key and the node.value is not None.] <ast.Tuple object at 0x7da1b052a470> assign[=] tuple[[<ast.Call object at 0x7da1b0528f40>, <ast.Name object at 0x7da1b052b130>, <ast.Name object at 0x7da1b052bc40>]] while compare[name[prefix] not_equal[!=] name[key]] begin[:] for taget[tuple[[<ast.Name object at 0x7da20c6a92d0>, <ast.Name object at 0x7da20c6ab4c0>]]] in starred[call[name[enumerate], parameter[name[node].children]]] begin[:] if call[name[subkey].startswith, parameter[name[link].prefix]] begin[:] if name[link].pruned begin[:] return[tuple[[<ast.Name object at 0x7da1b04e0f10>, <ast.Name object at 0x7da1b04e2470>]]] variable[subkey] assign[=] call[name[subkey]][<ast.Slice object at 0x7da1b04e2560>] <ast.AugAssign object at 0x7da1b04e2e00> if compare[name[path] is_not constant[None]] begin[:] call[name[path].append, parameter[tuple[[<ast.Name object at 0x7da1b0529330>, <ast.Name object at 0x7da1b052b550>, <ast.Attribute object at 0x7da1b0528f70>]]]] variable[node] assign[=] name[link].node break return[tuple[[<ast.Name object at 0x7da1b052b940>, <ast.Name object at 0x7da1b0529660>]]]
keyword[def] identifier[_get_node_by_key] ( identifier[self] , identifier[key] , identifier[path] = keyword[None] ): literal[string] identifier[prefix] , identifier[subkey] , identifier[node] = identifier[Bits] (), identifier[key] , identifier[self] keyword[while] identifier[prefix] != identifier[key] : keyword[for] identifier[idx] , identifier[link] keyword[in] identifier[enumerate] ( identifier[node] . identifier[children] ): keyword[if] identifier[subkey] . identifier[startswith] ( identifier[link] . identifier[prefix] ): keyword[if] identifier[link] . identifier[pruned] : keyword[return] ( identifier[prefix] , identifier[node] ) identifier[subkey] = identifier[subkey] [ identifier[len] ( identifier[link] . identifier[prefix] ):] identifier[prefix] += identifier[link] . identifier[prefix] keyword[if] identifier[path] keyword[is] keyword[not] keyword[None] : identifier[path] . identifier[append] (( identifier[node] , identifier[idx] , identifier[link] . identifier[prefix] )) identifier[node] = identifier[link] . identifier[node] keyword[break] keyword[else] : keyword[break] keyword[return] ( identifier[prefix] , identifier[node] )
def _get_node_by_key(self, key, path=None): """Returns the 2-tuple (prefix, node) where node either contains the value corresponding to the key, or is the most specific prefix on the path which would contain the key if it were there. The key was found if prefix==key and the node.value is not None.""" (prefix, subkey, node) = (Bits(), key, self) while prefix != key: for (idx, link) in enumerate(node.children): if subkey.startswith(link.prefix): if link.pruned: return (prefix, node) # depends on [control=['if'], data=[]] subkey = subkey[len(link.prefix):] prefix += link.prefix if path is not None: path.append((node, idx, link.prefix)) # depends on [control=['if'], data=['path']] node = link.node break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] else: break # depends on [control=['while'], data=['prefix']] return (prefix, node)
def __setup_connection(self):
    """
    each operation requested represents a session
    the session holds information about the plugin running it
    and establishes a project object
    """
    # isinstance() covers the None check and, unlike `type(...) is dict`,
    # also accepts dict subclasses.
    if isinstance(self.payload, dict) and 'settings' in self.payload:
        config.plugin_client_settings = self.payload['settings']
    config.offline = self.args.offline
    config.connection = PluginConnection(
        client=self.args.client or 'SUBLIME_TEXT_3',
        ui=self.args.ui_switch,
        args=self.args,
        params=self.payload,
        operation=self.operation,
        verbose=self.args.verbose)
    config.project = MavensMateProject(params=self.payload, ui=self.args.ui_switch)
    config.sfdc_client = config.project.sfdc_client
def function[__setup_connection, parameter[self]]: constant[ each operation requested represents a session the session holds information about the plugin running it and establishes a project object ] if <ast.BoolOp object at 0x7da1b2853ca0> begin[:] name[config].plugin_client_settings assign[=] call[name[self].payload][constant[settings]] name[config].offline assign[=] name[self].args.offline name[config].connection assign[=] call[name[PluginConnection], parameter[]] name[config].project assign[=] call[name[MavensMateProject], parameter[]] name[config].sfdc_client assign[=] name[config].project.sfdc_client
keyword[def] identifier[__setup_connection] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[payload] != keyword[None] keyword[and] identifier[type] ( identifier[self] . identifier[payload] ) keyword[is] identifier[dict] keyword[and] literal[string] keyword[in] identifier[self] . identifier[payload] : identifier[config] . identifier[plugin_client_settings] = identifier[self] . identifier[payload] [ literal[string] ] identifier[config] . identifier[offline] = identifier[self] . identifier[args] . identifier[offline] identifier[config] . identifier[connection] = identifier[PluginConnection] ( identifier[client] = identifier[self] . identifier[args] . identifier[client] keyword[or] literal[string] , identifier[ui] = identifier[self] . identifier[args] . identifier[ui_switch] , identifier[args] = identifier[self] . identifier[args] , identifier[params] = identifier[self] . identifier[payload] , identifier[operation] = identifier[self] . identifier[operation] , identifier[verbose] = identifier[self] . identifier[args] . identifier[verbose] ) identifier[config] . identifier[project] = identifier[MavensMateProject] ( identifier[params] = identifier[self] . identifier[payload] , identifier[ui] = identifier[self] . identifier[args] . identifier[ui_switch] ) identifier[config] . identifier[sfdc_client] = identifier[config] . identifier[project] . identifier[sfdc_client]
def __setup_connection(self): """ each operation requested represents a session the session holds information about the plugin running it and establishes a project object """ if self.payload != None and type(self.payload) is dict and ('settings' in self.payload): config.plugin_client_settings = self.payload['settings'] # depends on [control=['if'], data=[]] config.offline = self.args.offline config.connection = PluginConnection(client=self.args.client or 'SUBLIME_TEXT_3', ui=self.args.ui_switch, args=self.args, params=self.payload, operation=self.operation, verbose=self.args.verbose) config.project = MavensMateProject(params=self.payload, ui=self.args.ui_switch) config.sfdc_client = config.project.sfdc_client
def update_record(self, domain, record, data=None, priority=None, ttl=None,
                  comment=None):
    """
    Modifies an existing record for a domain.
    """
    record_dict = {"id": record.id, "name": record.name}
    # Only the optional parameters the caller actually supplied are
    # merged into record_dict by params_to_dict.
    changes = {"data": data,
               "priority": priority,
               "ttl": ttl,
               "comment": comment}
    utils.params_to_dict(changes, record_dict)
    return self.update_records(domain, [record_dict])
def function[update_record, parameter[self, domain, record, data, priority, ttl, comment]]: constant[ Modifies an existing record for a domain. ] variable[rdict] assign[=] dictionary[[<ast.Constant object at 0x7da20e961390>, <ast.Constant object at 0x7da20e960070>], [<ast.Attribute object at 0x7da20e9626e0>, <ast.Attribute object at 0x7da20e9605e0>]] variable[pdict] assign[=] dictionary[[<ast.Constant object at 0x7da20e962830>, <ast.Constant object at 0x7da20e9627d0>, <ast.Constant object at 0x7da20e961420>, <ast.Constant object at 0x7da20e961270>], [<ast.Name object at 0x7da20e9600a0>, <ast.Name object at 0x7da20e9631f0>, <ast.Name object at 0x7da20e9619f0>, <ast.Name object at 0x7da20e962bc0>]] call[name[utils].params_to_dict, parameter[name[pdict], name[rdict]]] return[call[name[self].update_records, parameter[name[domain], list[[<ast.Name object at 0x7da2054a55d0>]]]]]
keyword[def] identifier[update_record] ( identifier[self] , identifier[domain] , identifier[record] , identifier[data] = keyword[None] , identifier[priority] = keyword[None] , identifier[ttl] = keyword[None] , identifier[comment] = keyword[None] ): literal[string] identifier[rdict] ={ literal[string] : identifier[record] . identifier[id] , literal[string] : identifier[record] . identifier[name] , } identifier[pdict] ={ literal[string] : identifier[data] , literal[string] : identifier[priority] , literal[string] : identifier[ttl] , literal[string] : identifier[comment] , } identifier[utils] . identifier[params_to_dict] ( identifier[pdict] , identifier[rdict] ) keyword[return] identifier[self] . identifier[update_records] ( identifier[domain] ,[ identifier[rdict] ])
def update_record(self, domain, record, data=None, priority=None, ttl=None, comment=None): """ Modifies an existing record for a domain. """ rdict = {'id': record.id, 'name': record.name} pdict = {'data': data, 'priority': priority, 'ttl': ttl, 'comment': comment} utils.params_to_dict(pdict, rdict) return self.update_records(domain, [rdict])
def create_reply(post_data):
    '''
    Create the reply.
    '''
    reply_uid = tools.get_uuid()
    raw_reply = post_data['cnt_reply']
    TabReply.create(
        uid=reply_uid,
        post_id=post_data['post_id'],
        user_name=post_data['user_name'],
        user_id=post_data['user_id'],
        timestamp=tools.timestamp(),
        date=datetime.datetime.now(),
        # Store both the escaped markdown source and the rendered HTML.
        cnt_md=tornado.escape.xhtml_escape(raw_reply),
        cnt_html=tools.markdown2html(raw_reply),
        vote=0,
    )
    return reply_uid
def function[create_reply, parameter[post_data]]: constant[ Create the reply. ] variable[uid] assign[=] call[name[tools].get_uuid, parameter[]] call[name[TabReply].create, parameter[]] return[name[uid]]
keyword[def] identifier[create_reply] ( identifier[post_data] ): literal[string] identifier[uid] = identifier[tools] . identifier[get_uuid] () identifier[TabReply] . identifier[create] ( identifier[uid] = identifier[uid] , identifier[post_id] = identifier[post_data] [ literal[string] ], identifier[user_name] = identifier[post_data] [ literal[string] ], identifier[user_id] = identifier[post_data] [ literal[string] ], identifier[timestamp] = identifier[tools] . identifier[timestamp] (), identifier[date] = identifier[datetime] . identifier[datetime] . identifier[now] (), identifier[cnt_md] = identifier[tornado] . identifier[escape] . identifier[xhtml_escape] ( identifier[post_data] [ literal[string] ]), identifier[cnt_html] = identifier[tools] . identifier[markdown2html] ( identifier[post_data] [ literal[string] ]), identifier[vote] = literal[int] ) keyword[return] identifier[uid]
def create_reply(post_data): """ Create the reply. """ uid = tools.get_uuid() TabReply.create(uid=uid, post_id=post_data['post_id'], user_name=post_data['user_name'], user_id=post_data['user_id'], timestamp=tools.timestamp(), date=datetime.datetime.now(), cnt_md=tornado.escape.xhtml_escape(post_data['cnt_reply']), cnt_html=tools.markdown2html(post_data['cnt_reply']), vote=0) return uid
def knapsack2(p, v, cmax):
    """Knapsack problem: select maximum value set of items if total size
    not more than capacity. alternative implementation with same behavior.

    :param p: table with size of items
    :param v: table with value of items
    :param cmax: capacity of bag
    :requires: number of items non-zero
    :returns: value optimal solution, list of item indexes in solution
    :complexity: O(n * cmax), for n = number of items
    """
    n = len(p)
    # pgv[i][c] = best value achievable using items 0..i within capacity c
    pgv = [[0] * (cmax + 1) for _ in range(n)]
    for c in range(cmax + 1):  # base row: only item 0 is available
        pgv[0][c] = v[0] if c >= p[0] else 0
    pred = {}  # predecessor cell of each DP cell, to recover the choices
    for i in range(1, n):
        for c in range(cmax + 1):
            pgv[i][c] = pgv[i - 1][c]      # option 1: skip item i
            pred[(i, c)] = (i - 1, c)
            # option 2: take item i, if it fits and improves the value
            if c >= p[i] and pgv[i - 1][c - p[i]] + v[i] > pgv[i][c]:
                pgv[i][c] = pgv[i - 1][c - p[i]] + v[i]
                pred[(i, c)] = (i - 1, c - p[i])
    # Walk the predecessor links back from the final cell: a capacity drop
    # between a cell and its predecessor means that cell's item was taken.
    cursor = (n - 1, cmax)
    chosen = []
    while cursor in pred:
        if pred[cursor][1] < cursor[1]:
            chosen.append(cursor[0])
        cursor = pred[cursor]
    # The first row has no predecessor entry, so decide item 0 here.
    # BUG FIX: the original tested `cursor[1] > 0`, which reported item 0
    # as chosen whenever capacity remained, even when item 0 did not fit
    # (cursor[1] < p[0]). Item 0 is in the solution iff it fits.
    if cursor[1] >= p[0]:
        chosen.append(cursor[0])
    return pgv[n - 1][cmax], chosen
def function[knapsack2, parameter[p, v, cmax]]: constant[Knapsack problem: select maximum value set of items if total size not more than capacity. alternative implementation with same behavior. :param p: table with size of items :param v: table with value of items :param cmax: capacity of bag :requires: number of items non-zero :returns: value optimal solution, list of item indexes in solution :complexity: O(n * cmax), for n = number of items ] variable[n] assign[=] call[name[len], parameter[name[p]]] variable[pgv] assign[=] <ast.ListComp object at 0x7da1b07cd3f0> for taget[name[c]] in starred[call[name[range], parameter[binary_operation[name[cmax] + constant[1]]]]] begin[:] call[call[name[pgv]][constant[0]]][name[c]] assign[=] <ast.IfExp object at 0x7da1b07ce7d0> variable[pred] assign[=] dictionary[[], []] for taget[name[i]] in starred[call[name[range], parameter[constant[1], name[n]]]] begin[:] for taget[name[c]] in starred[call[name[range], parameter[binary_operation[name[cmax] + constant[1]]]]] begin[:] call[call[name[pgv]][name[i]]][name[c]] assign[=] call[call[name[pgv]][binary_operation[name[i] - constant[1]]]][name[c]] call[name[pred]][tuple[[<ast.Name object at 0x7da1b07cd420>, <ast.Name object at 0x7da1b07ce650>]]] assign[=] tuple[[<ast.BinOp object at 0x7da1b07ccdc0>, <ast.Name object at 0x7da1b07ce140>]] if <ast.BoolOp object at 0x7da1b07ce260> begin[:] call[call[name[pgv]][name[i]]][name[c]] assign[=] binary_operation[call[call[name[pgv]][binary_operation[name[i] - constant[1]]]][binary_operation[name[c] - call[name[p]][name[i]]]] + call[name[v]][name[i]]] call[name[pred]][tuple[[<ast.Name object at 0x7da1b07cc940>, <ast.Name object at 0x7da1b07cca30>]]] assign[=] tuple[[<ast.BinOp object at 0x7da1b07cc1c0>, <ast.BinOp object at 0x7da1b07ce200>]] variable[cursor] assign[=] tuple[[<ast.BinOp object at 0x7da1b07cd900>, <ast.Name object at 0x7da1b07cc2b0>]] variable[chosen] assign[=] list[[]] while compare[name[cursor] in name[pred]] begin[:] if 
compare[call[call[name[pred]][name[cursor]]][constant[1]] less[<] call[name[cursor]][constant[1]]] begin[:] call[name[chosen].append, parameter[call[name[cursor]][constant[0]]]] variable[cursor] assign[=] call[name[pred]][name[cursor]] if compare[call[name[cursor]][constant[1]] greater[>] constant[0]] begin[:] call[name[chosen].append, parameter[call[name[cursor]][constant[0]]]] return[tuple[[<ast.Subscript object at 0x7da1b07cd000>, <ast.Name object at 0x7da1b07cea70>]]]
keyword[def] identifier[knapsack2] ( identifier[p] , identifier[v] , identifier[cmax] ): literal[string] identifier[n] = identifier[len] ( identifier[p] ) identifier[pgv] =[[ literal[int] ]*( identifier[cmax] + literal[int] ) keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[n] )] keyword[for] identifier[c] keyword[in] identifier[range] ( identifier[cmax] + literal[int] ): identifier[pgv] [ literal[int] ][ identifier[c] ]= identifier[v] [ literal[int] ] keyword[if] identifier[c] >= identifier[p] [ literal[int] ] keyword[else] literal[int] identifier[pred] ={} keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[n] ): keyword[for] identifier[c] keyword[in] identifier[range] ( identifier[cmax] + literal[int] ): identifier[pgv] [ identifier[i] ][ identifier[c] ]= identifier[pgv] [ identifier[i] - literal[int] ][ identifier[c] ] identifier[pred] [( identifier[i] , identifier[c] )]=( identifier[i] - literal[int] , identifier[c] ) keyword[if] identifier[c] >= identifier[p] [ identifier[i] ] keyword[and] identifier[pgv] [ identifier[i] - literal[int] ][ identifier[c] - identifier[p] [ identifier[i] ]]+ identifier[v] [ identifier[i] ]> identifier[pgv] [ identifier[i] ][ identifier[c] ]: identifier[pgv] [ identifier[i] ][ identifier[c] ]= identifier[pgv] [ identifier[i] - literal[int] ][ identifier[c] - identifier[p] [ identifier[i] ]]+ identifier[v] [ identifier[i] ] identifier[pred] [( identifier[i] , identifier[c] )]=( identifier[i] - literal[int] , identifier[c] - identifier[p] [ identifier[i] ]) identifier[cursor] =( identifier[n] - literal[int] , identifier[cmax] ) identifier[chosen] =[] keyword[while] identifier[cursor] keyword[in] identifier[pred] : keyword[if] identifier[pred] [ identifier[cursor] ][ literal[int] ]< identifier[cursor] [ literal[int] ]: identifier[chosen] . 
identifier[append] ( identifier[cursor] [ literal[int] ]) identifier[cursor] = identifier[pred] [ identifier[cursor] ] keyword[if] identifier[cursor] [ literal[int] ]> literal[int] : identifier[chosen] . identifier[append] ( identifier[cursor] [ literal[int] ]) keyword[return] identifier[pgv] [ identifier[n] - literal[int] ][ identifier[cmax] ], identifier[chosen]
def knapsack2(p, v, cmax): """Knapsack problem: select maximum value set of items if total size not more than capacity. alternative implementation with same behavior. :param p: table with size of items :param v: table with value of items :param cmax: capacity of bag :requires: number of items non-zero :returns: value optimal solution, list of item indexes in solution :complexity: O(n * cmax), for n = number of items """ n = len(p) # Plus grande valeur obtenable avec objets ≤ i et capacité c pgv = [[0] * (cmax + 1) for _ in range(n)] for c in range(cmax + 1): # Initialisation pgv[0][c] = v[0] if c >= p[0] else 0 # depends on [control=['for'], data=['c']] pred = {} # Prédécesseurs pour mémoriser les choix faits for i in range(1, n): for c in range(cmax + 1): pgv[i][c] = pgv[i - 1][c] # Si on ne prend pas l'objet i pred[i, c] = (i - 1, c) # Est-ce que prendre l'objet i est préférable ? if c >= p[i] and pgv[i - 1][c - p[i]] + v[i] > pgv[i][c]: pgv[i][c] = pgv[i - 1][c - p[i]] + v[i] pred[i, c] = (i - 1, c - p[i]) # On marque le prédécesseur # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']] # depends on [control=['for'], data=['i']] # On pourrait s'arrêter là, mais si on veut un sous-ensemble d'objets # optimal, il faut remonter les marquages cursor = (n - 1, cmax) chosen = [] while cursor in pred: # Si la case prédécesseur a une capacité inférieure if pred[cursor][1] < cursor[1]: # C'est qu'on a ramassé l'objet sur le chemin chosen.append(cursor[0]) # depends on [control=['if'], data=[]] cursor = pred[cursor] # depends on [control=['while'], data=['cursor', 'pred']] if cursor[1] > 0: # A-t-on pris le premier objet ? # (La première ligne n'a pas de prédécesseur.) chosen.append(cursor[0]) # depends on [control=['if'], data=[]] return (pgv[n - 1][cmax], chosen)
def _query_systemstate(self): """Query the maximum number of connections supported by this adapter """ def status_filter_func(event): if event.command_class == 3 and event.command == 0: return True return False try: response = self._send_command(0, 6, []) maxconn, = unpack("<B", response.payload) except InternalTimeoutError: return False, {'reason': 'Timeout waiting for command response'} events = self._wait_process_events(0.5, status_filter_func, lambda x: False) conns = [] for event in events: handle, flags, addr, addr_type, interval, timeout, lat, bond = unpack("<BB6sBHHHB", event.payload) if flags != 0: conns.append(handle) return True, {'max_connections': maxconn, 'active_connections': conns}
def function[_query_systemstate, parameter[self]]: constant[Query the maximum number of connections supported by this adapter ] def function[status_filter_func, parameter[event]]: if <ast.BoolOp object at 0x7da18fe921a0> begin[:] return[constant[True]] return[constant[False]] <ast.Try object at 0x7da18fe90a90> variable[events] assign[=] call[name[self]._wait_process_events, parameter[constant[0.5], name[status_filter_func], <ast.Lambda object at 0x7da18fe916f0>]] variable[conns] assign[=] list[[]] for taget[name[event]] in starred[name[events]] begin[:] <ast.Tuple object at 0x7da18fe92ec0> assign[=] call[name[unpack], parameter[constant[<BB6sBHHHB], name[event].payload]] if compare[name[flags] not_equal[!=] constant[0]] begin[:] call[name[conns].append, parameter[name[handle]]] return[tuple[[<ast.Constant object at 0x7da18fe92200>, <ast.Dict object at 0x7da18fe92d40>]]]
keyword[def] identifier[_query_systemstate] ( identifier[self] ): literal[string] keyword[def] identifier[status_filter_func] ( identifier[event] ): keyword[if] identifier[event] . identifier[command_class] == literal[int] keyword[and] identifier[event] . identifier[command] == literal[int] : keyword[return] keyword[True] keyword[return] keyword[False] keyword[try] : identifier[response] = identifier[self] . identifier[_send_command] ( literal[int] , literal[int] ,[]) identifier[maxconn] ,= identifier[unpack] ( literal[string] , identifier[response] . identifier[payload] ) keyword[except] identifier[InternalTimeoutError] : keyword[return] keyword[False] ,{ literal[string] : literal[string] } identifier[events] = identifier[self] . identifier[_wait_process_events] ( literal[int] , identifier[status_filter_func] , keyword[lambda] identifier[x] : keyword[False] ) identifier[conns] =[] keyword[for] identifier[event] keyword[in] identifier[events] : identifier[handle] , identifier[flags] , identifier[addr] , identifier[addr_type] , identifier[interval] , identifier[timeout] , identifier[lat] , identifier[bond] = identifier[unpack] ( literal[string] , identifier[event] . identifier[payload] ) keyword[if] identifier[flags] != literal[int] : identifier[conns] . identifier[append] ( identifier[handle] ) keyword[return] keyword[True] ,{ literal[string] : identifier[maxconn] , literal[string] : identifier[conns] }
def _query_systemstate(self): """Query the maximum number of connections supported by this adapter """ def status_filter_func(event): if event.command_class == 3 and event.command == 0: return True # depends on [control=['if'], data=[]] return False try: response = self._send_command(0, 6, []) (maxconn,) = unpack('<B', response.payload) # depends on [control=['try'], data=[]] except InternalTimeoutError: return (False, {'reason': 'Timeout waiting for command response'}) # depends on [control=['except'], data=[]] events = self._wait_process_events(0.5, status_filter_func, lambda x: False) conns = [] for event in events: (handle, flags, addr, addr_type, interval, timeout, lat, bond) = unpack('<BB6sBHHHB', event.payload) if flags != 0: conns.append(handle) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['event']] return (True, {'max_connections': maxconn, 'active_connections': conns})
def get_scm(path, search_up=False, explicit_alias=None): """ Returns one of alias from ``ALIASES`` (in order of precedence same as shortcuts given in ``ALIASES``) and top working dir path for the given argument. If no scm-specific directory is found or more than one scm is found at that directory, ``VCSError`` is raised. :param search_up: if set to ``True``, this function would try to move up to parent directory every time no scm is recognized for the currently checked path. Default: ``False``. :param explicit_alias: can be one of available backend aliases, when given it will return given explicit alias in repositories under more than one version control, if explicit_alias is different than found it will raise VCSError """ if not os.path.isdir(path): raise VCSError("Given path %s is not a directory" % path) def get_scms(path): return [(scm, path) for scm in get_scms_for_path(path)] found_scms = get_scms(path) while not found_scms and search_up: newpath = abspath(path, '..') if newpath == path: break path = newpath found_scms = get_scms(path) if len(found_scms) > 1: for scm in found_scms: if scm[0] == explicit_alias: return scm raise VCSError('More than one [%s] scm found at given path %s' % (','.join((x[0] for x in found_scms)), path)) if len(found_scms) is 0: raise VCSError('No scm found at given path %s' % path) return found_scms[0]
def function[get_scm, parameter[path, search_up, explicit_alias]]: constant[ Returns one of alias from ``ALIASES`` (in order of precedence same as shortcuts given in ``ALIASES``) and top working dir path for the given argument. If no scm-specific directory is found or more than one scm is found at that directory, ``VCSError`` is raised. :param search_up: if set to ``True``, this function would try to move up to parent directory every time no scm is recognized for the currently checked path. Default: ``False``. :param explicit_alias: can be one of available backend aliases, when given it will return given explicit alias in repositories under more than one version control, if explicit_alias is different than found it will raise VCSError ] if <ast.UnaryOp object at 0x7da2054a4cd0> begin[:] <ast.Raise object at 0x7da2054a5d20> def function[get_scms, parameter[path]]: return[<ast.ListComp object at 0x7da2054a5db0>] variable[found_scms] assign[=] call[name[get_scms], parameter[name[path]]] while <ast.BoolOp object at 0x7da2054a5ed0> begin[:] variable[newpath] assign[=] call[name[abspath], parameter[name[path], constant[..]]] if compare[name[newpath] equal[==] name[path]] begin[:] break variable[path] assign[=] name[newpath] variable[found_scms] assign[=] call[name[get_scms], parameter[name[path]]] if compare[call[name[len], parameter[name[found_scms]]] greater[>] constant[1]] begin[:] for taget[name[scm]] in starred[name[found_scms]] begin[:] if compare[call[name[scm]][constant[0]] equal[==] name[explicit_alias]] begin[:] return[name[scm]] <ast.Raise object at 0x7da2054a6e90> if compare[call[name[len], parameter[name[found_scms]]] is constant[0]] begin[:] <ast.Raise object at 0x7da18f00d810> return[call[name[found_scms]][constant[0]]]
keyword[def] identifier[get_scm] ( identifier[path] , identifier[search_up] = keyword[False] , identifier[explicit_alias] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[path] ): keyword[raise] identifier[VCSError] ( literal[string] % identifier[path] ) keyword[def] identifier[get_scms] ( identifier[path] ): keyword[return] [( identifier[scm] , identifier[path] ) keyword[for] identifier[scm] keyword[in] identifier[get_scms_for_path] ( identifier[path] )] identifier[found_scms] = identifier[get_scms] ( identifier[path] ) keyword[while] keyword[not] identifier[found_scms] keyword[and] identifier[search_up] : identifier[newpath] = identifier[abspath] ( identifier[path] , literal[string] ) keyword[if] identifier[newpath] == identifier[path] : keyword[break] identifier[path] = identifier[newpath] identifier[found_scms] = identifier[get_scms] ( identifier[path] ) keyword[if] identifier[len] ( identifier[found_scms] )> literal[int] : keyword[for] identifier[scm] keyword[in] identifier[found_scms] : keyword[if] identifier[scm] [ literal[int] ]== identifier[explicit_alias] : keyword[return] identifier[scm] keyword[raise] identifier[VCSError] ( literal[string] %( literal[string] . identifier[join] (( identifier[x] [ literal[int] ] keyword[for] identifier[x] keyword[in] identifier[found_scms] )), identifier[path] )) keyword[if] identifier[len] ( identifier[found_scms] ) keyword[is] literal[int] : keyword[raise] identifier[VCSError] ( literal[string] % identifier[path] ) keyword[return] identifier[found_scms] [ literal[int] ]
def get_scm(path, search_up=False, explicit_alias=None): """ Returns one of alias from ``ALIASES`` (in order of precedence same as shortcuts given in ``ALIASES``) and top working dir path for the given argument. If no scm-specific directory is found or more than one scm is found at that directory, ``VCSError`` is raised. :param search_up: if set to ``True``, this function would try to move up to parent directory every time no scm is recognized for the currently checked path. Default: ``False``. :param explicit_alias: can be one of available backend aliases, when given it will return given explicit alias in repositories under more than one version control, if explicit_alias is different than found it will raise VCSError """ if not os.path.isdir(path): raise VCSError('Given path %s is not a directory' % path) # depends on [control=['if'], data=[]] def get_scms(path): return [(scm, path) for scm in get_scms_for_path(path)] found_scms = get_scms(path) while not found_scms and search_up: newpath = abspath(path, '..') if newpath == path: break # depends on [control=['if'], data=[]] path = newpath found_scms = get_scms(path) # depends on [control=['while'], data=[]] if len(found_scms) > 1: for scm in found_scms: if scm[0] == explicit_alias: return scm # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['scm']] raise VCSError('More than one [%s] scm found at given path %s' % (','.join((x[0] for x in found_scms)), path)) # depends on [control=['if'], data=[]] if len(found_scms) is 0: raise VCSError('No scm found at given path %s' % path) # depends on [control=['if'], data=[]] return found_scms[0]
def element_neighbors(self): """Return a list with element numbers (zero indexed) of neighboring elements. Note that the elements are not sorted. No spacial orientation can be inferred from the order of neighbors. WARNING: This function is slow due to a nested loop. This would be a good starting point for further optimizations. In order to speed things up, we could search using the raw data, i.e., with CutMcK enabled sorting, and then restrict the loops to 2x the bandwidth (before - after). While not being returned, this function also sets the variable self.element_neighbors_edges, in which the common nodes with each neighbor are stored. Returns ------- neighbors : list a list (length equal to nr of elements) with neighboring elements Examples -------- """ if self.element_neighbors_data is not None: return self.element_neighbors_data max_nr_edges = self.header['element_infos'][0, 2] # initialize the neighbor array self.element_neighbors_data = [] self.element_neighbors_edges = [] # determine neighbors print('Looking for neighbors') for nr, element_nodes in enumerate(self.elements): # print('element {0}/{1}'.format(nr + 1, self.nr_of_elements)) # print(element_nodes) neighbors = [] neighbors_edges = [] # store the edges to this neighbor for nr1, el in enumerate(self.elements): # we look for elements that have two nodes in common with this # element intersection = np.intersect1d(element_nodes, el) if intersection.size == 2: neighbors.append(nr1) neighbors_edges.append(intersection) # stop if we reached the maximum number of possible edges # this saves us quite some loop iterations if len(neighbors) == max_nr_edges: break self.element_neighbors_data.append(neighbors) self.element_neighbors_edges.append(neighbors_edges) return self.element_neighbors_data
def function[element_neighbors, parameter[self]]: constant[Return a list with element numbers (zero indexed) of neighboring elements. Note that the elements are not sorted. No spacial orientation can be inferred from the order of neighbors. WARNING: This function is slow due to a nested loop. This would be a good starting point for further optimizations. In order to speed things up, we could search using the raw data, i.e., with CutMcK enabled sorting, and then restrict the loops to 2x the bandwidth (before - after). While not being returned, this function also sets the variable self.element_neighbors_edges, in which the common nodes with each neighbor are stored. Returns ------- neighbors : list a list (length equal to nr of elements) with neighboring elements Examples -------- ] if compare[name[self].element_neighbors_data is_not constant[None]] begin[:] return[name[self].element_neighbors_data] variable[max_nr_edges] assign[=] call[call[name[self].header][constant[element_infos]]][tuple[[<ast.Constant object at 0x7da1b24aefe0>, <ast.Constant object at 0x7da1b24ae380>]]] name[self].element_neighbors_data assign[=] list[[]] name[self].element_neighbors_edges assign[=] list[[]] call[name[print], parameter[constant[Looking for neighbors]]] for taget[tuple[[<ast.Name object at 0x7da1b24ae0b0>, <ast.Name object at 0x7da1b24ae950>]]] in starred[call[name[enumerate], parameter[name[self].elements]]] begin[:] variable[neighbors] assign[=] list[[]] variable[neighbors_edges] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b2390640>, <ast.Name object at 0x7da1b23922c0>]]] in starred[call[name[enumerate], parameter[name[self].elements]]] begin[:] variable[intersection] assign[=] call[name[np].intersect1d, parameter[name[element_nodes], name[el]]] if compare[name[intersection].size equal[==] constant[2]] begin[:] call[name[neighbors].append, parameter[name[nr1]]] call[name[neighbors_edges].append, parameter[name[intersection]]] if compare[call[name[len], 
parameter[name[neighbors]]] equal[==] name[max_nr_edges]] begin[:] break call[name[self].element_neighbors_data.append, parameter[name[neighbors]]] call[name[self].element_neighbors_edges.append, parameter[name[neighbors_edges]]] return[name[self].element_neighbors_data]
keyword[def] identifier[element_neighbors] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[element_neighbors_data] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[self] . identifier[element_neighbors_data] identifier[max_nr_edges] = identifier[self] . identifier[header] [ literal[string] ][ literal[int] , literal[int] ] identifier[self] . identifier[element_neighbors_data] =[] identifier[self] . identifier[element_neighbors_edges] =[] identifier[print] ( literal[string] ) keyword[for] identifier[nr] , identifier[element_nodes] keyword[in] identifier[enumerate] ( identifier[self] . identifier[elements] ): identifier[neighbors] =[] identifier[neighbors_edges] =[] keyword[for] identifier[nr1] , identifier[el] keyword[in] identifier[enumerate] ( identifier[self] . identifier[elements] ): identifier[intersection] = identifier[np] . identifier[intersect1d] ( identifier[element_nodes] , identifier[el] ) keyword[if] identifier[intersection] . identifier[size] == literal[int] : identifier[neighbors] . identifier[append] ( identifier[nr1] ) identifier[neighbors_edges] . identifier[append] ( identifier[intersection] ) keyword[if] identifier[len] ( identifier[neighbors] )== identifier[max_nr_edges] : keyword[break] identifier[self] . identifier[element_neighbors_data] . identifier[append] ( identifier[neighbors] ) identifier[self] . identifier[element_neighbors_edges] . identifier[append] ( identifier[neighbors_edges] ) keyword[return] identifier[self] . identifier[element_neighbors_data]
def element_neighbors(self): """Return a list with element numbers (zero indexed) of neighboring elements. Note that the elements are not sorted. No spacial orientation can be inferred from the order of neighbors. WARNING: This function is slow due to a nested loop. This would be a good starting point for further optimizations. In order to speed things up, we could search using the raw data, i.e., with CutMcK enabled sorting, and then restrict the loops to 2x the bandwidth (before - after). While not being returned, this function also sets the variable self.element_neighbors_edges, in which the common nodes with each neighbor are stored. Returns ------- neighbors : list a list (length equal to nr of elements) with neighboring elements Examples -------- """ if self.element_neighbors_data is not None: return self.element_neighbors_data # depends on [control=['if'], data=[]] max_nr_edges = self.header['element_infos'][0, 2] # initialize the neighbor array self.element_neighbors_data = [] self.element_neighbors_edges = [] # determine neighbors print('Looking for neighbors') for (nr, element_nodes) in enumerate(self.elements): # print('element {0}/{1}'.format(nr + 1, self.nr_of_elements)) # print(element_nodes) neighbors = [] neighbors_edges = [] # store the edges to this neighbor for (nr1, el) in enumerate(self.elements): # we look for elements that have two nodes in common with this # element intersection = np.intersect1d(element_nodes, el) if intersection.size == 2: neighbors.append(nr1) neighbors_edges.append(intersection) # stop if we reached the maximum number of possible edges # this saves us quite some loop iterations if len(neighbors) == max_nr_edges: break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] self.element_neighbors_data.append(neighbors) self.element_neighbors_edges.append(neighbors_edges) # depends on [control=['for'], data=[]] return self.element_neighbors_data
def allow_ip(*ips: typing.Union[str, ipaddress.IPv4Network, ipaddress.IPv4Address]): """ Allow ip address. :param ips: :return: """ for ip in ips: if isinstance(ip, ipaddress.IPv4Address): allowed_ips.add(ip) elif isinstance(ip, str): allowed_ips.add(ipaddress.IPv4Address(ip)) elif isinstance(ip, ipaddress.IPv4Network): allowed_ips.update(ip.hosts()) else: raise ValueError(f"Bad type of ipaddress: {type(ip)} ('{ip}')")
def function[allow_ip, parameter[]]: constant[ Allow ip address. :param ips: :return: ] for taget[name[ip]] in starred[name[ips]] begin[:] if call[name[isinstance], parameter[name[ip], name[ipaddress].IPv4Address]] begin[:] call[name[allowed_ips].add, parameter[name[ip]]]
keyword[def] identifier[allow_ip] (* identifier[ips] : identifier[typing] . identifier[Union] [ identifier[str] , identifier[ipaddress] . identifier[IPv4Network] , identifier[ipaddress] . identifier[IPv4Address] ]): literal[string] keyword[for] identifier[ip] keyword[in] identifier[ips] : keyword[if] identifier[isinstance] ( identifier[ip] , identifier[ipaddress] . identifier[IPv4Address] ): identifier[allowed_ips] . identifier[add] ( identifier[ip] ) keyword[elif] identifier[isinstance] ( identifier[ip] , identifier[str] ): identifier[allowed_ips] . identifier[add] ( identifier[ipaddress] . identifier[IPv4Address] ( identifier[ip] )) keyword[elif] identifier[isinstance] ( identifier[ip] , identifier[ipaddress] . identifier[IPv4Network] ): identifier[allowed_ips] . identifier[update] ( identifier[ip] . identifier[hosts] ()) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] )
def allow_ip(*ips: typing.Union[str, ipaddress.IPv4Network, ipaddress.IPv4Address]): """ Allow ip address. :param ips: :return: """ for ip in ips: if isinstance(ip, ipaddress.IPv4Address): allowed_ips.add(ip) # depends on [control=['if'], data=[]] elif isinstance(ip, str): allowed_ips.add(ipaddress.IPv4Address(ip)) # depends on [control=['if'], data=[]] elif isinstance(ip, ipaddress.IPv4Network): allowed_ips.update(ip.hosts()) # depends on [control=['if'], data=[]] else: raise ValueError(f"Bad type of ipaddress: {type(ip)} ('{ip}')") # depends on [control=['for'], data=['ip']]
def update_refresh_state(self, id_or_uri, refresh_state_data): """ Refreshes a given intelligent power delivery device. Args: id_or_uri: Can be either the power device id or the uri refresh_state_data: Power device refresh request Returns: str: The power state """ uri = self._client.build_uri(id_or_uri) + "/refreshState" return self._client.update(refresh_state_data, uri=uri)
def function[update_refresh_state, parameter[self, id_or_uri, refresh_state_data]]: constant[ Refreshes a given intelligent power delivery device. Args: id_or_uri: Can be either the power device id or the uri refresh_state_data: Power device refresh request Returns: str: The power state ] variable[uri] assign[=] binary_operation[call[name[self]._client.build_uri, parameter[name[id_or_uri]]] + constant[/refreshState]] return[call[name[self]._client.update, parameter[name[refresh_state_data]]]]
keyword[def] identifier[update_refresh_state] ( identifier[self] , identifier[id_or_uri] , identifier[refresh_state_data] ): literal[string] identifier[uri] = identifier[self] . identifier[_client] . identifier[build_uri] ( identifier[id_or_uri] )+ literal[string] keyword[return] identifier[self] . identifier[_client] . identifier[update] ( identifier[refresh_state_data] , identifier[uri] = identifier[uri] )
def update_refresh_state(self, id_or_uri, refresh_state_data): """ Refreshes a given intelligent power delivery device. Args: id_or_uri: Can be either the power device id or the uri refresh_state_data: Power device refresh request Returns: str: The power state """ uri = self._client.build_uri(id_or_uri) + '/refreshState' return self._client.update(refresh_state_data, uri=uri)
def apply(self, word, ctx=None): """ ignore ctx information right now """ return Sequential.in_sequence(word,AdjacentVowels.uyir_letters,AdjacentVowels.reason)
def function[apply, parameter[self, word, ctx]]: constant[ ignore ctx information right now ] return[call[name[Sequential].in_sequence, parameter[name[word], name[AdjacentVowels].uyir_letters, name[AdjacentVowels].reason]]]
keyword[def] identifier[apply] ( identifier[self] , identifier[word] , identifier[ctx] = keyword[None] ): literal[string] keyword[return] identifier[Sequential] . identifier[in_sequence] ( identifier[word] , identifier[AdjacentVowels] . identifier[uyir_letters] , identifier[AdjacentVowels] . identifier[reason] )
def apply(self, word, ctx=None): """ ignore ctx information right now """ return Sequential.in_sequence(word, AdjacentVowels.uyir_letters, AdjacentVowels.reason)
def _normalize_path(self, path): """ Normalizes a file path so that it returns a path relative to the root repo directory. """ norm_path = os.path.normpath(path) return os.path.relpath(norm_path, start=self._get_working_dir())
def function[_normalize_path, parameter[self, path]]: constant[ Normalizes a file path so that it returns a path relative to the root repo directory. ] variable[norm_path] assign[=] call[name[os].path.normpath, parameter[name[path]]] return[call[name[os].path.relpath, parameter[name[norm_path]]]]
keyword[def] identifier[_normalize_path] ( identifier[self] , identifier[path] ): literal[string] identifier[norm_path] = identifier[os] . identifier[path] . identifier[normpath] ( identifier[path] ) keyword[return] identifier[os] . identifier[path] . identifier[relpath] ( identifier[norm_path] , identifier[start] = identifier[self] . identifier[_get_working_dir] ())
def _normalize_path(self, path): """ Normalizes a file path so that it returns a path relative to the root repo directory. """ norm_path = os.path.normpath(path) return os.path.relpath(norm_path, start=self._get_working_dir())
def _commandline(self, *args, **kwargs): """Returns the command line (without pipes) as a list. Inserts driver if present""" if(self.driver is not None): return [self.driver, self.command_name] + self.transform_args(*args, **kwargs) return [self.command_name] + self.transform_args(*args, **kwargs)
def function[_commandline, parameter[self]]: constant[Returns the command line (without pipes) as a list. Inserts driver if present] if compare[name[self].driver is_not constant[None]] begin[:] return[binary_operation[list[[<ast.Attribute object at 0x7da20e9b2560>, <ast.Attribute object at 0x7da20c795630>]] + call[name[self].transform_args, parameter[<ast.Starred object at 0x7da20c795660>]]]] return[binary_operation[list[[<ast.Attribute object at 0x7da204564040>]] + call[name[self].transform_args, parameter[<ast.Starred object at 0x7da2045653c0>]]]]
keyword[def] identifier[_commandline] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[if] ( identifier[self] . identifier[driver] keyword[is] keyword[not] keyword[None] ): keyword[return] [ identifier[self] . identifier[driver] , identifier[self] . identifier[command_name] ]+ identifier[self] . identifier[transform_args] (* identifier[args] ,** identifier[kwargs] ) keyword[return] [ identifier[self] . identifier[command_name] ]+ identifier[self] . identifier[transform_args] (* identifier[args] ,** identifier[kwargs] )
def _commandline(self, *args, **kwargs): """Returns the command line (without pipes) as a list. Inserts driver if present""" if self.driver is not None: return [self.driver, self.command_name] + self.transform_args(*args, **kwargs) # depends on [control=['if'], data=[]] return [self.command_name] + self.transform_args(*args, **kwargs)
def bytes_to_int(byte_array, big_endian=True, signed=False): """ Converts a byte array to an integer. """ if six.PY3: order = 'little' if big_endian: order = 'big' return int.from_bytes(byte_array, byteorder=order, signed=signed) else: length = len(byte_array) if length == 1: code = 'B' elif length == 2: code = 'H' elif length == 4: code = 'L' elif length == 8: code = 'Q' else: raise Exception("bytes_to_int : length of byte_array should be 1, 2, 4, or 8") if big_endian: code = '>'+code else: code = '<'+code if signed: code = code.lower() return struct.unpack(code, byte_array)[0]
def function[bytes_to_int, parameter[byte_array, big_endian, signed]]: constant[ Converts a byte array to an integer. ] if name[six].PY3 begin[:] variable[order] assign[=] constant[little] if name[big_endian] begin[:] variable[order] assign[=] constant[big] return[call[name[int].from_bytes, parameter[name[byte_array]]]]
keyword[def] identifier[bytes_to_int] ( identifier[byte_array] , identifier[big_endian] = keyword[True] , identifier[signed] = keyword[False] ): literal[string] keyword[if] identifier[six] . identifier[PY3] : identifier[order] = literal[string] keyword[if] identifier[big_endian] : identifier[order] = literal[string] keyword[return] identifier[int] . identifier[from_bytes] ( identifier[byte_array] , identifier[byteorder] = identifier[order] , identifier[signed] = identifier[signed] ) keyword[else] : identifier[length] = identifier[len] ( identifier[byte_array] ) keyword[if] identifier[length] == literal[int] : identifier[code] = literal[string] keyword[elif] identifier[length] == literal[int] : identifier[code] = literal[string] keyword[elif] identifier[length] == literal[int] : identifier[code] = literal[string] keyword[elif] identifier[length] == literal[int] : identifier[code] = literal[string] keyword[else] : keyword[raise] identifier[Exception] ( literal[string] ) keyword[if] identifier[big_endian] : identifier[code] = literal[string] + identifier[code] keyword[else] : identifier[code] = literal[string] + identifier[code] keyword[if] identifier[signed] : identifier[code] = identifier[code] . identifier[lower] () keyword[return] identifier[struct] . identifier[unpack] ( identifier[code] , identifier[byte_array] )[ literal[int] ]
def bytes_to_int(byte_array, big_endian=True, signed=False): """ Converts a byte array to an integer. """ if six.PY3: order = 'little' if big_endian: order = 'big' # depends on [control=['if'], data=[]] return int.from_bytes(byte_array, byteorder=order, signed=signed) # depends on [control=['if'], data=[]] else: length = len(byte_array) if length == 1: code = 'B' # depends on [control=['if'], data=[]] elif length == 2: code = 'H' # depends on [control=['if'], data=[]] elif length == 4: code = 'L' # depends on [control=['if'], data=[]] elif length == 8: code = 'Q' # depends on [control=['if'], data=[]] else: raise Exception('bytes_to_int : length of byte_array should be 1, 2, 4, or 8') if big_endian: code = '>' + code # depends on [control=['if'], data=[]] else: code = '<' + code if signed: code = code.lower() # depends on [control=['if'], data=[]] return struct.unpack(code, byte_array)[0]
def _stdout_level(self): """Returns the level that stdout runs at""" for level, consumer in self.consumers: if consumer is sys.stdout: return level return self.FATAL
def function[_stdout_level, parameter[self]]: constant[Returns the level that stdout runs at] for taget[tuple[[<ast.Name object at 0x7da1b170c640>, <ast.Name object at 0x7da1b170d6c0>]]] in starred[name[self].consumers] begin[:] if compare[name[consumer] is name[sys].stdout] begin[:] return[name[level]] return[name[self].FATAL]
keyword[def] identifier[_stdout_level] ( identifier[self] ): literal[string] keyword[for] identifier[level] , identifier[consumer] keyword[in] identifier[self] . identifier[consumers] : keyword[if] identifier[consumer] keyword[is] identifier[sys] . identifier[stdout] : keyword[return] identifier[level] keyword[return] identifier[self] . identifier[FATAL]
def _stdout_level(self): """Returns the level that stdout runs at""" for (level, consumer) in self.consumers: if consumer is sys.stdout: return level # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return self.FATAL
async def render_list(self, request=None) -> List[Text]: """ Render the translation as a list if there is multiple strings for this single key. :param request: Bot request. """ from bernard.middleware import MiddlewareManager if request: tz = await request.user.get_timezone() locale = await request.get_locale() flags = await request.get_trans_flags() else: tz = None locale = self.wd.list_locales()[0] flags = {} rp = MiddlewareManager.instance()\ .get('resolve_trans_params', self._resolve_params) resolved_params = await rp(self.params, request) f = I18nFormatter(self.wd.choose_locale(locale), tz) return self.wd.get( self.key, self.count, f, locale, resolved_params, flags, )
<ast.AsyncFunctionDef object at 0x7da2041d8dc0>
keyword[async] keyword[def] identifier[render_list] ( identifier[self] , identifier[request] = keyword[None] )-> identifier[List] [ identifier[Text] ]: literal[string] keyword[from] identifier[bernard] . identifier[middleware] keyword[import] identifier[MiddlewareManager] keyword[if] identifier[request] : identifier[tz] = keyword[await] identifier[request] . identifier[user] . identifier[get_timezone] () identifier[locale] = keyword[await] identifier[request] . identifier[get_locale] () identifier[flags] = keyword[await] identifier[request] . identifier[get_trans_flags] () keyword[else] : identifier[tz] = keyword[None] identifier[locale] = identifier[self] . identifier[wd] . identifier[list_locales] ()[ literal[int] ] identifier[flags] ={} identifier[rp] = identifier[MiddlewareManager] . identifier[instance] (). identifier[get] ( literal[string] , identifier[self] . identifier[_resolve_params] ) identifier[resolved_params] = keyword[await] identifier[rp] ( identifier[self] . identifier[params] , identifier[request] ) identifier[f] = identifier[I18nFormatter] ( identifier[self] . identifier[wd] . identifier[choose_locale] ( identifier[locale] ), identifier[tz] ) keyword[return] identifier[self] . identifier[wd] . identifier[get] ( identifier[self] . identifier[key] , identifier[self] . identifier[count] , identifier[f] , identifier[locale] , identifier[resolved_params] , identifier[flags] , )
async def render_list(self, request=None) -> List[Text]: """ Render the translation as a list if there is multiple strings for this single key. :param request: Bot request. """ from bernard.middleware import MiddlewareManager if request: tz = await request.user.get_timezone() locale = await request.get_locale() flags = await request.get_trans_flags() # depends on [control=['if'], data=[]] else: tz = None locale = self.wd.list_locales()[0] flags = {} rp = MiddlewareManager.instance().get('resolve_trans_params', self._resolve_params) resolved_params = await rp(self.params, request) f = I18nFormatter(self.wd.choose_locale(locale), tz) return self.wd.get(self.key, self.count, f, locale, resolved_params, flags)
def add_link_header(response, links): """Add a Link HTTP header to a REST response. :param response: REST response instance :param links: Dictionary of links """ if links is not None: response.headers.extend({ 'Link': ', '.join([ '<{0}>; rel="{1}"'.format(l, r) for r, l in links.items()]) })
def function[add_link_header, parameter[response, links]]: constant[Add a Link HTTP header to a REST response. :param response: REST response instance :param links: Dictionary of links ] if compare[name[links] is_not constant[None]] begin[:] call[name[response].headers.extend, parameter[dictionary[[<ast.Constant object at 0x7da1b0341000>], [<ast.Call object at 0x7da1b0341030>]]]]
keyword[def] identifier[add_link_header] ( identifier[response] , identifier[links] ): literal[string] keyword[if] identifier[links] keyword[is] keyword[not] keyword[None] : identifier[response] . identifier[headers] . identifier[extend] ({ literal[string] : literal[string] . identifier[join] ([ literal[string] . identifier[format] ( identifier[l] , identifier[r] ) keyword[for] identifier[r] , identifier[l] keyword[in] identifier[links] . identifier[items] ()]) })
def add_link_header(response, links): """Add a Link HTTP header to a REST response. :param response: REST response instance :param links: Dictionary of links """ if links is not None: response.headers.extend({'Link': ', '.join(['<{0}>; rel="{1}"'.format(l, r) for (r, l) in links.items()])}) # depends on [control=['if'], data=['links']]
def tempput(local_path=None, remote_path=None, use_sudo=False, mirror_local_mode=False, mode=None): """Put a file to remote and remove it afterwards""" import warnings warnings.simplefilter('ignore', RuntimeWarning) if remote_path is None: remote_path = os.tempnam() put(local_path, remote_path, use_sudo, mirror_local_mode, mode) yield remote_path run("rm '{}'".format(remote_path))
def function[tempput, parameter[local_path, remote_path, use_sudo, mirror_local_mode, mode]]: constant[Put a file to remote and remove it afterwards] import module[warnings] call[name[warnings].simplefilter, parameter[constant[ignore], name[RuntimeWarning]]] if compare[name[remote_path] is constant[None]] begin[:] variable[remote_path] assign[=] call[name[os].tempnam, parameter[]] call[name[put], parameter[name[local_path], name[remote_path], name[use_sudo], name[mirror_local_mode], name[mode]]] <ast.Yield object at 0x7da1b09d12d0> call[name[run], parameter[call[constant[rm '{}'].format, parameter[name[remote_path]]]]]
keyword[def] identifier[tempput] ( identifier[local_path] = keyword[None] , identifier[remote_path] = keyword[None] , identifier[use_sudo] = keyword[False] , identifier[mirror_local_mode] = keyword[False] , identifier[mode] = keyword[None] ): literal[string] keyword[import] identifier[warnings] identifier[warnings] . identifier[simplefilter] ( literal[string] , identifier[RuntimeWarning] ) keyword[if] identifier[remote_path] keyword[is] keyword[None] : identifier[remote_path] = identifier[os] . identifier[tempnam] () identifier[put] ( identifier[local_path] , identifier[remote_path] , identifier[use_sudo] , identifier[mirror_local_mode] , identifier[mode] ) keyword[yield] identifier[remote_path] identifier[run] ( literal[string] . identifier[format] ( identifier[remote_path] ))
def tempput(local_path=None, remote_path=None, use_sudo=False, mirror_local_mode=False, mode=None): """Put a file to remote and remove it afterwards""" import warnings warnings.simplefilter('ignore', RuntimeWarning) if remote_path is None: remote_path = os.tempnam() # depends on [control=['if'], data=['remote_path']] put(local_path, remote_path, use_sudo, mirror_local_mode, mode) yield remote_path run("rm '{}'".format(remote_path))
def write_document(doc, fnm): """Write a Text document to file. Parameters ---------- doc: Text The document to save. fnm: str The filename to save the document """ with codecs.open(fnm, 'wb', 'ascii') as f: f.write(json.dumps(doc, indent=2))
def function[write_document, parameter[doc, fnm]]: constant[Write a Text document to file. Parameters ---------- doc: Text The document to save. fnm: str The filename to save the document ] with call[name[codecs].open, parameter[name[fnm], constant[wb], constant[ascii]]] begin[:] call[name[f].write, parameter[call[name[json].dumps, parameter[name[doc]]]]]
keyword[def] identifier[write_document] ( identifier[doc] , identifier[fnm] ): literal[string] keyword[with] identifier[codecs] . identifier[open] ( identifier[fnm] , literal[string] , literal[string] ) keyword[as] identifier[f] : identifier[f] . identifier[write] ( identifier[json] . identifier[dumps] ( identifier[doc] , identifier[indent] = literal[int] ))
def write_document(doc, fnm): """Write a Text document to file. Parameters ---------- doc: Text The document to save. fnm: str The filename to save the document """ with codecs.open(fnm, 'wb', 'ascii') as f: f.write(json.dumps(doc, indent=2)) # depends on [control=['with'], data=['f']]
def log(self, n=None, **kwargs): """ Run the repository log command Returns: str: output of log command (``hg log -l <n> <--kwarg=value>``) """ cmd = ['hg', 'log'] if n: cmd.extend('-l%d' % n) cmd.extend( (('--%s=%s' % (k, v)) for (k, v) in iteritems(kwargs)) ) return self.sh(cmd, shell=False)
def function[log, parameter[self, n]]: constant[ Run the repository log command Returns: str: output of log command (``hg log -l <n> <--kwarg=value>``) ] variable[cmd] assign[=] list[[<ast.Constant object at 0x7da1b2346830>, <ast.Constant object at 0x7da1b2345330>]] if name[n] begin[:] call[name[cmd].extend, parameter[binary_operation[constant[-l%d] <ast.Mod object at 0x7da2590d6920> name[n]]]] call[name[cmd].extend, parameter[<ast.GeneratorExp object at 0x7da1b2345750>]] return[call[name[self].sh, parameter[name[cmd]]]]
keyword[def] identifier[log] ( identifier[self] , identifier[n] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[cmd] =[ literal[string] , literal[string] ] keyword[if] identifier[n] : identifier[cmd] . identifier[extend] ( literal[string] % identifier[n] ) identifier[cmd] . identifier[extend] ( (( literal[string] %( identifier[k] , identifier[v] )) keyword[for] ( identifier[k] , identifier[v] ) keyword[in] identifier[iteritems] ( identifier[kwargs] )) ) keyword[return] identifier[self] . identifier[sh] ( identifier[cmd] , identifier[shell] = keyword[False] )
def log(self, n=None, **kwargs): """ Run the repository log command Returns: str: output of log command (``hg log -l <n> <--kwarg=value>``) """ cmd = ['hg', 'log'] if n: cmd.extend('-l%d' % n) # depends on [control=['if'], data=[]] cmd.extend(('--%s=%s' % (k, v) for (k, v) in iteritems(kwargs))) return self.sh(cmd, shell=False)
def result_tree(context, cl, request): """ Added 'filtered' param, so the template's js knows whether the results have been affected by a GET param or not. Only when the results are not filtered you can drag and sort the tree """ # Here I'm adding an extra col on pos 2 for the drag handlers headers = list(result_headers(cl)) headers.insert(1 if needs_checkboxes(context) else 0, { 'text': '+', 'sortable': True, 'url': request.path, 'tooltip': _('Return to ordered tree'), 'class_attrib': mark_safe(' class="oder-grabber"') }) return { 'filtered': not check_empty_dict(request.GET), 'result_hidden_fields': list(result_hidden_fields(cl)), 'result_headers': headers, 'results': list(results(cl)), }
def function[result_tree, parameter[context, cl, request]]: constant[ Added 'filtered' param, so the template's js knows whether the results have been affected by a GET param or not. Only when the results are not filtered you can drag and sort the tree ] variable[headers] assign[=] call[name[list], parameter[call[name[result_headers], parameter[name[cl]]]]] call[name[headers].insert, parameter[<ast.IfExp object at 0x7da1b20b4250>, dictionary[[<ast.Constant object at 0x7da1b20b6b30>, <ast.Constant object at 0x7da1b20b69e0>, <ast.Constant object at 0x7da1b20b63e0>, <ast.Constant object at 0x7da1b20b6260>, <ast.Constant object at 0x7da1b20b56c0>], [<ast.Constant object at 0x7da1b20b44f0>, <ast.Constant object at 0x7da1b20b5720>, <ast.Attribute object at 0x7da1b20b48e0>, <ast.Call object at 0x7da1b20b61d0>, <ast.Call object at 0x7da1b20b75b0>]]]] return[dictionary[[<ast.Constant object at 0x7da1b20b46a0>, <ast.Constant object at 0x7da1b20b5120>, <ast.Constant object at 0x7da1b20b6890>, <ast.Constant object at 0x7da1b20b4ee0>], [<ast.UnaryOp object at 0x7da1b20b49a0>, <ast.Call object at 0x7da1b20b4ca0>, <ast.Name object at 0x7da1b20b5b10>, <ast.Call object at 0x7da1b20b4940>]]]
keyword[def] identifier[result_tree] ( identifier[context] , identifier[cl] , identifier[request] ): literal[string] identifier[headers] = identifier[list] ( identifier[result_headers] ( identifier[cl] )) identifier[headers] . identifier[insert] ( literal[int] keyword[if] identifier[needs_checkboxes] ( identifier[context] ) keyword[else] literal[int] ,{ literal[string] : literal[string] , literal[string] : keyword[True] , literal[string] : identifier[request] . identifier[path] , literal[string] : identifier[_] ( literal[string] ), literal[string] : identifier[mark_safe] ( literal[string] ) }) keyword[return] { literal[string] : keyword[not] identifier[check_empty_dict] ( identifier[request] . identifier[GET] ), literal[string] : identifier[list] ( identifier[result_hidden_fields] ( identifier[cl] )), literal[string] : identifier[headers] , literal[string] : identifier[list] ( identifier[results] ( identifier[cl] )), }
def result_tree(context, cl, request): """ Added 'filtered' param, so the template's js knows whether the results have been affected by a GET param or not. Only when the results are not filtered you can drag and sort the tree """ # Here I'm adding an extra col on pos 2 for the drag handlers headers = list(result_headers(cl)) headers.insert(1 if needs_checkboxes(context) else 0, {'text': '+', 'sortable': True, 'url': request.path, 'tooltip': _('Return to ordered tree'), 'class_attrib': mark_safe(' class="oder-grabber"')}) return {'filtered': not check_empty_dict(request.GET), 'result_hidden_fields': list(result_hidden_fields(cl)), 'result_headers': headers, 'results': list(results(cl))}
def succeed(self, instance, action): """Returns if the task for the instance took place successfully """ uid = api.get_uid(instance) return self.objects.get(uid, {}).get(action, {}).get('success', False)
def function[succeed, parameter[self, instance, action]]: constant[Returns if the task for the instance took place successfully ] variable[uid] assign[=] call[name[api].get_uid, parameter[name[instance]]] return[call[call[call[name[self].objects.get, parameter[name[uid], dictionary[[], []]]].get, parameter[name[action], dictionary[[], []]]].get, parameter[constant[success], constant[False]]]]
keyword[def] identifier[succeed] ( identifier[self] , identifier[instance] , identifier[action] ): literal[string] identifier[uid] = identifier[api] . identifier[get_uid] ( identifier[instance] ) keyword[return] identifier[self] . identifier[objects] . identifier[get] ( identifier[uid] ,{}). identifier[get] ( identifier[action] ,{}). identifier[get] ( literal[string] , keyword[False] )
def succeed(self, instance, action): """Returns if the task for the instance took place successfully """ uid = api.get_uid(instance) return self.objects.get(uid, {}).get(action, {}).get('success', False)
def config(data_folder=settings.data_folder, logs_folder=settings.logs_folder, imgs_folder=settings.imgs_folder, cache_folder=settings.cache_folder, use_cache=settings.use_cache, log_file=settings.log_file, log_console=settings.log_console, log_level=settings.log_level, log_name=settings.log_name, log_filename=settings.log_filename, useful_tags_node=settings.useful_tags_node, useful_tags_path=settings.useful_tags_path, osm_xml_node_attrs=settings.osm_xml_node_attrs, osm_xml_node_tags=settings.osm_xml_node_tags, osm_xml_way_attrs=settings.osm_xml_way_attrs, osm_xml_way_tags=settings.osm_xml_way_tags, default_access=settings.default_access, default_crs=settings.default_crs, default_user_agent=settings.default_user_agent, default_referer=settings.default_referer, default_accept_language=settings.default_accept_language): """ Configure osmnx by setting the default global vars to desired values. Parameters --------- data_folder : string where to save and load data files logs_folder : string where to write the log files imgs_folder : string where to save figures cache_folder : string where to save the http response cache use_cache : bool if True, use a local cache to save/retrieve http responses instead of calling API repetitively for the same request URL log_file : bool if true, save log output to a log file in logs_folder log_console : bool if true, print log output to the console log_level : int one of the logger.level constants log_name : string name of the logger useful_tags_node : list a list of useful OSM tags to attempt to save from node elements useful_tags_path : list a list of useful OSM tags to attempt to save from path elements default_access : string default filter for OSM "access" key default_crs : string default CRS to set when creating graphs default_user_agent : string HTTP header user-agent default_referer : string HTTP header referer default_accept_language : string HTTP header accept-language Returns ------- None """ # set each global variable to the 
passed-in parameter value settings.use_cache = use_cache settings.cache_folder = cache_folder settings.data_folder = data_folder settings.imgs_folder = imgs_folder settings.logs_folder = logs_folder settings.log_console = log_console settings.log_file = log_file settings.log_level = log_level settings.log_name = log_name settings.log_filename = log_filename settings.useful_tags_node = useful_tags_node settings.useful_tags_path = useful_tags_path settings.useful_tags_node = list(set( useful_tags_node + osm_xml_node_attrs + osm_xml_node_tags)) settings.useful_tags_path = list(set( useful_tags_path + osm_xml_way_attrs + osm_xml_way_tags)) settings.osm_xml_node_attrs = osm_xml_node_attrs settings.osm_xml_node_tags = osm_xml_node_tags settings.osm_xml_way_attrs = osm_xml_way_attrs settings.osm_xml_way_tags = osm_xml_way_tags settings.default_access = default_access settings.default_crs = default_crs settings.default_user_agent = default_user_agent settings.default_referer = default_referer settings.default_accept_language = default_accept_language # if logging is turned on, log that we are configured if settings.log_file or settings.log_console: log('Configured osmnx')
def function[config, parameter[data_folder, logs_folder, imgs_folder, cache_folder, use_cache, log_file, log_console, log_level, log_name, log_filename, useful_tags_node, useful_tags_path, osm_xml_node_attrs, osm_xml_node_tags, osm_xml_way_attrs, osm_xml_way_tags, default_access, default_crs, default_user_agent, default_referer, default_accept_language]]: constant[ Configure osmnx by setting the default global vars to desired values. Parameters --------- data_folder : string where to save and load data files logs_folder : string where to write the log files imgs_folder : string where to save figures cache_folder : string where to save the http response cache use_cache : bool if True, use a local cache to save/retrieve http responses instead of calling API repetitively for the same request URL log_file : bool if true, save log output to a log file in logs_folder log_console : bool if true, print log output to the console log_level : int one of the logger.level constants log_name : string name of the logger useful_tags_node : list a list of useful OSM tags to attempt to save from node elements useful_tags_path : list a list of useful OSM tags to attempt to save from path elements default_access : string default filter for OSM "access" key default_crs : string default CRS to set when creating graphs default_user_agent : string HTTP header user-agent default_referer : string HTTP header referer default_accept_language : string HTTP header accept-language Returns ------- None ] name[settings].use_cache assign[=] name[use_cache] name[settings].cache_folder assign[=] name[cache_folder] name[settings].data_folder assign[=] name[data_folder] name[settings].imgs_folder assign[=] name[imgs_folder] name[settings].logs_folder assign[=] name[logs_folder] name[settings].log_console assign[=] name[log_console] name[settings].log_file assign[=] name[log_file] name[settings].log_level assign[=] name[log_level] name[settings].log_name assign[=] name[log_name] 
name[settings].log_filename assign[=] name[log_filename] name[settings].useful_tags_node assign[=] name[useful_tags_node] name[settings].useful_tags_path assign[=] name[useful_tags_path] name[settings].useful_tags_node assign[=] call[name[list], parameter[call[name[set], parameter[binary_operation[binary_operation[name[useful_tags_node] + name[osm_xml_node_attrs]] + name[osm_xml_node_tags]]]]]] name[settings].useful_tags_path assign[=] call[name[list], parameter[call[name[set], parameter[binary_operation[binary_operation[name[useful_tags_path] + name[osm_xml_way_attrs]] + name[osm_xml_way_tags]]]]]] name[settings].osm_xml_node_attrs assign[=] name[osm_xml_node_attrs] name[settings].osm_xml_node_tags assign[=] name[osm_xml_node_tags] name[settings].osm_xml_way_attrs assign[=] name[osm_xml_way_attrs] name[settings].osm_xml_way_tags assign[=] name[osm_xml_way_tags] name[settings].default_access assign[=] name[default_access] name[settings].default_crs assign[=] name[default_crs] name[settings].default_user_agent assign[=] name[default_user_agent] name[settings].default_referer assign[=] name[default_referer] name[settings].default_accept_language assign[=] name[default_accept_language] if <ast.BoolOp object at 0x7da1b216d810> begin[:] call[name[log], parameter[constant[Configured osmnx]]]
keyword[def] identifier[config] ( identifier[data_folder] = identifier[settings] . identifier[data_folder] , identifier[logs_folder] = identifier[settings] . identifier[logs_folder] , identifier[imgs_folder] = identifier[settings] . identifier[imgs_folder] , identifier[cache_folder] = identifier[settings] . identifier[cache_folder] , identifier[use_cache] = identifier[settings] . identifier[use_cache] , identifier[log_file] = identifier[settings] . identifier[log_file] , identifier[log_console] = identifier[settings] . identifier[log_console] , identifier[log_level] = identifier[settings] . identifier[log_level] , identifier[log_name] = identifier[settings] . identifier[log_name] , identifier[log_filename] = identifier[settings] . identifier[log_filename] , identifier[useful_tags_node] = identifier[settings] . identifier[useful_tags_node] , identifier[useful_tags_path] = identifier[settings] . identifier[useful_tags_path] , identifier[osm_xml_node_attrs] = identifier[settings] . identifier[osm_xml_node_attrs] , identifier[osm_xml_node_tags] = identifier[settings] . identifier[osm_xml_node_tags] , identifier[osm_xml_way_attrs] = identifier[settings] . identifier[osm_xml_way_attrs] , identifier[osm_xml_way_tags] = identifier[settings] . identifier[osm_xml_way_tags] , identifier[default_access] = identifier[settings] . identifier[default_access] , identifier[default_crs] = identifier[settings] . identifier[default_crs] , identifier[default_user_agent] = identifier[settings] . identifier[default_user_agent] , identifier[default_referer] = identifier[settings] . identifier[default_referer] , identifier[default_accept_language] = identifier[settings] . identifier[default_accept_language] ): literal[string] identifier[settings] . identifier[use_cache] = identifier[use_cache] identifier[settings] . identifier[cache_folder] = identifier[cache_folder] identifier[settings] . identifier[data_folder] = identifier[data_folder] identifier[settings] . 
identifier[imgs_folder] = identifier[imgs_folder] identifier[settings] . identifier[logs_folder] = identifier[logs_folder] identifier[settings] . identifier[log_console] = identifier[log_console] identifier[settings] . identifier[log_file] = identifier[log_file] identifier[settings] . identifier[log_level] = identifier[log_level] identifier[settings] . identifier[log_name] = identifier[log_name] identifier[settings] . identifier[log_filename] = identifier[log_filename] identifier[settings] . identifier[useful_tags_node] = identifier[useful_tags_node] identifier[settings] . identifier[useful_tags_path] = identifier[useful_tags_path] identifier[settings] . identifier[useful_tags_node] = identifier[list] ( identifier[set] ( identifier[useful_tags_node] + identifier[osm_xml_node_attrs] + identifier[osm_xml_node_tags] )) identifier[settings] . identifier[useful_tags_path] = identifier[list] ( identifier[set] ( identifier[useful_tags_path] + identifier[osm_xml_way_attrs] + identifier[osm_xml_way_tags] )) identifier[settings] . identifier[osm_xml_node_attrs] = identifier[osm_xml_node_attrs] identifier[settings] . identifier[osm_xml_node_tags] = identifier[osm_xml_node_tags] identifier[settings] . identifier[osm_xml_way_attrs] = identifier[osm_xml_way_attrs] identifier[settings] . identifier[osm_xml_way_tags] = identifier[osm_xml_way_tags] identifier[settings] . identifier[default_access] = identifier[default_access] identifier[settings] . identifier[default_crs] = identifier[default_crs] identifier[settings] . identifier[default_user_agent] = identifier[default_user_agent] identifier[settings] . identifier[default_referer] = identifier[default_referer] identifier[settings] . identifier[default_accept_language] = identifier[default_accept_language] keyword[if] identifier[settings] . identifier[log_file] keyword[or] identifier[settings] . identifier[log_console] : identifier[log] ( literal[string] )
def config(data_folder=settings.data_folder, logs_folder=settings.logs_folder, imgs_folder=settings.imgs_folder, cache_folder=settings.cache_folder, use_cache=settings.use_cache, log_file=settings.log_file, log_console=settings.log_console, log_level=settings.log_level, log_name=settings.log_name, log_filename=settings.log_filename, useful_tags_node=settings.useful_tags_node, useful_tags_path=settings.useful_tags_path, osm_xml_node_attrs=settings.osm_xml_node_attrs, osm_xml_node_tags=settings.osm_xml_node_tags, osm_xml_way_attrs=settings.osm_xml_way_attrs, osm_xml_way_tags=settings.osm_xml_way_tags, default_access=settings.default_access, default_crs=settings.default_crs, default_user_agent=settings.default_user_agent, default_referer=settings.default_referer, default_accept_language=settings.default_accept_language): """ Configure osmnx by setting the default global vars to desired values. Parameters --------- data_folder : string where to save and load data files logs_folder : string where to write the log files imgs_folder : string where to save figures cache_folder : string where to save the http response cache use_cache : bool if True, use a local cache to save/retrieve http responses instead of calling API repetitively for the same request URL log_file : bool if true, save log output to a log file in logs_folder log_console : bool if true, print log output to the console log_level : int one of the logger.level constants log_name : string name of the logger useful_tags_node : list a list of useful OSM tags to attempt to save from node elements useful_tags_path : list a list of useful OSM tags to attempt to save from path elements default_access : string default filter for OSM "access" key default_crs : string default CRS to set when creating graphs default_user_agent : string HTTP header user-agent default_referer : string HTTP header referer default_accept_language : string HTTP header accept-language Returns ------- None """ # set each global variable to the 
passed-in parameter value settings.use_cache = use_cache settings.cache_folder = cache_folder settings.data_folder = data_folder settings.imgs_folder = imgs_folder settings.logs_folder = logs_folder settings.log_console = log_console settings.log_file = log_file settings.log_level = log_level settings.log_name = log_name settings.log_filename = log_filename settings.useful_tags_node = useful_tags_node settings.useful_tags_path = useful_tags_path settings.useful_tags_node = list(set(useful_tags_node + osm_xml_node_attrs + osm_xml_node_tags)) settings.useful_tags_path = list(set(useful_tags_path + osm_xml_way_attrs + osm_xml_way_tags)) settings.osm_xml_node_attrs = osm_xml_node_attrs settings.osm_xml_node_tags = osm_xml_node_tags settings.osm_xml_way_attrs = osm_xml_way_attrs settings.osm_xml_way_tags = osm_xml_way_tags settings.default_access = default_access settings.default_crs = default_crs settings.default_user_agent = default_user_agent settings.default_referer = default_referer settings.default_accept_language = default_accept_language # if logging is turned on, log that we are configured if settings.log_file or settings.log_console: log('Configured osmnx') # depends on [control=['if'], data=[]]
def forward_for_single_feature_map( self, anchors, box_cls, box_regression): """ Arguments: anchors: list[BoxList] box_cls: tensor of size N, A * C, H, W box_regression: tensor of size N, A * 4, H, W """ device = box_cls.device N, _, H, W = box_cls.shape A = box_regression.size(1) // 4 C = box_cls.size(1) // A # put in the same format as anchors box_cls = permute_and_flatten(box_cls, N, A, C, H, W) box_cls = box_cls.sigmoid() box_regression = permute_and_flatten(box_regression, N, A, 4, H, W) box_regression = box_regression.reshape(N, -1, 4) num_anchors = A * H * W candidate_inds = box_cls > self.pre_nms_thresh pre_nms_top_n = candidate_inds.view(N, -1).sum(1) pre_nms_top_n = pre_nms_top_n.clamp(max=self.pre_nms_top_n) results = [] for per_box_cls, per_box_regression, per_pre_nms_top_n, \ per_candidate_inds, per_anchors in zip( box_cls, box_regression, pre_nms_top_n, candidate_inds, anchors): # Sort and select TopN # TODO most of this can be made out of the loop for # all images. # TODO:Yang: Not easy to do. Because the numbers of detections are # different in each image. Therefore, this part needs to be done # per image. per_box_cls = per_box_cls[per_candidate_inds] per_box_cls, top_k_indices = \ per_box_cls.topk(per_pre_nms_top_n, sorted=False) per_candidate_nonzeros = \ per_candidate_inds.nonzero()[top_k_indices, :] per_box_loc = per_candidate_nonzeros[:, 0] per_class = per_candidate_nonzeros[:, 1] per_class += 1 detections = self.box_coder.decode( per_box_regression[per_box_loc, :].view(-1, 4), per_anchors.bbox[per_box_loc, :].view(-1, 4) ) boxlist = BoxList(detections, per_anchors.size, mode="xyxy") boxlist.add_field("labels", per_class) boxlist.add_field("scores", per_box_cls) boxlist = boxlist.clip_to_image(remove_empty=False) boxlist = remove_small_boxes(boxlist, self.min_size) results.append(boxlist) return results
def function[forward_for_single_feature_map, parameter[self, anchors, box_cls, box_regression]]: constant[ Arguments: anchors: list[BoxList] box_cls: tensor of size N, A * C, H, W box_regression: tensor of size N, A * 4, H, W ] variable[device] assign[=] name[box_cls].device <ast.Tuple object at 0x7da18fe93a00> assign[=] name[box_cls].shape variable[A] assign[=] binary_operation[call[name[box_regression].size, parameter[constant[1]]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[4]] variable[C] assign[=] binary_operation[call[name[box_cls].size, parameter[constant[1]]] <ast.FloorDiv object at 0x7da2590d6bc0> name[A]] variable[box_cls] assign[=] call[name[permute_and_flatten], parameter[name[box_cls], name[N], name[A], name[C], name[H], name[W]]] variable[box_cls] assign[=] call[name[box_cls].sigmoid, parameter[]] variable[box_regression] assign[=] call[name[permute_and_flatten], parameter[name[box_regression], name[N], name[A], constant[4], name[H], name[W]]] variable[box_regression] assign[=] call[name[box_regression].reshape, parameter[name[N], <ast.UnaryOp object at 0x7da20cabd180>, constant[4]]] variable[num_anchors] assign[=] binary_operation[binary_operation[name[A] * name[H]] * name[W]] variable[candidate_inds] assign[=] compare[name[box_cls] greater[>] name[self].pre_nms_thresh] variable[pre_nms_top_n] assign[=] call[call[name[candidate_inds].view, parameter[name[N], <ast.UnaryOp object at 0x7da20cabce80>]].sum, parameter[constant[1]]] variable[pre_nms_top_n] assign[=] call[name[pre_nms_top_n].clamp, parameter[]] variable[results] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da20cabeec0>, <ast.Name object at 0x7da18fe90580>, <ast.Name object at 0x7da18fe92170>, <ast.Name object at 0x7da18fe93070>, <ast.Name object at 0x7da18fe923e0>]]] in starred[call[name[zip], parameter[name[box_cls], name[box_regression], name[pre_nms_top_n], name[candidate_inds], name[anchors]]]] begin[:] variable[per_box_cls] assign[=] 
call[name[per_box_cls]][name[per_candidate_inds]] <ast.Tuple object at 0x7da18fe91ff0> assign[=] call[name[per_box_cls].topk, parameter[name[per_pre_nms_top_n]]] variable[per_candidate_nonzeros] assign[=] call[call[name[per_candidate_inds].nonzero, parameter[]]][tuple[[<ast.Name object at 0x7da18fe919f0>, <ast.Slice object at 0x7da18fe92980>]]] variable[per_box_loc] assign[=] call[name[per_candidate_nonzeros]][tuple[[<ast.Slice object at 0x7da1b1b013c0>, <ast.Constant object at 0x7da1b1b00580>]]] variable[per_class] assign[=] call[name[per_candidate_nonzeros]][tuple[[<ast.Slice object at 0x7da1b1b005b0>, <ast.Constant object at 0x7da1b1b00130>]]] <ast.AugAssign object at 0x7da1b1b01f90> variable[detections] assign[=] call[name[self].box_coder.decode, parameter[call[call[name[per_box_regression]][tuple[[<ast.Name object at 0x7da1b1b00d00>, <ast.Slice object at 0x7da1b1b021d0>]]].view, parameter[<ast.UnaryOp object at 0x7da1b1b02e60>, constant[4]]], call[call[name[per_anchors].bbox][tuple[[<ast.Name object at 0x7da1b1b003a0>, <ast.Slice object at 0x7da1b1b03970>]]].view, parameter[<ast.UnaryOp object at 0x7da1b1b02f20>, constant[4]]]]] variable[boxlist] assign[=] call[name[BoxList], parameter[name[detections], name[per_anchors].size]] call[name[boxlist].add_field, parameter[constant[labels], name[per_class]]] call[name[boxlist].add_field, parameter[constant[scores], name[per_box_cls]]] variable[boxlist] assign[=] call[name[boxlist].clip_to_image, parameter[]] variable[boxlist] assign[=] call[name[remove_small_boxes], parameter[name[boxlist], name[self].min_size]] call[name[results].append, parameter[name[boxlist]]] return[name[results]]
keyword[def] identifier[forward_for_single_feature_map] ( identifier[self] , identifier[anchors] , identifier[box_cls] , identifier[box_regression] ): literal[string] identifier[device] = identifier[box_cls] . identifier[device] identifier[N] , identifier[_] , identifier[H] , identifier[W] = identifier[box_cls] . identifier[shape] identifier[A] = identifier[box_regression] . identifier[size] ( literal[int] )// literal[int] identifier[C] = identifier[box_cls] . identifier[size] ( literal[int] )// identifier[A] identifier[box_cls] = identifier[permute_and_flatten] ( identifier[box_cls] , identifier[N] , identifier[A] , identifier[C] , identifier[H] , identifier[W] ) identifier[box_cls] = identifier[box_cls] . identifier[sigmoid] () identifier[box_regression] = identifier[permute_and_flatten] ( identifier[box_regression] , identifier[N] , identifier[A] , literal[int] , identifier[H] , identifier[W] ) identifier[box_regression] = identifier[box_regression] . identifier[reshape] ( identifier[N] ,- literal[int] , literal[int] ) identifier[num_anchors] = identifier[A] * identifier[H] * identifier[W] identifier[candidate_inds] = identifier[box_cls] > identifier[self] . identifier[pre_nms_thresh] identifier[pre_nms_top_n] = identifier[candidate_inds] . identifier[view] ( identifier[N] ,- literal[int] ). identifier[sum] ( literal[int] ) identifier[pre_nms_top_n] = identifier[pre_nms_top_n] . identifier[clamp] ( identifier[max] = identifier[self] . 
identifier[pre_nms_top_n] ) identifier[results] =[] keyword[for] identifier[per_box_cls] , identifier[per_box_regression] , identifier[per_pre_nms_top_n] , identifier[per_candidate_inds] , identifier[per_anchors] keyword[in] identifier[zip] ( identifier[box_cls] , identifier[box_regression] , identifier[pre_nms_top_n] , identifier[candidate_inds] , identifier[anchors] ): identifier[per_box_cls] = identifier[per_box_cls] [ identifier[per_candidate_inds] ] identifier[per_box_cls] , identifier[top_k_indices] = identifier[per_box_cls] . identifier[topk] ( identifier[per_pre_nms_top_n] , identifier[sorted] = keyword[False] ) identifier[per_candidate_nonzeros] = identifier[per_candidate_inds] . identifier[nonzero] ()[ identifier[top_k_indices] ,:] identifier[per_box_loc] = identifier[per_candidate_nonzeros] [:, literal[int] ] identifier[per_class] = identifier[per_candidate_nonzeros] [:, literal[int] ] identifier[per_class] += literal[int] identifier[detections] = identifier[self] . identifier[box_coder] . identifier[decode] ( identifier[per_box_regression] [ identifier[per_box_loc] ,:]. identifier[view] (- literal[int] , literal[int] ), identifier[per_anchors] . identifier[bbox] [ identifier[per_box_loc] ,:]. identifier[view] (- literal[int] , literal[int] ) ) identifier[boxlist] = identifier[BoxList] ( identifier[detections] , identifier[per_anchors] . identifier[size] , identifier[mode] = literal[string] ) identifier[boxlist] . identifier[add_field] ( literal[string] , identifier[per_class] ) identifier[boxlist] . identifier[add_field] ( literal[string] , identifier[per_box_cls] ) identifier[boxlist] = identifier[boxlist] . identifier[clip_to_image] ( identifier[remove_empty] = keyword[False] ) identifier[boxlist] = identifier[remove_small_boxes] ( identifier[boxlist] , identifier[self] . identifier[min_size] ) identifier[results] . identifier[append] ( identifier[boxlist] ) keyword[return] identifier[results]
def forward_for_single_feature_map(self, anchors, box_cls, box_regression): """ Arguments: anchors: list[BoxList] box_cls: tensor of size N, A * C, H, W box_regression: tensor of size N, A * 4, H, W """ device = box_cls.device (N, _, H, W) = box_cls.shape A = box_regression.size(1) // 4 C = box_cls.size(1) // A # put in the same format as anchors box_cls = permute_and_flatten(box_cls, N, A, C, H, W) box_cls = box_cls.sigmoid() box_regression = permute_and_flatten(box_regression, N, A, 4, H, W) box_regression = box_regression.reshape(N, -1, 4) num_anchors = A * H * W candidate_inds = box_cls > self.pre_nms_thresh pre_nms_top_n = candidate_inds.view(N, -1).sum(1) pre_nms_top_n = pre_nms_top_n.clamp(max=self.pre_nms_top_n) results = [] for (per_box_cls, per_box_regression, per_pre_nms_top_n, per_candidate_inds, per_anchors) in zip(box_cls, box_regression, pre_nms_top_n, candidate_inds, anchors): # Sort and select TopN # TODO most of this can be made out of the loop for # all images. # TODO:Yang: Not easy to do. Because the numbers of detections are # different in each image. Therefore, this part needs to be done # per image. per_box_cls = per_box_cls[per_candidate_inds] (per_box_cls, top_k_indices) = per_box_cls.topk(per_pre_nms_top_n, sorted=False) per_candidate_nonzeros = per_candidate_inds.nonzero()[top_k_indices, :] per_box_loc = per_candidate_nonzeros[:, 0] per_class = per_candidate_nonzeros[:, 1] per_class += 1 detections = self.box_coder.decode(per_box_regression[per_box_loc, :].view(-1, 4), per_anchors.bbox[per_box_loc, :].view(-1, 4)) boxlist = BoxList(detections, per_anchors.size, mode='xyxy') boxlist.add_field('labels', per_class) boxlist.add_field('scores', per_box_cls) boxlist = boxlist.clip_to_image(remove_empty=False) boxlist = remove_small_boxes(boxlist, self.min_size) results.append(boxlist) # depends on [control=['for'], data=[]] return results
def get_auto_config(self): """ Walk over all available auto_config methods, passing them the current environment and seeing if they return a configuration URL """ methods = [m for m in dir(self) if m.startswith('auto_config_')] for method_name in sorted(methods): auto_config_method = getattr(self, method_name) url = auto_config_method(self.env) if url: return url
def function[get_auto_config, parameter[self]]: constant[ Walk over all available auto_config methods, passing them the current environment and seeing if they return a configuration URL ] variable[methods] assign[=] <ast.ListComp object at 0x7da1b24fc6a0> for taget[name[method_name]] in starred[call[name[sorted], parameter[name[methods]]]] begin[:] variable[auto_config_method] assign[=] call[name[getattr], parameter[name[self], name[method_name]]] variable[url] assign[=] call[name[auto_config_method], parameter[name[self].env]] if name[url] begin[:] return[name[url]]
keyword[def] identifier[get_auto_config] ( identifier[self] ): literal[string] identifier[methods] =[ identifier[m] keyword[for] identifier[m] keyword[in] identifier[dir] ( identifier[self] ) keyword[if] identifier[m] . identifier[startswith] ( literal[string] )] keyword[for] identifier[method_name] keyword[in] identifier[sorted] ( identifier[methods] ): identifier[auto_config_method] = identifier[getattr] ( identifier[self] , identifier[method_name] ) identifier[url] = identifier[auto_config_method] ( identifier[self] . identifier[env] ) keyword[if] identifier[url] : keyword[return] identifier[url]
def get_auto_config(self): """ Walk over all available auto_config methods, passing them the current environment and seeing if they return a configuration URL """ methods = [m for m in dir(self) if m.startswith('auto_config_')] for method_name in sorted(methods): auto_config_method = getattr(self, method_name) url = auto_config_method(self.env) if url: return url # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['method_name']]
def autoconf(self): """Implements Munin Plugin Auto-Configuration Option. @return: True if plugin can be auto-configured, False otherwise. """ apcinfo = APCinfo(self._host, self._port, self._user, self._password, self._monpath, self._ssl) return apcinfo is not None
def function[autoconf, parameter[self]]: constant[Implements Munin Plugin Auto-Configuration Option. @return: True if plugin can be auto-configured, False otherwise. ] variable[apcinfo] assign[=] call[name[APCinfo], parameter[name[self]._host, name[self]._port, name[self]._user, name[self]._password, name[self]._monpath, name[self]._ssl]] return[compare[name[apcinfo] is_not constant[None]]]
keyword[def] identifier[autoconf] ( identifier[self] ): literal[string] identifier[apcinfo] = identifier[APCinfo] ( identifier[self] . identifier[_host] , identifier[self] . identifier[_port] , identifier[self] . identifier[_user] , identifier[self] . identifier[_password] , identifier[self] . identifier[_monpath] , identifier[self] . identifier[_ssl] ) keyword[return] identifier[apcinfo] keyword[is] keyword[not] keyword[None]
def autoconf(self): """Implements Munin Plugin Auto-Configuration Option. @return: True if plugin can be auto-configured, False otherwise. """ apcinfo = APCinfo(self._host, self._port, self._user, self._password, self._monpath, self._ssl) return apcinfo is not None
def _get_submissions(course_factory, submission_manager, user_manager, translations, courseid, taskid, with_input, submissionid=None): """ Helper for the GET methods of the two following classes """ try: course = course_factory.get_course(courseid) except: raise APINotFound("Course not found") if not user_manager.course_is_open_to_user(course, lti=False): raise APIForbidden("You are not registered to this course") try: task = course.get_task(taskid) except: raise APINotFound("Task not found") if submissionid is None: submissions = submission_manager.get_user_submissions(task) else: try: submissions = [submission_manager.get_submission(submissionid)] except: raise APINotFound("Submission not found") if submissions[0]["taskid"] != task.get_id() or submissions[0]["courseid"] != course.get_id(): raise APINotFound("Submission not found") output = [] for submission in submissions: submission = submission_manager.get_feedback_from_submission( submission, show_everything=user_manager.has_staff_rights_on_course(course, user_manager.session_username()), translation=translations.get(user_manager.session_language(), gettext.NullTranslations()) ) data = { "id": str(submission["_id"]), "submitted_on": str(submission["submitted_on"]), "status": submission["status"] } if with_input: data["input"] = submission_manager.get_input_from_submission(submission, True) # base64 encode file to allow JSON encoding for d in data["input"]: if isinstance(d, dict) and d.keys() == {"filename", "value"}: d["value"] = base64.b64encode(d["value"]).decode("utf8") if submission["status"] == "done": data["grade"] = submission.get("grade", 0) data["result"] = submission.get("result", "crash") data["feedback"] = submission.get("text", "") data["problems_feedback"] = submission.get("problems", {}) output.append(data) return 200, output
def function[_get_submissions, parameter[course_factory, submission_manager, user_manager, translations, courseid, taskid, with_input, submissionid]]: constant[ Helper for the GET methods of the two following classes ] <ast.Try object at 0x7da1b2345f00> if <ast.UnaryOp object at 0x7da1b2344460> begin[:] <ast.Raise object at 0x7da1b23475e0> <ast.Try object at 0x7da1b2346260> if compare[name[submissionid] is constant[None]] begin[:] variable[submissions] assign[=] call[name[submission_manager].get_user_submissions, parameter[name[task]]] variable[output] assign[=] list[[]] for taget[name[submission]] in starred[name[submissions]] begin[:] variable[submission] assign[=] call[name[submission_manager].get_feedback_from_submission, parameter[name[submission]]] variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b2347dc0>, <ast.Constant object at 0x7da1b2346ce0>, <ast.Constant object at 0x7da1b2347fd0>], [<ast.Call object at 0x7da1b2345b40>, <ast.Call object at 0x7da1b2346920>, <ast.Subscript object at 0x7da1b2345d50>]] if name[with_input] begin[:] call[name[data]][constant[input]] assign[=] call[name[submission_manager].get_input_from_submission, parameter[name[submission], constant[True]]] for taget[name[d]] in starred[call[name[data]][constant[input]]] begin[:] if <ast.BoolOp object at 0x7da1b23457b0> begin[:] call[name[d]][constant[value]] assign[=] call[call[name[base64].b64encode, parameter[call[name[d]][constant[value]]]].decode, parameter[constant[utf8]]] if compare[call[name[submission]][constant[status]] equal[==] constant[done]] begin[:] call[name[data]][constant[grade]] assign[=] call[name[submission].get, parameter[constant[grade], constant[0]]] call[name[data]][constant[result]] assign[=] call[name[submission].get, parameter[constant[result], constant[crash]]] call[name[data]][constant[feedback]] assign[=] call[name[submission].get, parameter[constant[text], constant[]]] call[name[data]][constant[problems_feedback]] assign[=] 
call[name[submission].get, parameter[constant[problems], dictionary[[], []]]] call[name[output].append, parameter[name[data]]] return[tuple[[<ast.Constant object at 0x7da1b2347df0>, <ast.Name object at 0x7da1b2347f40>]]]
keyword[def] identifier[_get_submissions] ( identifier[course_factory] , identifier[submission_manager] , identifier[user_manager] , identifier[translations] , identifier[courseid] , identifier[taskid] , identifier[with_input] , identifier[submissionid] = keyword[None] ): literal[string] keyword[try] : identifier[course] = identifier[course_factory] . identifier[get_course] ( identifier[courseid] ) keyword[except] : keyword[raise] identifier[APINotFound] ( literal[string] ) keyword[if] keyword[not] identifier[user_manager] . identifier[course_is_open_to_user] ( identifier[course] , identifier[lti] = keyword[False] ): keyword[raise] identifier[APIForbidden] ( literal[string] ) keyword[try] : identifier[task] = identifier[course] . identifier[get_task] ( identifier[taskid] ) keyword[except] : keyword[raise] identifier[APINotFound] ( literal[string] ) keyword[if] identifier[submissionid] keyword[is] keyword[None] : identifier[submissions] = identifier[submission_manager] . identifier[get_user_submissions] ( identifier[task] ) keyword[else] : keyword[try] : identifier[submissions] =[ identifier[submission_manager] . identifier[get_submission] ( identifier[submissionid] )] keyword[except] : keyword[raise] identifier[APINotFound] ( literal[string] ) keyword[if] identifier[submissions] [ literal[int] ][ literal[string] ]!= identifier[task] . identifier[get_id] () keyword[or] identifier[submissions] [ literal[int] ][ literal[string] ]!= identifier[course] . identifier[get_id] (): keyword[raise] identifier[APINotFound] ( literal[string] ) identifier[output] =[] keyword[for] identifier[submission] keyword[in] identifier[submissions] : identifier[submission] = identifier[submission_manager] . identifier[get_feedback_from_submission] ( identifier[submission] , identifier[show_everything] = identifier[user_manager] . identifier[has_staff_rights_on_course] ( identifier[course] , identifier[user_manager] . 
identifier[session_username] ()), identifier[translation] = identifier[translations] . identifier[get] ( identifier[user_manager] . identifier[session_language] (), identifier[gettext] . identifier[NullTranslations] ()) ) identifier[data] ={ literal[string] : identifier[str] ( identifier[submission] [ literal[string] ]), literal[string] : identifier[str] ( identifier[submission] [ literal[string] ]), literal[string] : identifier[submission] [ literal[string] ] } keyword[if] identifier[with_input] : identifier[data] [ literal[string] ]= identifier[submission_manager] . identifier[get_input_from_submission] ( identifier[submission] , keyword[True] ) keyword[for] identifier[d] keyword[in] identifier[data] [ literal[string] ]: keyword[if] identifier[isinstance] ( identifier[d] , identifier[dict] ) keyword[and] identifier[d] . identifier[keys] ()=={ literal[string] , literal[string] }: identifier[d] [ literal[string] ]= identifier[base64] . identifier[b64encode] ( identifier[d] [ literal[string] ]). identifier[decode] ( literal[string] ) keyword[if] identifier[submission] [ literal[string] ]== literal[string] : identifier[data] [ literal[string] ]= identifier[submission] . identifier[get] ( literal[string] , literal[int] ) identifier[data] [ literal[string] ]= identifier[submission] . identifier[get] ( literal[string] , literal[string] ) identifier[data] [ literal[string] ]= identifier[submission] . identifier[get] ( literal[string] , literal[string] ) identifier[data] [ literal[string] ]= identifier[submission] . identifier[get] ( literal[string] ,{}) identifier[output] . identifier[append] ( identifier[data] ) keyword[return] literal[int] , identifier[output]
def _get_submissions(course_factory, submission_manager, user_manager, translations, courseid, taskid, with_input, submissionid=None): """ Helper for the GET methods of the two following classes """ try: course = course_factory.get_course(courseid) # depends on [control=['try'], data=[]] except: raise APINotFound('Course not found') # depends on [control=['except'], data=[]] if not user_manager.course_is_open_to_user(course, lti=False): raise APIForbidden('You are not registered to this course') # depends on [control=['if'], data=[]] try: task = course.get_task(taskid) # depends on [control=['try'], data=[]] except: raise APINotFound('Task not found') # depends on [control=['except'], data=[]] if submissionid is None: submissions = submission_manager.get_user_submissions(task) # depends on [control=['if'], data=[]] else: try: submissions = [submission_manager.get_submission(submissionid)] # depends on [control=['try'], data=[]] except: raise APINotFound('Submission not found') # depends on [control=['except'], data=[]] if submissions[0]['taskid'] != task.get_id() or submissions[0]['courseid'] != course.get_id(): raise APINotFound('Submission not found') # depends on [control=['if'], data=[]] output = [] for submission in submissions: submission = submission_manager.get_feedback_from_submission(submission, show_everything=user_manager.has_staff_rights_on_course(course, user_manager.session_username()), translation=translations.get(user_manager.session_language(), gettext.NullTranslations())) data = {'id': str(submission['_id']), 'submitted_on': str(submission['submitted_on']), 'status': submission['status']} if with_input: data['input'] = submission_manager.get_input_from_submission(submission, True) # base64 encode file to allow JSON encoding for d in data['input']: if isinstance(d, dict) and d.keys() == {'filename', 'value'}: d['value'] = base64.b64encode(d['value']).decode('utf8') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['d']] # 
depends on [control=['if'], data=[]] if submission['status'] == 'done': data['grade'] = submission.get('grade', 0) data['result'] = submission.get('result', 'crash') data['feedback'] = submission.get('text', '') data['problems_feedback'] = submission.get('problems', {}) # depends on [control=['if'], data=[]] output.append(data) # depends on [control=['for'], data=['submission']] return (200, output)
def _ExtractYahooSearchQuery(self, url): """Extracts a search query from a Yahoo search URL. Examples: https://search.yahoo.com/search?p=query https://search.yahoo.com/search;?p=query Args: url (str): URL. Returns: str: search query or None if no query was found. """ if 'p=' not in url: return None _, _, line = url.partition('p=') before_and, _, _ = line.partition('&') if not before_and: return None yahoo_search_url = before_and.split()[0] return yahoo_search_url.replace('+', ' ')
def function[_ExtractYahooSearchQuery, parameter[self, url]]: constant[Extracts a search query from a Yahoo search URL. Examples: https://search.yahoo.com/search?p=query https://search.yahoo.com/search;?p=query Args: url (str): URL. Returns: str: search query or None if no query was found. ] if compare[constant[p=] <ast.NotIn object at 0x7da2590d7190> name[url]] begin[:] return[constant[None]] <ast.Tuple object at 0x7da18bc73bb0> assign[=] call[name[url].partition, parameter[constant[p=]]] <ast.Tuple object at 0x7da20e9547c0> assign[=] call[name[line].partition, parameter[constant[&]]] if <ast.UnaryOp object at 0x7da20e954790> begin[:] return[constant[None]] variable[yahoo_search_url] assign[=] call[call[name[before_and].split, parameter[]]][constant[0]] return[call[name[yahoo_search_url].replace, parameter[constant[+], constant[ ]]]]
keyword[def] identifier[_ExtractYahooSearchQuery] ( identifier[self] , identifier[url] ): literal[string] keyword[if] literal[string] keyword[not] keyword[in] identifier[url] : keyword[return] keyword[None] identifier[_] , identifier[_] , identifier[line] = identifier[url] . identifier[partition] ( literal[string] ) identifier[before_and] , identifier[_] , identifier[_] = identifier[line] . identifier[partition] ( literal[string] ) keyword[if] keyword[not] identifier[before_and] : keyword[return] keyword[None] identifier[yahoo_search_url] = identifier[before_and] . identifier[split] ()[ literal[int] ] keyword[return] identifier[yahoo_search_url] . identifier[replace] ( literal[string] , literal[string] )
def _ExtractYahooSearchQuery(self, url): """Extracts a search query from a Yahoo search URL. Examples: https://search.yahoo.com/search?p=query https://search.yahoo.com/search;?p=query Args: url (str): URL. Returns: str: search query or None if no query was found. """ if 'p=' not in url: return None # depends on [control=['if'], data=[]] (_, _, line) = url.partition('p=') (before_and, _, _) = line.partition('&') if not before_and: return None # depends on [control=['if'], data=[]] yahoo_search_url = before_and.split()[0] return yahoo_search_url.replace('+', ' ')
def degrees(x): """ Convert radians to degrees """ if isinstance(x, UncertainFunction): mcpts = np.degrees(x._mcpts) return UncertainFunction(mcpts) else: return np.degrees(x)
def function[degrees, parameter[x]]: constant[ Convert radians to degrees ] if call[name[isinstance], parameter[name[x], name[UncertainFunction]]] begin[:] variable[mcpts] assign[=] call[name[np].degrees, parameter[name[x]._mcpts]] return[call[name[UncertainFunction], parameter[name[mcpts]]]]
keyword[def] identifier[degrees] ( identifier[x] ): literal[string] keyword[if] identifier[isinstance] ( identifier[x] , identifier[UncertainFunction] ): identifier[mcpts] = identifier[np] . identifier[degrees] ( identifier[x] . identifier[_mcpts] ) keyword[return] identifier[UncertainFunction] ( identifier[mcpts] ) keyword[else] : keyword[return] identifier[np] . identifier[degrees] ( identifier[x] )
def degrees(x): """ Convert radians to degrees """ if isinstance(x, UncertainFunction): mcpts = np.degrees(x._mcpts) return UncertainFunction(mcpts) # depends on [control=['if'], data=[]] else: return np.degrees(x)
def filter_osm_file(): """ Downloads (and compiles) osmfilter tool from web and calls that osmfilter to only filter out only the road elements. """ print_info('Filtering OSM file...') start_time = time.time() if check_osmfilter(): # params = '--keep="highway=motorway =motorway_link =trunk =trunk_link =primary =primary_link =secondary' \ # ' =secondary_link =tertiary =tertiary_link =unclassified =unclassified_link =residential =residential_link' \ # ' =living_street" --drop="access=no"' params = config.osm_filter_params command = './osmfilter' if platform.system() == 'Linux' else 'osmfilter.exe' if platform.system() == 'Linux': filter_command = '%s "%s" %s | pv > "%s"' % (command, config.osm_map_filename, params, config.filtered_osm_filename) else: filter_command = '%s "%s" %s > "%s"' % ( command, config.osm_map_filename, params, config.filtered_osm_filename) os.system(filter_command) else: print_info('Osmfilter not available. Exiting.') exit(1) print_info('Filtering finished. (%.2f secs)' % (time.time() - start_time))
def function[filter_osm_file, parameter[]]: constant[ Downloads (and compiles) osmfilter tool from web and calls that osmfilter to only filter out only the road elements. ] call[name[print_info], parameter[constant[Filtering OSM file...]]] variable[start_time] assign[=] call[name[time].time, parameter[]] if call[name[check_osmfilter], parameter[]] begin[:] variable[params] assign[=] name[config].osm_filter_params variable[command] assign[=] <ast.IfExp object at 0x7da20c6abcd0> if compare[call[name[platform].system, parameter[]] equal[==] constant[Linux]] begin[:] variable[filter_command] assign[=] binary_operation[constant[%s "%s" %s | pv > "%s"] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c6a8220>, <ast.Attribute object at 0x7da20c6aaf20>, <ast.Name object at 0x7da20c6a9660>, <ast.Attribute object at 0x7da20c6a8730>]]] call[name[os].system, parameter[name[filter_command]]] call[name[print_info], parameter[binary_operation[constant[Filtering finished. (%.2f secs)] <ast.Mod object at 0x7da2590d6920> binary_operation[call[name[time].time, parameter[]] - name[start_time]]]]]
keyword[def] identifier[filter_osm_file] (): literal[string] identifier[print_info] ( literal[string] ) identifier[start_time] = identifier[time] . identifier[time] () keyword[if] identifier[check_osmfilter] (): identifier[params] = identifier[config] . identifier[osm_filter_params] identifier[command] = literal[string] keyword[if] identifier[platform] . identifier[system] ()== literal[string] keyword[else] literal[string] keyword[if] identifier[platform] . identifier[system] ()== literal[string] : identifier[filter_command] = literal[string] %( identifier[command] , identifier[config] . identifier[osm_map_filename] , identifier[params] , identifier[config] . identifier[filtered_osm_filename] ) keyword[else] : identifier[filter_command] = literal[string] %( identifier[command] , identifier[config] . identifier[osm_map_filename] , identifier[params] , identifier[config] . identifier[filtered_osm_filename] ) identifier[os] . identifier[system] ( identifier[filter_command] ) keyword[else] : identifier[print_info] ( literal[string] ) identifier[exit] ( literal[int] ) identifier[print_info] ( literal[string] %( identifier[time] . identifier[time] ()- identifier[start_time] ))
def filter_osm_file(): """ Downloads (and compiles) osmfilter tool from web and calls that osmfilter to only filter out only the road elements. """ print_info('Filtering OSM file...') start_time = time.time() if check_osmfilter(): # params = '--keep="highway=motorway =motorway_link =trunk =trunk_link =primary =primary_link =secondary' \ # ' =secondary_link =tertiary =tertiary_link =unclassified =unclassified_link =residential =residential_link' \ # ' =living_street" --drop="access=no"' params = config.osm_filter_params command = './osmfilter' if platform.system() == 'Linux' else 'osmfilter.exe' if platform.system() == 'Linux': filter_command = '%s "%s" %s | pv > "%s"' % (command, config.osm_map_filename, params, config.filtered_osm_filename) # depends on [control=['if'], data=[]] else: filter_command = '%s "%s" %s > "%s"' % (command, config.osm_map_filename, params, config.filtered_osm_filename) os.system(filter_command) # depends on [control=['if'], data=[]] else: print_info('Osmfilter not available. Exiting.') exit(1) print_info('Filtering finished. (%.2f secs)' % (time.time() - start_time))
def iterrepos(self): """ A generator function that yields a (repo, [items]) tuple for each non-empty repo. """ for repo, items in self.repo_items_hash.iteritems(): if items: yield (repo, items)
def function[iterrepos, parameter[self]]: constant[ A generator function that yields a (repo, [items]) tuple for each non-empty repo. ] for taget[tuple[[<ast.Name object at 0x7da1b254d0c0>, <ast.Name object at 0x7da1b254c640>]]] in starred[call[name[self].repo_items_hash.iteritems, parameter[]]] begin[:] if name[items] begin[:] <ast.Yield object at 0x7da1b254d540>
keyword[def] identifier[iterrepos] ( identifier[self] ): literal[string] keyword[for] identifier[repo] , identifier[items] keyword[in] identifier[self] . identifier[repo_items_hash] . identifier[iteritems] (): keyword[if] identifier[items] : keyword[yield] ( identifier[repo] , identifier[items] )
def iterrepos(self): """ A generator function that yields a (repo, [items]) tuple for each non-empty repo. """ for (repo, items) in self.repo_items_hash.iteritems(): if items: yield (repo, items) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
def check(self, value, namespace): """ See whether the TypeVar is bound for the first time or is met with _exactly_ the same type as previously. That type must also obey the TypeVar's bound, if any. Everything else is a type error. """ return namespace.is_compatible(self.typevar, type(value))
def function[check, parameter[self, value, namespace]]: constant[ See whether the TypeVar is bound for the first time or is met with _exactly_ the same type as previously. That type must also obey the TypeVar's bound, if any. Everything else is a type error. ] return[call[name[namespace].is_compatible, parameter[name[self].typevar, call[name[type], parameter[name[value]]]]]]
keyword[def] identifier[check] ( identifier[self] , identifier[value] , identifier[namespace] ): literal[string] keyword[return] identifier[namespace] . identifier[is_compatible] ( identifier[self] . identifier[typevar] , identifier[type] ( identifier[value] ))
def check(self, value, namespace): """ See whether the TypeVar is bound for the first time or is met with _exactly_ the same type as previously. That type must also obey the TypeVar's bound, if any. Everything else is a type error. """ return namespace.is_compatible(self.typevar, type(value))
def run_supy( df_forcing: pandas.DataFrame, df_state_init: pandas.DataFrame, save_state=False, n_yr=10, )->Tuple[pandas.DataFrame, pandas.DataFrame]: '''Perform supy simulation. Parameters ---------- df_forcing : pandas.DataFrame forcing data for all grids in `df_state_init`. df_state_init : pandas.DataFrame initial model states; or a collection of model states with multiple timestamps, whose last temporal record will be used as the initial model states. save_state : bool, optional flag for saving model states at each time step, which can be useful in diagnosing model runtime performance or performing a restart run. (the default is False, which instructs supy not to save runtime model states). n_yr : int, optional chunk size (`n_yr` years) to split simulation periods so memory usage can be reduced. (the default is 10, which implies 10-year forcing chunks used in simulations). Returns ------- df_output, df_state_final : Tuple[pandas.DataFrame, pandas.DataFrame] - df_output: `output results <df_output_var>` - df_state_final: `final model states <df_state_var>` Examples -------- >>> df_output, df_state_final = supy.run_supy(df_forcing, df_state_init) ''' # save df_init without changing its original data # df.copy() in pandas does work as a standard python deepcopy df_init = df_state_init.copy() # retrieve the last temporal record as `df_init` # if a `datetime` level existing in the index if df_init.index.nlevels > 1: idx_dt = df_init.index.get_level_values('datetime').unique() dt_last = idx_dt.max() df_init = df_init.loc[dt_last] # add placeholder variables for df_forcing # `metforcingdata_grid` and `ts5mindata_ir` are used by AnOHM and ESTM, respectively # they are now temporarily disabled in supy df_forcing = df_forcing\ .assign( metforcingdata_grid=0, ts5mindata_ir=0, )\ .rename( # remanae is a workaround to resolve naming inconsistency between # suews fortran code interface and input forcing file hearders columns={ '%' + 'iy': 'iy', 'id': 'id', 'it': 'it', 'imin': 
'imin', 'qn': 'qn1_obs', 'qh': 'qh_obs', 'qe': 'qe', 'qs': 'qs_obs', 'qf': 'qf_obs', 'U': 'avu1', 'RH': 'avrh', 'Tair': 'temp_c', 'pres': 'press_hpa', 'rain': 'precip', 'kdown': 'avkdn', 'snow': 'snow_obs', 'ldown': 'ldown_obs', 'fcld': 'fcld_obs', 'Wuh': 'wu_m3', 'xsmd': 'xsmd', 'lai': 'lai_obs', 'kdiff': 'kdiff', 'kdir': 'kdir', 'wdir': 'wdir', } ) # grid list determined by initial states list_grid = df_init.index # initialise dicts for holding results and model states dict_state = {} dict_output = {} # initial and final tsteps retrieved from forcing data tstep_init = df_forcing.index[0] tstep_final = df_forcing.index[-1] # tstep size retrieved from forcing data freq = df_forcing.index.freq # dict_state is used to save model states for later use dict_state = { # (t_start, grid): series_state_init.to_dict() (tstep_init, grid): pack_grid_dict(series_state_init) for grid, series_state_init in df_init.iterrows() } # remove 'problems.txt' if Path('problems.txt').exists(): os.remove('problems.txt') if save_state: # use slower more functional single step wrapper # convert df to dict with `itertuples` for better performance dict_forcing = {row.Index: row._asdict() for row in df_forcing.itertuples()} for tstep in df_forcing.index: # temporal loop # initialise output of tstep: # load met_forcing if the same across all grids: met_forcing_tstep = dict_forcing[tstep] # spatial loop for grid in list_grid: dict_state_start = dict_state[(tstep, grid)] # calculation at one step: # series_state_end, series_output_tstep = suews_cal_tstep_df( # series_state_start, met_forcing_tstep) dict_state_end, dict_output_tstep = suews_cal_tstep( dict_state_start, met_forcing_tstep) # update output & model state at tstep for the current grid dict_output.update({(tstep, grid): dict_output_tstep}) dict_state.update({(tstep + 1*freq, grid): dict_state_end}) # pack results as easier DataFrames df_output = pack_df_output(dict_output).swaplevel(0, 1) # drop unnecessary 'datetime' as it is already 
included in the index df_output = df_output.drop(columns=['datetime'], level=0) df_state_final = pack_df_state(dict_state).swaplevel(0, 1) else: # for multi-year run, reduce the whole df_forcing into {n_yr}-year chunks for less memory consumption grp_forcing_yr = df_forcing.groupby(df_forcing.index.year // n_yr) if len(grp_forcing_yr) > 1: df_state_init_yr = df_state_init.copy() list_df_output = [] list_df_state = [] for grp in grp_forcing_yr.groups: # get forcing of a specific year df_forcing_yr = grp_forcing_yr.get_group(grp) # run supy: actual execution done in the `else` clause below df_output_yr, df_state_final_yr = run_supy( df_forcing_yr, df_state_init_yr) df_state_init_yr = df_state_final_yr.copy() # collect results list_df_output.append(df_output_yr) list_df_state.append(df_state_final_yr) # re-organise results of each year df_output = pd.concat(list_df_output).sort_index() df_state_final = pd.concat( list_df_state).sort_index().drop_duplicates() return df_output, df_state_final else: # for single-chunk run (1 chunk = {n_yr} years), directly put df_forcing into supy_driver for calculation # use higher level wrapper that calculate at a `block` level # for better performance # for grid in list_grid: # dict_state_start_grid = dict_state[(tstep_init, grid)] # dict_state_end, dict_output_array = suews_cal_tstep_multi( # dict_state_start_grid, # df_forcing) # # update output & model state at tstep for the current grid # dict_output.update({grid: dict_output_array}) # # model state for the next run # dict_state.update({(tstep_final + freq, grid): dict_state_end}) # # parallel run of grid_list for better efficiency # if os.name == 'nt': # if __name__ == '__main__': # p = Pool(min([len(list_grid), cpu_count()])) # else: # p = Pool(min([len(list_grid), cpu_count()])) # # construct input list for `Pool.starmap` # construct input list for `dask.bag` list_input = [ # (dict_state[(tstep_init, grid)], df_forcing) dict_state[(tstep_init, grid)] for grid in list_grid ] # 
on windows `processes` has issues when importing # so set `threads` here method_parallel = 'threads' if os.name == 'nt' else 'processes' list_res = db.from_sequence(list_input)\ .map(suews_cal_tstep_multi, df_forcing)\ .compute(scheduler=method_parallel) list_state_end, list_output_array = zip(*list_res) # collect output arrays dict_output = { grid: dict_output_array for grid, dict_output_array in zip(list_grid, list_output_array) } # collect final states dict_state_final_tstep = { (tstep_final + freq, grid): dict_state_end for grid, dict_state_end in zip(list_grid, list_state_end) } dict_state.update(dict_state_final_tstep) # save results as time-aware DataFrame df_output0 = pack_df_output_array(dict_output, df_forcing) df_output = df_output0.replace(-999., np.nan) df_state_final = pack_df_state(dict_state).swaplevel(0, 1) # drop ESTM for now as it is not supported yet # select only those supported output groups df_output = df_output.loc[:, ['SUEWS', 'snow', 'DailyState']] # trim multiindex based columns df_output.columns = df_output.columns.remove_unused_levels() # pack final model states into a proper dataframe df_state_final = pack_df_state_final(df_state_final, df_init) return df_output, df_state_final
def function[run_supy, parameter[df_forcing, df_state_init, save_state, n_yr]]: constant[Perform supy simulation. Parameters ---------- df_forcing : pandas.DataFrame forcing data for all grids in `df_state_init`. df_state_init : pandas.DataFrame initial model states; or a collection of model states with multiple timestamps, whose last temporal record will be used as the initial model states. save_state : bool, optional flag for saving model states at each time step, which can be useful in diagnosing model runtime performance or performing a restart run. (the default is False, which instructs supy not to save runtime model states). n_yr : int, optional chunk size (`n_yr` years) to split simulation periods so memory usage can be reduced. (the default is 10, which implies 10-year forcing chunks used in simulations). Returns ------- df_output, df_state_final : Tuple[pandas.DataFrame, pandas.DataFrame] - df_output: `output results <df_output_var>` - df_state_final: `final model states <df_state_var>` Examples -------- >>> df_output, df_state_final = supy.run_supy(df_forcing, df_state_init) ] variable[df_init] assign[=] call[name[df_state_init].copy, parameter[]] if compare[name[df_init].index.nlevels greater[>] constant[1]] begin[:] variable[idx_dt] assign[=] call[call[name[df_init].index.get_level_values, parameter[constant[datetime]]].unique, parameter[]] variable[dt_last] assign[=] call[name[idx_dt].max, parameter[]] variable[df_init] assign[=] call[name[df_init].loc][name[dt_last]] variable[df_forcing] assign[=] call[call[name[df_forcing].assign, parameter[]].rename, parameter[]] variable[list_grid] assign[=] name[df_init].index variable[dict_state] assign[=] dictionary[[], []] variable[dict_output] assign[=] dictionary[[], []] variable[tstep_init] assign[=] call[name[df_forcing].index][constant[0]] variable[tstep_final] assign[=] call[name[df_forcing].index][<ast.UnaryOp object at 0x7da1b0de51b0>] variable[freq] assign[=] name[df_forcing].index.freq 
variable[dict_state] assign[=] <ast.DictComp object at 0x7da1b0de4700> if call[call[name[Path], parameter[constant[problems.txt]]].exists, parameter[]] begin[:] call[name[os].remove, parameter[constant[problems.txt]]] if name[save_state] begin[:] variable[dict_forcing] assign[=] <ast.DictComp object at 0x7da1b0de6aa0> for taget[name[tstep]] in starred[name[df_forcing].index] begin[:] variable[met_forcing_tstep] assign[=] call[name[dict_forcing]][name[tstep]] for taget[name[grid]] in starred[name[list_grid]] begin[:] variable[dict_state_start] assign[=] call[name[dict_state]][tuple[[<ast.Name object at 0x7da1b0de42b0>, <ast.Name object at 0x7da1b0de7d60>]]] <ast.Tuple object at 0x7da1b0de5a80> assign[=] call[name[suews_cal_tstep], parameter[name[dict_state_start], name[met_forcing_tstep]]] call[name[dict_output].update, parameter[dictionary[[<ast.Tuple object at 0x7da1b0de7310>], [<ast.Name object at 0x7da1b0de4bb0>]]]] call[name[dict_state].update, parameter[dictionary[[<ast.Tuple object at 0x7da1b0de7220>], [<ast.Name object at 0x7da1b0de5c90>]]]] variable[df_output] assign[=] call[call[name[pack_df_output], parameter[name[dict_output]]].swaplevel, parameter[constant[0], constant[1]]] variable[df_output] assign[=] call[name[df_output].drop, parameter[]] variable[df_state_final] assign[=] call[call[name[pack_df_state], parameter[name[dict_state]]].swaplevel, parameter[constant[0], constant[1]]] variable[df_output] assign[=] call[name[df_output].loc][tuple[[<ast.Slice object at 0x7da1b0d62260>, <ast.List object at 0x7da1b0d62ec0>]]] name[df_output].columns assign[=] call[name[df_output].columns.remove_unused_levels, parameter[]] variable[df_state_final] assign[=] call[name[pack_df_state_final], parameter[name[df_state_final], name[df_init]]] return[tuple[[<ast.Name object at 0x7da1b0d61270>, <ast.Name object at 0x7da1b0d607f0>]]]
keyword[def] identifier[run_supy] ( identifier[df_forcing] : identifier[pandas] . identifier[DataFrame] , identifier[df_state_init] : identifier[pandas] . identifier[DataFrame] , identifier[save_state] = keyword[False] , identifier[n_yr] = literal[int] , )-> identifier[Tuple] [ identifier[pandas] . identifier[DataFrame] , identifier[pandas] . identifier[DataFrame] ]: literal[string] identifier[df_init] = identifier[df_state_init] . identifier[copy] () keyword[if] identifier[df_init] . identifier[index] . identifier[nlevels] > literal[int] : identifier[idx_dt] = identifier[df_init] . identifier[index] . identifier[get_level_values] ( literal[string] ). identifier[unique] () identifier[dt_last] = identifier[idx_dt] . identifier[max] () identifier[df_init] = identifier[df_init] . identifier[loc] [ identifier[dt_last] ] identifier[df_forcing] = identifier[df_forcing] . identifier[assign] ( identifier[metforcingdata_grid] = literal[int] , identifier[ts5mindata_ir] = literal[int] , ). identifier[rename] ( identifier[columns] ={ literal[string] + literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , } ) identifier[list_grid] = identifier[df_init] . 
identifier[index] identifier[dict_state] ={} identifier[dict_output] ={} identifier[tstep_init] = identifier[df_forcing] . identifier[index] [ literal[int] ] identifier[tstep_final] = identifier[df_forcing] . identifier[index] [- literal[int] ] identifier[freq] = identifier[df_forcing] . identifier[index] . identifier[freq] identifier[dict_state] ={ ( identifier[tstep_init] , identifier[grid] ): identifier[pack_grid_dict] ( identifier[series_state_init] ) keyword[for] identifier[grid] , identifier[series_state_init] keyword[in] identifier[df_init] . identifier[iterrows] () } keyword[if] identifier[Path] ( literal[string] ). identifier[exists] (): identifier[os] . identifier[remove] ( literal[string] ) keyword[if] identifier[save_state] : identifier[dict_forcing] ={ identifier[row] . identifier[Index] : identifier[row] . identifier[_asdict] () keyword[for] identifier[row] keyword[in] identifier[df_forcing] . identifier[itertuples] ()} keyword[for] identifier[tstep] keyword[in] identifier[df_forcing] . identifier[index] : identifier[met_forcing_tstep] = identifier[dict_forcing] [ identifier[tstep] ] keyword[for] identifier[grid] keyword[in] identifier[list_grid] : identifier[dict_state_start] = identifier[dict_state] [( identifier[tstep] , identifier[grid] )] identifier[dict_state_end] , identifier[dict_output_tstep] = identifier[suews_cal_tstep] ( identifier[dict_state_start] , identifier[met_forcing_tstep] ) identifier[dict_output] . identifier[update] ({( identifier[tstep] , identifier[grid] ): identifier[dict_output_tstep] }) identifier[dict_state] . identifier[update] ({( identifier[tstep] + literal[int] * identifier[freq] , identifier[grid] ): identifier[dict_state_end] }) identifier[df_output] = identifier[pack_df_output] ( identifier[dict_output] ). identifier[swaplevel] ( literal[int] , literal[int] ) identifier[df_output] = identifier[df_output] . 
identifier[drop] ( identifier[columns] =[ literal[string] ], identifier[level] = literal[int] ) identifier[df_state_final] = identifier[pack_df_state] ( identifier[dict_state] ). identifier[swaplevel] ( literal[int] , literal[int] ) keyword[else] : identifier[grp_forcing_yr] = identifier[df_forcing] . identifier[groupby] ( identifier[df_forcing] . identifier[index] . identifier[year] // identifier[n_yr] ) keyword[if] identifier[len] ( identifier[grp_forcing_yr] )> literal[int] : identifier[df_state_init_yr] = identifier[df_state_init] . identifier[copy] () identifier[list_df_output] =[] identifier[list_df_state] =[] keyword[for] identifier[grp] keyword[in] identifier[grp_forcing_yr] . identifier[groups] : identifier[df_forcing_yr] = identifier[grp_forcing_yr] . identifier[get_group] ( identifier[grp] ) identifier[df_output_yr] , identifier[df_state_final_yr] = identifier[run_supy] ( identifier[df_forcing_yr] , identifier[df_state_init_yr] ) identifier[df_state_init_yr] = identifier[df_state_final_yr] . identifier[copy] () identifier[list_df_output] . identifier[append] ( identifier[df_output_yr] ) identifier[list_df_state] . identifier[append] ( identifier[df_state_final_yr] ) identifier[df_output] = identifier[pd] . identifier[concat] ( identifier[list_df_output] ). identifier[sort_index] () identifier[df_state_final] = identifier[pd] . identifier[concat] ( identifier[list_df_state] ). identifier[sort_index] (). identifier[drop_duplicates] () keyword[return] identifier[df_output] , identifier[df_state_final] keyword[else] : identifier[list_input] =[ identifier[dict_state] [( identifier[tstep_init] , identifier[grid] )] keyword[for] identifier[grid] keyword[in] identifier[list_grid] ] identifier[method_parallel] = literal[string] keyword[if] identifier[os] . identifier[name] == literal[string] keyword[else] literal[string] identifier[list_res] = identifier[db] . identifier[from_sequence] ( identifier[list_input] ). 
identifier[map] ( identifier[suews_cal_tstep_multi] , identifier[df_forcing] ). identifier[compute] ( identifier[scheduler] = identifier[method_parallel] ) identifier[list_state_end] , identifier[list_output_array] = identifier[zip] (* identifier[list_res] ) identifier[dict_output] ={ identifier[grid] : identifier[dict_output_array] keyword[for] identifier[grid] , identifier[dict_output_array] keyword[in] identifier[zip] ( identifier[list_grid] , identifier[list_output_array] ) } identifier[dict_state_final_tstep] ={ ( identifier[tstep_final] + identifier[freq] , identifier[grid] ): identifier[dict_state_end] keyword[for] identifier[grid] , identifier[dict_state_end] keyword[in] identifier[zip] ( identifier[list_grid] , identifier[list_state_end] ) } identifier[dict_state] . identifier[update] ( identifier[dict_state_final_tstep] ) identifier[df_output0] = identifier[pack_df_output_array] ( identifier[dict_output] , identifier[df_forcing] ) identifier[df_output] = identifier[df_output0] . identifier[replace] (- literal[int] , identifier[np] . identifier[nan] ) identifier[df_state_final] = identifier[pack_df_state] ( identifier[dict_state] ). identifier[swaplevel] ( literal[int] , literal[int] ) identifier[df_output] = identifier[df_output] . identifier[loc] [:,[ literal[string] , literal[string] , literal[string] ]] identifier[df_output] . identifier[columns] = identifier[df_output] . identifier[columns] . identifier[remove_unused_levels] () identifier[df_state_final] = identifier[pack_df_state_final] ( identifier[df_state_final] , identifier[df_init] ) keyword[return] identifier[df_output] , identifier[df_state_final]
def run_supy(df_forcing: pandas.DataFrame, df_state_init: pandas.DataFrame, save_state=False, n_yr=10) -> Tuple[pandas.DataFrame, pandas.DataFrame]: """Perform supy simulation. Parameters ---------- df_forcing : pandas.DataFrame forcing data for all grids in `df_state_init`. df_state_init : pandas.DataFrame initial model states; or a collection of model states with multiple timestamps, whose last temporal record will be used as the initial model states. save_state : bool, optional flag for saving model states at each time step, which can be useful in diagnosing model runtime performance or performing a restart run. (the default is False, which instructs supy not to save runtime model states). n_yr : int, optional chunk size (`n_yr` years) to split simulation periods so memory usage can be reduced. (the default is 10, which implies 10-year forcing chunks used in simulations). Returns ------- df_output, df_state_final : Tuple[pandas.DataFrame, pandas.DataFrame] - df_output: `output results <df_output_var>` - df_state_final: `final model states <df_state_var>` Examples -------- >>> df_output, df_state_final = supy.run_supy(df_forcing, df_state_init) """ # save df_init without changing its original data # df.copy() in pandas does work as a standard python deepcopy df_init = df_state_init.copy() # retrieve the last temporal record as `df_init` # if a `datetime` level existing in the index if df_init.index.nlevels > 1: idx_dt = df_init.index.get_level_values('datetime').unique() dt_last = idx_dt.max() df_init = df_init.loc[dt_last] # depends on [control=['if'], data=[]] # add placeholder variables for df_forcing # `metforcingdata_grid` and `ts5mindata_ir` are used by AnOHM and ESTM, respectively # they are now temporarily disabled in supy # remanae is a workaround to resolve naming inconsistency between # suews fortran code interface and input forcing file hearders df_forcing = df_forcing.assign(metforcingdata_grid=0, ts5mindata_ir=0).rename(columns={'%' + 'iy': 'iy', 
'id': 'id', 'it': 'it', 'imin': 'imin', 'qn': 'qn1_obs', 'qh': 'qh_obs', 'qe': 'qe', 'qs': 'qs_obs', 'qf': 'qf_obs', 'U': 'avu1', 'RH': 'avrh', 'Tair': 'temp_c', 'pres': 'press_hpa', 'rain': 'precip', 'kdown': 'avkdn', 'snow': 'snow_obs', 'ldown': 'ldown_obs', 'fcld': 'fcld_obs', 'Wuh': 'wu_m3', 'xsmd': 'xsmd', 'lai': 'lai_obs', 'kdiff': 'kdiff', 'kdir': 'kdir', 'wdir': 'wdir'}) # grid list determined by initial states list_grid = df_init.index # initialise dicts for holding results and model states dict_state = {} dict_output = {} # initial and final tsteps retrieved from forcing data tstep_init = df_forcing.index[0] tstep_final = df_forcing.index[-1] # tstep size retrieved from forcing data freq = df_forcing.index.freq # dict_state is used to save model states for later use # (t_start, grid): series_state_init.to_dict() dict_state = {(tstep_init, grid): pack_grid_dict(series_state_init) for (grid, series_state_init) in df_init.iterrows()} # remove 'problems.txt' if Path('problems.txt').exists(): os.remove('problems.txt') # depends on [control=['if'], data=[]] if save_state: # use slower more functional single step wrapper # convert df to dict with `itertuples` for better performance dict_forcing = {row.Index: row._asdict() for row in df_forcing.itertuples()} for tstep in df_forcing.index: # temporal loop # initialise output of tstep: # load met_forcing if the same across all grids: met_forcing_tstep = dict_forcing[tstep] # spatial loop for grid in list_grid: dict_state_start = dict_state[tstep, grid] # calculation at one step: # series_state_end, series_output_tstep = suews_cal_tstep_df( # series_state_start, met_forcing_tstep) (dict_state_end, dict_output_tstep) = suews_cal_tstep(dict_state_start, met_forcing_tstep) # update output & model state at tstep for the current grid dict_output.update({(tstep, grid): dict_output_tstep}) dict_state.update({(tstep + 1 * freq, grid): dict_state_end}) # depends on [control=['for'], data=['grid']] # depends on 
[control=['for'], data=['tstep']] # pack results as easier DataFrames df_output = pack_df_output(dict_output).swaplevel(0, 1) # drop unnecessary 'datetime' as it is already included in the index df_output = df_output.drop(columns=['datetime'], level=0) df_state_final = pack_df_state(dict_state).swaplevel(0, 1) # depends on [control=['if'], data=[]] else: # for multi-year run, reduce the whole df_forcing into {n_yr}-year chunks for less memory consumption grp_forcing_yr = df_forcing.groupby(df_forcing.index.year // n_yr) if len(grp_forcing_yr) > 1: df_state_init_yr = df_state_init.copy() list_df_output = [] list_df_state = [] for grp in grp_forcing_yr.groups: # get forcing of a specific year df_forcing_yr = grp_forcing_yr.get_group(grp) # run supy: actual execution done in the `else` clause below (df_output_yr, df_state_final_yr) = run_supy(df_forcing_yr, df_state_init_yr) df_state_init_yr = df_state_final_yr.copy() # collect results list_df_output.append(df_output_yr) list_df_state.append(df_state_final_yr) # depends on [control=['for'], data=['grp']] # re-organise results of each year df_output = pd.concat(list_df_output).sort_index() df_state_final = pd.concat(list_df_state).sort_index().drop_duplicates() return (df_output, df_state_final) # depends on [control=['if'], data=[]] else: # for single-chunk run (1 chunk = {n_yr} years), directly put df_forcing into supy_driver for calculation # use higher level wrapper that calculate at a `block` level # for better performance # for grid in list_grid: # dict_state_start_grid = dict_state[(tstep_init, grid)] # dict_state_end, dict_output_array = suews_cal_tstep_multi( # dict_state_start_grid, # df_forcing) # # update output & model state at tstep for the current grid # dict_output.update({grid: dict_output_array}) # # model state for the next run # dict_state.update({(tstep_final + freq, grid): dict_state_end}) # # parallel run of grid_list for better efficiency # if os.name == 'nt': # if __name__ == '__main__': # p = 
Pool(min([len(list_grid), cpu_count()])) # else: # p = Pool(min([len(list_grid), cpu_count()])) # # construct input list for `Pool.starmap` # construct input list for `dask.bag` # (dict_state[(tstep_init, grid)], df_forcing) list_input = [dict_state[tstep_init, grid] for grid in list_grid] # on windows `processes` has issues when importing # so set `threads` here method_parallel = 'threads' if os.name == 'nt' else 'processes' list_res = db.from_sequence(list_input).map(suews_cal_tstep_multi, df_forcing).compute(scheduler=method_parallel) (list_state_end, list_output_array) = zip(*list_res) # collect output arrays dict_output = {grid: dict_output_array for (grid, dict_output_array) in zip(list_grid, list_output_array)} # collect final states dict_state_final_tstep = {(tstep_final + freq, grid): dict_state_end for (grid, dict_state_end) in zip(list_grid, list_state_end)} dict_state.update(dict_state_final_tstep) # save results as time-aware DataFrame df_output0 = pack_df_output_array(dict_output, df_forcing) df_output = df_output0.replace(-999.0, np.nan) df_state_final = pack_df_state(dict_state).swaplevel(0, 1) # drop ESTM for now as it is not supported yet # select only those supported output groups df_output = df_output.loc[:, ['SUEWS', 'snow', 'DailyState']] # trim multiindex based columns df_output.columns = df_output.columns.remove_unused_levels() # pack final model states into a proper dataframe df_state_final = pack_df_state_final(df_state_final, df_init) return (df_output, df_state_final)
def match_request_type(self, entry, request_type, regex=True): """ Helper function that returns entries with a request type matching the given `request_type` argument. :param entry: entry object to analyze :param request_type: ``str`` of request type to match :param regex: ``bool`` indicating whether to use a regex or string match """ if regex: return re.search(request_type, entry['request']['method'], flags=re.IGNORECASE) is not None else: return entry['request']['method'] == request_type
def function[match_request_type, parameter[self, entry, request_type, regex]]: constant[ Helper function that returns entries with a request type matching the given `request_type` argument. :param entry: entry object to analyze :param request_type: ``str`` of request type to match :param regex: ``bool`` indicating whether to use a regex or string match ] if name[regex] begin[:] return[compare[call[name[re].search, parameter[name[request_type], call[call[name[entry]][constant[request]]][constant[method]]]] is_not constant[None]]]
keyword[def] identifier[match_request_type] ( identifier[self] , identifier[entry] , identifier[request_type] , identifier[regex] = keyword[True] ): literal[string] keyword[if] identifier[regex] : keyword[return] identifier[re] . identifier[search] ( identifier[request_type] , identifier[entry] [ literal[string] ][ literal[string] ], identifier[flags] = identifier[re] . identifier[IGNORECASE] ) keyword[is] keyword[not] keyword[None] keyword[else] : keyword[return] identifier[entry] [ literal[string] ][ literal[string] ]== identifier[request_type]
def match_request_type(self, entry, request_type, regex=True): """ Helper function that returns entries with a request type matching the given `request_type` argument. :param entry: entry object to analyze :param request_type: ``str`` of request type to match :param regex: ``bool`` indicating whether to use a regex or string match """ if regex: return re.search(request_type, entry['request']['method'], flags=re.IGNORECASE) is not None # depends on [control=['if'], data=[]] else: return entry['request']['method'] == request_type
def read_metadata(self, symbol, as_of=None, allow_secondary=None): """ Return the metadata saved for a symbol. This method is fast as it doesn't actually load the data. Parameters ---------- symbol : `str` symbol name for the item as_of : `str` or int or `datetime.datetime` Return the data as it was as_of the point in time. `int` : specific version number `str` : snapshot name which contains the version `datetime.datetime` : the version of the data that existed as_of the requested point in time allow_secondary : `bool` or `None` Override the default behavior for allowing reads from secondary members of a cluster: `None` : use the settings from the top-level `Arctic` object used to query this version store. `True` : allow reads from secondary members `False` : only allow reads from primary members """ _version = self._read_metadata(symbol, as_of=as_of, read_preference=self._read_preference(allow_secondary)) return VersionedItem(symbol=symbol, library=self._arctic_lib.get_name(), version=_version['version'], metadata=_version.pop('metadata', None), data=None, host=self._arctic_lib.arctic.mongo_host)
def function[read_metadata, parameter[self, symbol, as_of, allow_secondary]]: constant[ Return the metadata saved for a symbol. This method is fast as it doesn't actually load the data. Parameters ---------- symbol : `str` symbol name for the item as_of : `str` or int or `datetime.datetime` Return the data as it was as_of the point in time. `int` : specific version number `str` : snapshot name which contains the version `datetime.datetime` : the version of the data that existed as_of the requested point in time allow_secondary : `bool` or `None` Override the default behavior for allowing reads from secondary members of a cluster: `None` : use the settings from the top-level `Arctic` object used to query this version store. `True` : allow reads from secondary members `False` : only allow reads from primary members ] variable[_version] assign[=] call[name[self]._read_metadata, parameter[name[symbol]]] return[call[name[VersionedItem], parameter[]]]
keyword[def] identifier[read_metadata] ( identifier[self] , identifier[symbol] , identifier[as_of] = keyword[None] , identifier[allow_secondary] = keyword[None] ): literal[string] identifier[_version] = identifier[self] . identifier[_read_metadata] ( identifier[symbol] , identifier[as_of] = identifier[as_of] , identifier[read_preference] = identifier[self] . identifier[_read_preference] ( identifier[allow_secondary] )) keyword[return] identifier[VersionedItem] ( identifier[symbol] = identifier[symbol] , identifier[library] = identifier[self] . identifier[_arctic_lib] . identifier[get_name] (), identifier[version] = identifier[_version] [ literal[string] ], identifier[metadata] = identifier[_version] . identifier[pop] ( literal[string] , keyword[None] ), identifier[data] = keyword[None] , identifier[host] = identifier[self] . identifier[_arctic_lib] . identifier[arctic] . identifier[mongo_host] )
def read_metadata(self, symbol, as_of=None, allow_secondary=None): """ Return the metadata saved for a symbol. This method is fast as it doesn't actually load the data. Parameters ---------- symbol : `str` symbol name for the item as_of : `str` or int or `datetime.datetime` Return the data as it was as_of the point in time. `int` : specific version number `str` : snapshot name which contains the version `datetime.datetime` : the version of the data that existed as_of the requested point in time allow_secondary : `bool` or `None` Override the default behavior for allowing reads from secondary members of a cluster: `None` : use the settings from the top-level `Arctic` object used to query this version store. `True` : allow reads from secondary members `False` : only allow reads from primary members """ _version = self._read_metadata(symbol, as_of=as_of, read_preference=self._read_preference(allow_secondary)) return VersionedItem(symbol=symbol, library=self._arctic_lib.get_name(), version=_version['version'], metadata=_version.pop('metadata', None), data=None, host=self._arctic_lib.arctic.mongo_host)
def decode_task(line): """ Parse a single record (task) from a task database file. I don't understand why they don't just use JSON or YAML. But that's okay. >>> decode_task('[description:"Make a python API for taskwarrior"]') {'description': 'Make a python API for taskwarrior'} """ task = {} for key, value in re.findall(r'(\w+):"(.*?)(?<!\\)"', line): value = value.replace('\\"', '"') # unescape quotes task[key] = value for unsafe, safe in six.iteritems(decode_replacements): task[key] = task[key].replace(unsafe, safe) if 'tags' in task: task['tags'] = task['tags'].split(',') return task
def function[decode_task, parameter[line]]: constant[ Parse a single record (task) from a task database file. I don't understand why they don't just use JSON or YAML. But that's okay. >>> decode_task('[description:"Make a python API for taskwarrior"]') {'description': 'Make a python API for taskwarrior'} ] variable[task] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da18c4cfcd0>, <ast.Name object at 0x7da18c4ccb50>]]] in starred[call[name[re].findall, parameter[constant[(\w+):"(.*?)(?<!\\)"], name[line]]]] begin[:] variable[value] assign[=] call[name[value].replace, parameter[constant[\"], constant["]]] call[name[task]][name[key]] assign[=] name[value] for taget[tuple[[<ast.Name object at 0x7da18c4cee90>, <ast.Name object at 0x7da18c4cda50>]]] in starred[call[name[six].iteritems, parameter[name[decode_replacements]]]] begin[:] call[name[task]][name[key]] assign[=] call[call[name[task]][name[key]].replace, parameter[name[unsafe], name[safe]]] if compare[constant[tags] in name[task]] begin[:] call[name[task]][constant[tags]] assign[=] call[call[name[task]][constant[tags]].split, parameter[constant[,]]] return[name[task]]
keyword[def] identifier[decode_task] ( identifier[line] ): literal[string] identifier[task] ={} keyword[for] identifier[key] , identifier[value] keyword[in] identifier[re] . identifier[findall] ( literal[string] , identifier[line] ): identifier[value] = identifier[value] . identifier[replace] ( literal[string] , literal[string] ) identifier[task] [ identifier[key] ]= identifier[value] keyword[for] identifier[unsafe] , identifier[safe] keyword[in] identifier[six] . identifier[iteritems] ( identifier[decode_replacements] ): identifier[task] [ identifier[key] ]= identifier[task] [ identifier[key] ]. identifier[replace] ( identifier[unsafe] , identifier[safe] ) keyword[if] literal[string] keyword[in] identifier[task] : identifier[task] [ literal[string] ]= identifier[task] [ literal[string] ]. identifier[split] ( literal[string] ) keyword[return] identifier[task]
def decode_task(line): """ Parse a single record (task) from a task database file. I don't understand why they don't just use JSON or YAML. But that's okay. >>> decode_task('[description:"Make a python API for taskwarrior"]') {'description': 'Make a python API for taskwarrior'} """ task = {} for (key, value) in re.findall('(\\w+):"(.*?)(?<!\\\\)"', line): value = value.replace('\\"', '"') # unescape quotes task[key] = value for (unsafe, safe) in six.iteritems(decode_replacements): task[key] = task[key].replace(unsafe, safe) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] if 'tags' in task: task['tags'] = task['tags'].split(',') # depends on [control=['if'], data=['task']] return task
def readinto(self, buf, **kwargs): """ Read into ``buf`` from the device. The number of bytes read will be the length of ``buf``. If ``start`` or ``end`` is provided, then the buffer will be sliced as if ``buf[start:end]``. This will not cause an allocation like ``buf[start:end]`` will so it saves memory. :param bytearray buffer: buffer to write into :param int start: Index to start writing at :param int end: Index to write up to but not include """ self.i2c.readfrom_into(self.device_address, buf, **kwargs) if self._debug: print("i2c_device.readinto:", [hex(i) for i in buf])
def function[readinto, parameter[self, buf]]: constant[ Read into ``buf`` from the device. The number of bytes read will be the length of ``buf``. If ``start`` or ``end`` is provided, then the buffer will be sliced as if ``buf[start:end]``. This will not cause an allocation like ``buf[start:end]`` will so it saves memory. :param bytearray buffer: buffer to write into :param int start: Index to start writing at :param int end: Index to write up to but not include ] call[name[self].i2c.readfrom_into, parameter[name[self].device_address, name[buf]]] if name[self]._debug begin[:] call[name[print], parameter[constant[i2c_device.readinto:], <ast.ListComp object at 0x7da1b01b3340>]]
keyword[def] identifier[readinto] ( identifier[self] , identifier[buf] ,** identifier[kwargs] ): literal[string] identifier[self] . identifier[i2c] . identifier[readfrom_into] ( identifier[self] . identifier[device_address] , identifier[buf] ,** identifier[kwargs] ) keyword[if] identifier[self] . identifier[_debug] : identifier[print] ( literal[string] ,[ identifier[hex] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[buf] ])
def readinto(self, buf, **kwargs): """ Read into ``buf`` from the device. The number of bytes read will be the length of ``buf``. If ``start`` or ``end`` is provided, then the buffer will be sliced as if ``buf[start:end]``. This will not cause an allocation like ``buf[start:end]`` will so it saves memory. :param bytearray buffer: buffer to write into :param int start: Index to start writing at :param int end: Index to write up to but not include """ self.i2c.readfrom_into(self.device_address, buf, **kwargs) if self._debug: print('i2c_device.readinto:', [hex(i) for i in buf]) # depends on [control=['if'], data=[]]
def go_str(value): """Returns value as a valid Go string literal.""" io = StringIO.StringIO() io.write('"') for c in value: if c in _ESCAPES: io.write(_ESCAPES[c]) elif c in _SIMPLE_CHARS: io.write(c) else: io.write(r'\x{:02x}'.format(ord(c))) io.write('"') return io.getvalue()
def function[go_str, parameter[value]]: constant[Returns value as a valid Go string literal.] variable[io] assign[=] call[name[StringIO].StringIO, parameter[]] call[name[io].write, parameter[constant["]]] for taget[name[c]] in starred[name[value]] begin[:] if compare[name[c] in name[_ESCAPES]] begin[:] call[name[io].write, parameter[call[name[_ESCAPES]][name[c]]]] call[name[io].write, parameter[constant["]]] return[call[name[io].getvalue, parameter[]]]
keyword[def] identifier[go_str] ( identifier[value] ): literal[string] identifier[io] = identifier[StringIO] . identifier[StringIO] () identifier[io] . identifier[write] ( literal[string] ) keyword[for] identifier[c] keyword[in] identifier[value] : keyword[if] identifier[c] keyword[in] identifier[_ESCAPES] : identifier[io] . identifier[write] ( identifier[_ESCAPES] [ identifier[c] ]) keyword[elif] identifier[c] keyword[in] identifier[_SIMPLE_CHARS] : identifier[io] . identifier[write] ( identifier[c] ) keyword[else] : identifier[io] . identifier[write] ( literal[string] . identifier[format] ( identifier[ord] ( identifier[c] ))) identifier[io] . identifier[write] ( literal[string] ) keyword[return] identifier[io] . identifier[getvalue] ()
def go_str(value): """Returns value as a valid Go string literal.""" io = StringIO.StringIO() io.write('"') for c in value: if c in _ESCAPES: io.write(_ESCAPES[c]) # depends on [control=['if'], data=['c', '_ESCAPES']] elif c in _SIMPLE_CHARS: io.write(c) # depends on [control=['if'], data=['c']] else: io.write('\\x{:02x}'.format(ord(c))) # depends on [control=['for'], data=['c']] io.write('"') return io.getvalue()
def capture(command, encoding='UTF-8'): """ Capture the output of an external command as if it runs in an interactive terminal. :param command: The command name and its arguments (a list of strings). :param encoding: The encoding to use to decode the output (a string). :returns: The output of the command. This function runs an external command under ``script`` (emulating an interactive terminal) to capture the output of the command as if it was running in an interactive terminal (including ANSI escape sequences). """ with open(os.devnull, 'wb') as dev_null: # We start by invoking the `script' program in a form that is supported # by the Linux implementation [1] but fails command line validation on # the MacOS (BSD) implementation [2]: The command is specified using # the -c option and the typescript file is /dev/null. # # [1] http://man7.org/linux/man-pages/man1/script.1.html # [2] https://developer.apple.com/legacy/library/documentation/Darwin/Reference/ManPages/man1/script.1.html command_line = ['script', '-qc', ' '.join(map(pipes.quote, command)), '/dev/null'] script = subprocess.Popen(command_line, stdout=subprocess.PIPE, stderr=dev_null) stdout, stderr = script.communicate() if script.returncode == 0: # If `script' succeeded we assume that it understood our command line # invocation which means it's the Linux implementation (in this case # we can use standard output instead of a temporary file). output = stdout.decode(encoding) else: # If `script' failed we assume that it didn't understand our command # line invocation which means it's the MacOS (BSD) implementation # (in this case we need a temporary file because the command line # interface requires it). 
fd, temporary_file = tempfile.mkstemp(prefix='coloredlogs-', suffix='-capture.txt') try: command_line = ['script', '-q', temporary_file] + list(command) subprocess.Popen(command_line, stdout=dev_null, stderr=dev_null).wait() with codecs.open(temporary_file, 'r', encoding) as handle: output = handle.read() finally: os.unlink(temporary_file) # On MacOS when standard input is /dev/null I've observed # the captured output starting with the characters '^D': # # $ script -q capture.txt echo example </dev/null # example # $ xxd capture.txt # 00000000: 5e44 0808 6578 616d 706c 650d 0a ^D..example.. # # I'm not sure why this is here, although I suppose it has to do # with ^D in caret notation signifying end-of-file [1]. What I do # know is that this is an implementation detail that callers of the # capture() function shouldn't be bothered with, so we strip it. # # [1] https://en.wikipedia.org/wiki/End-of-file if output.startswith(b'^D'): output = output[2:] # Clean up backspace and carriage return characters and the 'erase line' # ANSI escape sequence and return the output as a Unicode string. return u'\n'.join(clean_terminal_output(output))
def function[capture, parameter[command, encoding]]: constant[ Capture the output of an external command as if it runs in an interactive terminal. :param command: The command name and its arguments (a list of strings). :param encoding: The encoding to use to decode the output (a string). :returns: The output of the command. This function runs an external command under ``script`` (emulating an interactive terminal) to capture the output of the command as if it was running in an interactive terminal (including ANSI escape sequences). ] with call[name[open], parameter[name[os].devnull, constant[wb]]] begin[:] variable[command_line] assign[=] list[[<ast.Constant object at 0x7da1b0698ac0>, <ast.Constant object at 0x7da1b0699ed0>, <ast.Call object at 0x7da1b0698280>, <ast.Constant object at 0x7da1b06980a0>]] variable[script] assign[=] call[name[subprocess].Popen, parameter[name[command_line]]] <ast.Tuple object at 0x7da1b069bca0> assign[=] call[name[script].communicate, parameter[]] if compare[name[script].returncode equal[==] constant[0]] begin[:] variable[output] assign[=] call[name[stdout].decode, parameter[name[encoding]]] return[call[constant[ ].join, parameter[call[name[clean_terminal_output], parameter[name[output]]]]]]
keyword[def] identifier[capture] ( identifier[command] , identifier[encoding] = literal[string] ): literal[string] keyword[with] identifier[open] ( identifier[os] . identifier[devnull] , literal[string] ) keyword[as] identifier[dev_null] : identifier[command_line] =[ literal[string] , literal[string] , literal[string] . identifier[join] ( identifier[map] ( identifier[pipes] . identifier[quote] , identifier[command] )), literal[string] ] identifier[script] = identifier[subprocess] . identifier[Popen] ( identifier[command_line] , identifier[stdout] = identifier[subprocess] . identifier[PIPE] , identifier[stderr] = identifier[dev_null] ) identifier[stdout] , identifier[stderr] = identifier[script] . identifier[communicate] () keyword[if] identifier[script] . identifier[returncode] == literal[int] : identifier[output] = identifier[stdout] . identifier[decode] ( identifier[encoding] ) keyword[else] : identifier[fd] , identifier[temporary_file] = identifier[tempfile] . identifier[mkstemp] ( identifier[prefix] = literal[string] , identifier[suffix] = literal[string] ) keyword[try] : identifier[command_line] =[ literal[string] , literal[string] , identifier[temporary_file] ]+ identifier[list] ( identifier[command] ) identifier[subprocess] . identifier[Popen] ( identifier[command_line] , identifier[stdout] = identifier[dev_null] , identifier[stderr] = identifier[dev_null] ). identifier[wait] () keyword[with] identifier[codecs] . identifier[open] ( identifier[temporary_file] , literal[string] , identifier[encoding] ) keyword[as] identifier[handle] : identifier[output] = identifier[handle] . identifier[read] () keyword[finally] : identifier[os] . identifier[unlink] ( identifier[temporary_file] ) keyword[if] identifier[output] . identifier[startswith] ( literal[string] ): identifier[output] = identifier[output] [ literal[int] :] keyword[return] literal[string] . identifier[join] ( identifier[clean_terminal_output] ( identifier[output] ))
def capture(command, encoding='UTF-8'): """ Capture the output of an external command as if it runs in an interactive terminal. :param command: The command name and its arguments (a list of strings). :param encoding: The encoding to use to decode the output (a string). :returns: The output of the command. This function runs an external command under ``script`` (emulating an interactive terminal) to capture the output of the command as if it was running in an interactive terminal (including ANSI escape sequences). """ with open(os.devnull, 'wb') as dev_null: # We start by invoking the `script' program in a form that is supported # by the Linux implementation [1] but fails command line validation on # the MacOS (BSD) implementation [2]: The command is specified using # the -c option and the typescript file is /dev/null. # # [1] http://man7.org/linux/man-pages/man1/script.1.html # [2] https://developer.apple.com/legacy/library/documentation/Darwin/Reference/ManPages/man1/script.1.html command_line = ['script', '-qc', ' '.join(map(pipes.quote, command)), '/dev/null'] script = subprocess.Popen(command_line, stdout=subprocess.PIPE, stderr=dev_null) (stdout, stderr) = script.communicate() if script.returncode == 0: # If `script' succeeded we assume that it understood our command line # invocation which means it's the Linux implementation (in this case # we can use standard output instead of a temporary file). output = stdout.decode(encoding) # depends on [control=['if'], data=[]] else: # If `script' failed we assume that it didn't understand our command # line invocation which means it's the MacOS (BSD) implementation # (in this case we need a temporary file because the command line # interface requires it). 
(fd, temporary_file) = tempfile.mkstemp(prefix='coloredlogs-', suffix='-capture.txt') try: command_line = ['script', '-q', temporary_file] + list(command) subprocess.Popen(command_line, stdout=dev_null, stderr=dev_null).wait() with codecs.open(temporary_file, 'r', encoding) as handle: output = handle.read() # depends on [control=['with'], data=['handle']] # depends on [control=['try'], data=[]] finally: os.unlink(temporary_file) # On MacOS when standard input is /dev/null I've observed # the captured output starting with the characters '^D': # # $ script -q capture.txt echo example </dev/null # example # $ xxd capture.txt # 00000000: 5e44 0808 6578 616d 706c 650d 0a ^D..example.. # # I'm not sure why this is here, although I suppose it has to do # with ^D in caret notation signifying end-of-file [1]. What I do # know is that this is an implementation detail that callers of the # capture() function shouldn't be bothered with, so we strip it. # # [1] https://en.wikipedia.org/wiki/End-of-file if output.startswith(b'^D'): output = output[2:] # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['dev_null']] # Clean up backspace and carriage return characters and the 'erase line' # ANSI escape sequence and return the output as a Unicode string. return u'\n'.join(clean_terminal_output(output))
def render_to_string(self, template, context=None, def_name=None, subdir='templates'): '''App-specific render function that renders templates in the *current app*, attached to the request for convenience''' template_adapter = self.get_template_loader(subdir).get_template(template) return getattr(template_adapter, 'render')(context=context, request=self.request, def_name=def_name)
def function[render_to_string, parameter[self, template, context, def_name, subdir]]: constant[App-specific render function that renders templates in the *current app*, attached to the request for convenience] variable[template_adapter] assign[=] call[call[name[self].get_template_loader, parameter[name[subdir]]].get_template, parameter[name[template]]] return[call[call[name[getattr], parameter[name[template_adapter], constant[render]]], parameter[]]]
keyword[def] identifier[render_to_string] ( identifier[self] , identifier[template] , identifier[context] = keyword[None] , identifier[def_name] = keyword[None] , identifier[subdir] = literal[string] ): literal[string] identifier[template_adapter] = identifier[self] . identifier[get_template_loader] ( identifier[subdir] ). identifier[get_template] ( identifier[template] ) keyword[return] identifier[getattr] ( identifier[template_adapter] , literal[string] )( identifier[context] = identifier[context] , identifier[request] = identifier[self] . identifier[request] , identifier[def_name] = identifier[def_name] )
def render_to_string(self, template, context=None, def_name=None, subdir='templates'): """App-specific render function that renders templates in the *current app*, attached to the request for convenience""" template_adapter = self.get_template_loader(subdir).get_template(template) return getattr(template_adapter, 'render')(context=context, request=self.request, def_name=def_name)
def log_write(self, data, kind='input'): """Write data to the log file, if active""" #print 'data: %r' % data # dbg if self.log_active and data: write = self.logfile.write if kind=='input': if self.timestamp: write(str_to_unicode(time.strftime('# %a, %d %b %Y %H:%M:%S\n', time.localtime()))) write(data) elif kind=='output' and self.log_output: odata = u'\n'.join([u'#[Out]# %s' % s for s in data.splitlines()]) write(u'%s\n' % odata) self.logfile.flush()
def function[log_write, parameter[self, data, kind]]: constant[Write data to the log file, if active] if <ast.BoolOp object at 0x7da20c6aaa70> begin[:] variable[write] assign[=] name[self].logfile.write if compare[name[kind] equal[==] constant[input]] begin[:] if name[self].timestamp begin[:] call[name[write], parameter[call[name[str_to_unicode], parameter[call[name[time].strftime, parameter[constant[# %a, %d %b %Y %H:%M:%S ], call[name[time].localtime, parameter[]]]]]]]] call[name[write], parameter[name[data]]] call[name[self].logfile.flush, parameter[]]
keyword[def] identifier[log_write] ( identifier[self] , identifier[data] , identifier[kind] = literal[string] ): literal[string] keyword[if] identifier[self] . identifier[log_active] keyword[and] identifier[data] : identifier[write] = identifier[self] . identifier[logfile] . identifier[write] keyword[if] identifier[kind] == literal[string] : keyword[if] identifier[self] . identifier[timestamp] : identifier[write] ( identifier[str_to_unicode] ( identifier[time] . identifier[strftime] ( literal[string] , identifier[time] . identifier[localtime] ()))) identifier[write] ( identifier[data] ) keyword[elif] identifier[kind] == literal[string] keyword[and] identifier[self] . identifier[log_output] : identifier[odata] = literal[string] . identifier[join] ([ literal[string] % identifier[s] keyword[for] identifier[s] keyword[in] identifier[data] . identifier[splitlines] ()]) identifier[write] ( literal[string] % identifier[odata] ) identifier[self] . identifier[logfile] . identifier[flush] ()
def log_write(self, data, kind='input'): """Write data to the log file, if active""" #print 'data: %r' % data # dbg if self.log_active and data: write = self.logfile.write if kind == 'input': if self.timestamp: write(str_to_unicode(time.strftime('# %a, %d %b %Y %H:%M:%S\n', time.localtime()))) # depends on [control=['if'], data=[]] write(data) # depends on [control=['if'], data=[]] elif kind == 'output' and self.log_output: odata = u'\n'.join([u'#[Out]# %s' % s for s in data.splitlines()]) write(u'%s\n' % odata) # depends on [control=['if'], data=[]] self.logfile.flush() # depends on [control=['if'], data=[]]
def create_acs(self, **kwargs): """ Creates an instance of the Asset Service. """ acs = predix.admin.acs.AccessControl(**kwargs) acs.create() client_id = self.get_client_id() if client_id: acs.grant_client(client_id) acs.grant_client(client_id) acs.add_to_manifest(self) return acs
def function[create_acs, parameter[self]]: constant[ Creates an instance of the Asset Service. ] variable[acs] assign[=] call[name[predix].admin.acs.AccessControl, parameter[]] call[name[acs].create, parameter[]] variable[client_id] assign[=] call[name[self].get_client_id, parameter[]] if name[client_id] begin[:] call[name[acs].grant_client, parameter[name[client_id]]] call[name[acs].grant_client, parameter[name[client_id]]] call[name[acs].add_to_manifest, parameter[name[self]]] return[name[acs]]
keyword[def] identifier[create_acs] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[acs] = identifier[predix] . identifier[admin] . identifier[acs] . identifier[AccessControl] (** identifier[kwargs] ) identifier[acs] . identifier[create] () identifier[client_id] = identifier[self] . identifier[get_client_id] () keyword[if] identifier[client_id] : identifier[acs] . identifier[grant_client] ( identifier[client_id] ) identifier[acs] . identifier[grant_client] ( identifier[client_id] ) identifier[acs] . identifier[add_to_manifest] ( identifier[self] ) keyword[return] identifier[acs]
def create_acs(self, **kwargs): """ Creates an instance of the Asset Service. """ acs = predix.admin.acs.AccessControl(**kwargs) acs.create() client_id = self.get_client_id() if client_id: acs.grant_client(client_id) # depends on [control=['if'], data=[]] acs.grant_client(client_id) acs.add_to_manifest(self) return acs
def loads(data): """Parser entry point. Parses the output of a traceroute execution""" data += "\n_EOS_" # Append EOS token. Helps to match last RE_HOP # Get headers match_dest = RE_HEADER.search(data) dest_name = match_dest.group(1) dest_ip = match_dest.group(2) # The Traceroute is the root of the tree. traceroute = Traceroute(dest_name, dest_ip) # Get hops matches_hop = RE_HOP.findall(data) for match_hop in matches_hop: # Initialize a hop idx = int(match_hop[0]) if match_hop[1]: asn = int(match_hop[1]) else: asn = None hop = Hop(idx, asn) # Parse probes data: <name> | <(IP)> | <rtt> | 'ms' | '*' probes_data = match_hop[2].split() # Get rid of 'ms': <name> | <(IP)> | <rtt> | '*' probes_data = filter(lambda s: s.lower() != 'ms', probes_data) i = 0 while i < len(probes_data): # For each hop parse probes name = None ip = None rtt = None anno = '' # RTT check comes first because RE_PROBE_NAME can confuse rtt with an IP as name # The regex RE_PROBE_NAME can be improved if RE_PROBE_RTT.match(probes_data[i]): # Matched rtt, so name and IP have been parsed before rtt = float(probes_data[i]) i += 1 elif RE_PROBE_NAME.match(probes_data[i]): # Matched a name, so next elements are IP and rtt name = probes_data[i] ip = probes_data[i+1].strip('()') rtt = float(probes_data[i+2]) i += 3 elif RE_PROBE_TIMEOUT.match(probes_data[i]): # Its a timeout, so maybe name and IP have been parsed before # or maybe not. But it's Hop job to deal with it rtt = None i += 1 else: ext = "i: %d\nprobes_data: %s\nname: %s\nip: %s\nrtt: %s\nanno: %s" % (i, probes_data, name, ip, rtt, anno) raise ParseError("Parse error \n%s" % ext) # Check for annotation try: if RE_PROBE_ANNOTATION.match(probes_data[i]): anno = probes_data[i] i += 1 except IndexError: pass probe = Probe(name, ip, rtt, anno) hop.add_probe(probe) traceroute.add_hop(hop) return traceroute
def function[loads, parameter[data]]: constant[Parser entry point. Parses the output of a traceroute execution] <ast.AugAssign object at 0x7da204620100> variable[match_dest] assign[=] call[name[RE_HEADER].search, parameter[name[data]]] variable[dest_name] assign[=] call[name[match_dest].group, parameter[constant[1]]] variable[dest_ip] assign[=] call[name[match_dest].group, parameter[constant[2]]] variable[traceroute] assign[=] call[name[Traceroute], parameter[name[dest_name], name[dest_ip]]] variable[matches_hop] assign[=] call[name[RE_HOP].findall, parameter[name[data]]] for taget[name[match_hop]] in starred[name[matches_hop]] begin[:] variable[idx] assign[=] call[name[int], parameter[call[name[match_hop]][constant[0]]]] if call[name[match_hop]][constant[1]] begin[:] variable[asn] assign[=] call[name[int], parameter[call[name[match_hop]][constant[1]]]] variable[hop] assign[=] call[name[Hop], parameter[name[idx], name[asn]]] variable[probes_data] assign[=] call[call[name[match_hop]][constant[2]].split, parameter[]] variable[probes_data] assign[=] call[name[filter], parameter[<ast.Lambda object at 0x7da204621150>, name[probes_data]]] variable[i] assign[=] constant[0] while compare[name[i] less[<] call[name[len], parameter[name[probes_data]]]] begin[:] variable[name] assign[=] constant[None] variable[ip] assign[=] constant[None] variable[rtt] assign[=] constant[None] variable[anno] assign[=] constant[] if call[name[RE_PROBE_RTT].match, parameter[call[name[probes_data]][name[i]]]] begin[:] variable[rtt] assign[=] call[name[float], parameter[call[name[probes_data]][name[i]]]] <ast.AugAssign object at 0x7da204622da0> <ast.Try object at 0x7da204620b20> variable[probe] assign[=] call[name[Probe], parameter[name[name], name[ip], name[rtt], name[anno]]] call[name[hop].add_probe, parameter[name[probe]]] call[name[traceroute].add_hop, parameter[name[hop]]] return[name[traceroute]]
keyword[def] identifier[loads] ( identifier[data] ): literal[string] identifier[data] += literal[string] identifier[match_dest] = identifier[RE_HEADER] . identifier[search] ( identifier[data] ) identifier[dest_name] = identifier[match_dest] . identifier[group] ( literal[int] ) identifier[dest_ip] = identifier[match_dest] . identifier[group] ( literal[int] ) identifier[traceroute] = identifier[Traceroute] ( identifier[dest_name] , identifier[dest_ip] ) identifier[matches_hop] = identifier[RE_HOP] . identifier[findall] ( identifier[data] ) keyword[for] identifier[match_hop] keyword[in] identifier[matches_hop] : identifier[idx] = identifier[int] ( identifier[match_hop] [ literal[int] ]) keyword[if] identifier[match_hop] [ literal[int] ]: identifier[asn] = identifier[int] ( identifier[match_hop] [ literal[int] ]) keyword[else] : identifier[asn] = keyword[None] identifier[hop] = identifier[Hop] ( identifier[idx] , identifier[asn] ) identifier[probes_data] = identifier[match_hop] [ literal[int] ]. identifier[split] () identifier[probes_data] = identifier[filter] ( keyword[lambda] identifier[s] : identifier[s] . identifier[lower] ()!= literal[string] , identifier[probes_data] ) identifier[i] = literal[int] keyword[while] identifier[i] < identifier[len] ( identifier[probes_data] ): identifier[name] = keyword[None] identifier[ip] = keyword[None] identifier[rtt] = keyword[None] identifier[anno] = literal[string] keyword[if] identifier[RE_PROBE_RTT] . identifier[match] ( identifier[probes_data] [ identifier[i] ]): identifier[rtt] = identifier[float] ( identifier[probes_data] [ identifier[i] ]) identifier[i] += literal[int] keyword[elif] identifier[RE_PROBE_NAME] . identifier[match] ( identifier[probes_data] [ identifier[i] ]): identifier[name] = identifier[probes_data] [ identifier[i] ] identifier[ip] = identifier[probes_data] [ identifier[i] + literal[int] ]. 
identifier[strip] ( literal[string] ) identifier[rtt] = identifier[float] ( identifier[probes_data] [ identifier[i] + literal[int] ]) identifier[i] += literal[int] keyword[elif] identifier[RE_PROBE_TIMEOUT] . identifier[match] ( identifier[probes_data] [ identifier[i] ]): identifier[rtt] = keyword[None] identifier[i] += literal[int] keyword[else] : identifier[ext] = literal[string] %( identifier[i] , identifier[probes_data] , identifier[name] , identifier[ip] , identifier[rtt] , identifier[anno] ) keyword[raise] identifier[ParseError] ( literal[string] % identifier[ext] ) keyword[try] : keyword[if] identifier[RE_PROBE_ANNOTATION] . identifier[match] ( identifier[probes_data] [ identifier[i] ]): identifier[anno] = identifier[probes_data] [ identifier[i] ] identifier[i] += literal[int] keyword[except] identifier[IndexError] : keyword[pass] identifier[probe] = identifier[Probe] ( identifier[name] , identifier[ip] , identifier[rtt] , identifier[anno] ) identifier[hop] . identifier[add_probe] ( identifier[probe] ) identifier[traceroute] . identifier[add_hop] ( identifier[hop] ) keyword[return] identifier[traceroute]
def loads(data): """Parser entry point. Parses the output of a traceroute execution""" data += '\n_EOS_' # Append EOS token. Helps to match last RE_HOP # Get headers match_dest = RE_HEADER.search(data) dest_name = match_dest.group(1) dest_ip = match_dest.group(2) # The Traceroute is the root of the tree. traceroute = Traceroute(dest_name, dest_ip) # Get hops matches_hop = RE_HOP.findall(data) for match_hop in matches_hop: # Initialize a hop idx = int(match_hop[0]) if match_hop[1]: asn = int(match_hop[1]) # depends on [control=['if'], data=[]] else: asn = None hop = Hop(idx, asn) # Parse probes data: <name> | <(IP)> | <rtt> | 'ms' | '*' probes_data = match_hop[2].split() # Get rid of 'ms': <name> | <(IP)> | <rtt> | '*' probes_data = filter(lambda s: s.lower() != 'ms', probes_data) i = 0 while i < len(probes_data): # For each hop parse probes name = None ip = None rtt = None anno = '' # RTT check comes first because RE_PROBE_NAME can confuse rtt with an IP as name # The regex RE_PROBE_NAME can be improved if RE_PROBE_RTT.match(probes_data[i]): # Matched rtt, so name and IP have been parsed before rtt = float(probes_data[i]) i += 1 # depends on [control=['if'], data=[]] elif RE_PROBE_NAME.match(probes_data[i]): # Matched a name, so next elements are IP and rtt name = probes_data[i] ip = probes_data[i + 1].strip('()') rtt = float(probes_data[i + 2]) i += 3 # depends on [control=['if'], data=[]] elif RE_PROBE_TIMEOUT.match(probes_data[i]): # Its a timeout, so maybe name and IP have been parsed before # or maybe not. 
But it's Hop job to deal with it rtt = None i += 1 # depends on [control=['if'], data=[]] else: ext = 'i: %d\nprobes_data: %s\nname: %s\nip: %s\nrtt: %s\nanno: %s' % (i, probes_data, name, ip, rtt, anno) raise ParseError('Parse error \n%s' % ext) # Check for annotation try: if RE_PROBE_ANNOTATION.match(probes_data[i]): anno = probes_data[i] i += 1 # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except IndexError: pass # depends on [control=['except'], data=[]] probe = Probe(name, ip, rtt, anno) hop.add_probe(probe) # depends on [control=['while'], data=['i']] traceroute.add_hop(hop) # depends on [control=['for'], data=['match_hop']] return traceroute
def vcontour(self, win, n, levels, labels=False, decimals=0, color=None, vinterp=True, nudge=1e-6, newfig=True, figsize=None, layout=True): """Vertical contour """ x1, x2, y1, y2 = win h = self.headalongline(np.linspace(x1 + nudge, x2 - nudge, n), np.linspace(y1 + nudge, y2 - nudge, n)) L = np.sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2) xg = np.linspace(0, L, n) if vinterp: zg = 0.5 * (self.aq.zaqbot + self.aq.zaqtop) zg = np.hstack((self.aq.zaqtop[0], zg, self.aq.zaqbot[-1])) h = np.vstack((h[0], h, h[-1])) else: zg = np.empty(2 * self.aq.naq) for i in range(self.aq.naq): zg[2 * i] = self.aq.zaqtop[i] zg[2 * i + 1] = self.aq.zaqbot[i] h = np.repeat(h, 2, 0) if newfig: plt.figure(figsize=figsize) cs = plt.contour(xg, zg, h, levels, colors=color) if labels: fmt = '%1.' + str(decimals) + 'f' plt.clabel(cs, fmt=fmt) if layout: self.plot(win=[x1, x2, y1, y2], orientation='ver', newfig=False)
def function[vcontour, parameter[self, win, n, levels, labels, decimals, color, vinterp, nudge, newfig, figsize, layout]]: constant[Vertical contour ] <ast.Tuple object at 0x7da1b0c51000> assign[=] name[win] variable[h] assign[=] call[name[self].headalongline, parameter[call[name[np].linspace, parameter[binary_operation[name[x1] + name[nudge]], binary_operation[name[x2] - name[nudge]], name[n]]], call[name[np].linspace, parameter[binary_operation[name[y1] + name[nudge]], binary_operation[name[y2] - name[nudge]], name[n]]]]] variable[L] assign[=] call[name[np].sqrt, parameter[binary_operation[binary_operation[binary_operation[name[x2] - name[x1]] ** constant[2]] + binary_operation[binary_operation[name[y2] - name[y1]] ** constant[2]]]]] variable[xg] assign[=] call[name[np].linspace, parameter[constant[0], name[L], name[n]]] if name[vinterp] begin[:] variable[zg] assign[=] binary_operation[constant[0.5] * binary_operation[name[self].aq.zaqbot + name[self].aq.zaqtop]] variable[zg] assign[=] call[name[np].hstack, parameter[tuple[[<ast.Subscript object at 0x7da1b0c53940>, <ast.Name object at 0x7da1b0c537c0>, <ast.Subscript object at 0x7da1b0c51c60>]]]] variable[h] assign[=] call[name[np].vstack, parameter[tuple[[<ast.Subscript object at 0x7da1b0ba6f80>, <ast.Name object at 0x7da1b0ba76d0>, <ast.Subscript object at 0x7da1b0ba40a0>]]]] if name[newfig] begin[:] call[name[plt].figure, parameter[]] variable[cs] assign[=] call[name[plt].contour, parameter[name[xg], name[zg], name[h], name[levels]]] if name[labels] begin[:] variable[fmt] assign[=] binary_operation[binary_operation[constant[%1.] + call[name[str], parameter[name[decimals]]]] + constant[f]] call[name[plt].clabel, parameter[name[cs]]] if name[layout] begin[:] call[name[self].plot, parameter[]]
keyword[def] identifier[vcontour] ( identifier[self] , identifier[win] , identifier[n] , identifier[levels] , identifier[labels] = keyword[False] , identifier[decimals] = literal[int] , identifier[color] = keyword[None] , identifier[vinterp] = keyword[True] , identifier[nudge] = literal[int] , identifier[newfig] = keyword[True] , identifier[figsize] = keyword[None] , identifier[layout] = keyword[True] ): literal[string] identifier[x1] , identifier[x2] , identifier[y1] , identifier[y2] = identifier[win] identifier[h] = identifier[self] . identifier[headalongline] ( identifier[np] . identifier[linspace] ( identifier[x1] + identifier[nudge] , identifier[x2] - identifier[nudge] , identifier[n] ), identifier[np] . identifier[linspace] ( identifier[y1] + identifier[nudge] , identifier[y2] - identifier[nudge] , identifier[n] )) identifier[L] = identifier[np] . identifier[sqrt] (( identifier[x2] - identifier[x1] )** literal[int] +( identifier[y2] - identifier[y1] )** literal[int] ) identifier[xg] = identifier[np] . identifier[linspace] ( literal[int] , identifier[L] , identifier[n] ) keyword[if] identifier[vinterp] : identifier[zg] = literal[int] *( identifier[self] . identifier[aq] . identifier[zaqbot] + identifier[self] . identifier[aq] . identifier[zaqtop] ) identifier[zg] = identifier[np] . identifier[hstack] (( identifier[self] . identifier[aq] . identifier[zaqtop] [ literal[int] ], identifier[zg] , identifier[self] . identifier[aq] . identifier[zaqbot] [- literal[int] ])) identifier[h] = identifier[np] . identifier[vstack] (( identifier[h] [ literal[int] ], identifier[h] , identifier[h] [- literal[int] ])) keyword[else] : identifier[zg] = identifier[np] . identifier[empty] ( literal[int] * identifier[self] . identifier[aq] . identifier[naq] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[aq] . identifier[naq] ): identifier[zg] [ literal[int] * identifier[i] ]= identifier[self] . identifier[aq] . 
identifier[zaqtop] [ identifier[i] ] identifier[zg] [ literal[int] * identifier[i] + literal[int] ]= identifier[self] . identifier[aq] . identifier[zaqbot] [ identifier[i] ] identifier[h] = identifier[np] . identifier[repeat] ( identifier[h] , literal[int] , literal[int] ) keyword[if] identifier[newfig] : identifier[plt] . identifier[figure] ( identifier[figsize] = identifier[figsize] ) identifier[cs] = identifier[plt] . identifier[contour] ( identifier[xg] , identifier[zg] , identifier[h] , identifier[levels] , identifier[colors] = identifier[color] ) keyword[if] identifier[labels] : identifier[fmt] = literal[string] + identifier[str] ( identifier[decimals] )+ literal[string] identifier[plt] . identifier[clabel] ( identifier[cs] , identifier[fmt] = identifier[fmt] ) keyword[if] identifier[layout] : identifier[self] . identifier[plot] ( identifier[win] =[ identifier[x1] , identifier[x2] , identifier[y1] , identifier[y2] ], identifier[orientation] = literal[string] , identifier[newfig] = keyword[False] )
def vcontour(self, win, n, levels, labels=False, decimals=0, color=None, vinterp=True, nudge=1e-06, newfig=True, figsize=None, layout=True): """Vertical contour """ (x1, x2, y1, y2) = win h = self.headalongline(np.linspace(x1 + nudge, x2 - nudge, n), np.linspace(y1 + nudge, y2 - nudge, n)) L = np.sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2) xg = np.linspace(0, L, n) if vinterp: zg = 0.5 * (self.aq.zaqbot + self.aq.zaqtop) zg = np.hstack((self.aq.zaqtop[0], zg, self.aq.zaqbot[-1])) h = np.vstack((h[0], h, h[-1])) # depends on [control=['if'], data=[]] else: zg = np.empty(2 * self.aq.naq) for i in range(self.aq.naq): zg[2 * i] = self.aq.zaqtop[i] zg[2 * i + 1] = self.aq.zaqbot[i] # depends on [control=['for'], data=['i']] h = np.repeat(h, 2, 0) if newfig: plt.figure(figsize=figsize) # depends on [control=['if'], data=[]] cs = plt.contour(xg, zg, h, levels, colors=color) if labels: fmt = '%1.' + str(decimals) + 'f' plt.clabel(cs, fmt=fmt) # depends on [control=['if'], data=[]] if layout: self.plot(win=[x1, x2, y1, y2], orientation='ver', newfig=False) # depends on [control=['if'], data=[]]
def to_graphml(graph: BELGraph, path: Union[str, BinaryIO]) -> None: """Write this graph to GraphML XML file using :func:`networkx.write_graphml`. The .graphml file extension is suggested so Cytoscape can recognize it. """ rv = nx.MultiDiGraph() for node in graph: rv.add_node(node.as_bel(), function=node.function) for u, v, key, edge_data in graph.edges(data=True, keys=True): rv.add_edge( u.as_bel(), v.as_bel(), interaction=edge_data[RELATION], bel=graph.edge_to_bel(u, v, edge_data), key=key, ) nx.write_graphml(rv, path)
def function[to_graphml, parameter[graph, path]]: constant[Write this graph to GraphML XML file using :func:`networkx.write_graphml`. The .graphml file extension is suggested so Cytoscape can recognize it. ] variable[rv] assign[=] call[name[nx].MultiDiGraph, parameter[]] for taget[name[node]] in starred[name[graph]] begin[:] call[name[rv].add_node, parameter[call[name[node].as_bel, parameter[]]]] for taget[tuple[[<ast.Name object at 0x7da1b0cb37c0>, <ast.Name object at 0x7da1b0cb1c30>, <ast.Name object at 0x7da1b0cb0160>, <ast.Name object at 0x7da1b0cb0250>]]] in starred[call[name[graph].edges, parameter[]]] begin[:] call[name[rv].add_edge, parameter[call[name[u].as_bel, parameter[]], call[name[v].as_bel, parameter[]]]] call[name[nx].write_graphml, parameter[name[rv], name[path]]]
keyword[def] identifier[to_graphml] ( identifier[graph] : identifier[BELGraph] , identifier[path] : identifier[Union] [ identifier[str] , identifier[BinaryIO] ])-> keyword[None] : literal[string] identifier[rv] = identifier[nx] . identifier[MultiDiGraph] () keyword[for] identifier[node] keyword[in] identifier[graph] : identifier[rv] . identifier[add_node] ( identifier[node] . identifier[as_bel] (), identifier[function] = identifier[node] . identifier[function] ) keyword[for] identifier[u] , identifier[v] , identifier[key] , identifier[edge_data] keyword[in] identifier[graph] . identifier[edges] ( identifier[data] = keyword[True] , identifier[keys] = keyword[True] ): identifier[rv] . identifier[add_edge] ( identifier[u] . identifier[as_bel] (), identifier[v] . identifier[as_bel] (), identifier[interaction] = identifier[edge_data] [ identifier[RELATION] ], identifier[bel] = identifier[graph] . identifier[edge_to_bel] ( identifier[u] , identifier[v] , identifier[edge_data] ), identifier[key] = identifier[key] , ) identifier[nx] . identifier[write_graphml] ( identifier[rv] , identifier[path] )
def to_graphml(graph: BELGraph, path: Union[str, BinaryIO]) -> None: """Write this graph to GraphML XML file using :func:`networkx.write_graphml`. The .graphml file extension is suggested so Cytoscape can recognize it. """ rv = nx.MultiDiGraph() for node in graph: rv.add_node(node.as_bel(), function=node.function) # depends on [control=['for'], data=['node']] for (u, v, key, edge_data) in graph.edges(data=True, keys=True): rv.add_edge(u.as_bel(), v.as_bel(), interaction=edge_data[RELATION], bel=graph.edge_to_bel(u, v, edge_data), key=key) # depends on [control=['for'], data=[]] nx.write_graphml(rv, path)
def get_configuration_file(): """ return jenks configuration file """ path = os.path.abspath(os.curdir) while path != os.sep: config_path = os.path.join(path, CONFIG_FILE_NAME) if os.path.exists(config_path): return config_path path = os.path.dirname(path) return None
def function[get_configuration_file, parameter[]]: constant[ return jenks configuration file ] variable[path] assign[=] call[name[os].path.abspath, parameter[name[os].curdir]] while compare[name[path] not_equal[!=] name[os].sep] begin[:] variable[config_path] assign[=] call[name[os].path.join, parameter[name[path], name[CONFIG_FILE_NAME]]] if call[name[os].path.exists, parameter[name[config_path]]] begin[:] return[name[config_path]] variable[path] assign[=] call[name[os].path.dirname, parameter[name[path]]] return[constant[None]]
keyword[def] identifier[get_configuration_file] (): literal[string] identifier[path] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[os] . identifier[curdir] ) keyword[while] identifier[path] != identifier[os] . identifier[sep] : identifier[config_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[CONFIG_FILE_NAME] ) keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[config_path] ): keyword[return] identifier[config_path] identifier[path] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[path] ) keyword[return] keyword[None]
def get_configuration_file(): """ return jenks configuration file """ path = os.path.abspath(os.curdir) while path != os.sep: config_path = os.path.join(path, CONFIG_FILE_NAME) if os.path.exists(config_path): return config_path # depends on [control=['if'], data=[]] path = os.path.dirname(path) # depends on [control=['while'], data=['path']] return None
def short_answer(question, answers, explanation=None): """ Generates a short answer question that allows user to input an answer in a textbox and a submit button to check the answer. Args: question (str): The question being asked. answers (str | list str | func): If a string, only that string will be marked correct. If a list of string, any string in the list will be marked correct. If a function, any input that causes the function to return True will be marked correct. explanation (str): The explanation to the question is displayed when the user inputs the correct answer. Returns: None >>> short_answer('What is 1 + 1?', '2', ... explanation='1+1 is 2') #doctest: +SKIP <What is 1+1?> <Input box, Submit button> >>> short_answer('Enter the first name of a member of the Beatles.', ... ['John', 'Paul', 'George', 'Ringo']) #doctest: +SKIP <Enter the first name of a member of the Beatles.> <Input box, Submit button> >>> short_answer('Enter an even number.', ... lambda x: int(x) % 2 == 0) #doctest: +SKIP <Enter an even number.> <Input box, Submit button> """ # Input textbox textbox = widgets.Text(placeholder='Write your answer here') # Submit button submit_button = widgets.Button(description='Submit') # Space right of the submit button to show checkmark/x-mark visual_correct = widgets.HTML() # Space below input line to display error if function call errored error_space = widgets.HTML() # Space below input line to display explanation if answer is correct explain_space = widgets.HTML() # correctness function linked to the submit button def check_answer(_): response = textbox.value if isinstance(answers, collections.Callable): try: error_space.value = '' correct = answers(response) except Exception as e: correct = False error_space.value = 'Error in checking answer: {}'.format(e) elif isinstance(answers, str): correct = response == answers elif isinstance(answers, collections.Iterable): correct = response in answers else: raise TypeError('The `answers` arg is an incorrect 
type.') visual_correct.value = CHECK_ICON if correct else X_ICON if correct and explanation: explain_space.value = explanation submit_button.on_click(check_answer) question_tag = widgets.HTML(TEXT_STYLE.format(question)) user_input_line = widgets.HBox([textbox, submit_button, visual_correct]) display( widgets.VBox([ question_tag, user_input_line, error_space, explain_space ]) )
def function[short_answer, parameter[question, answers, explanation]]: constant[ Generates a short answer question that allows user to input an answer in a textbox and a submit button to check the answer. Args: question (str): The question being asked. answers (str | list str | func): If a string, only that string will be marked correct. If a list of string, any string in the list will be marked correct. If a function, any input that causes the function to return True will be marked correct. explanation (str): The explanation to the question is displayed when the user inputs the correct answer. Returns: None >>> short_answer('What is 1 + 1?', '2', ... explanation='1+1 is 2') #doctest: +SKIP <What is 1+1?> <Input box, Submit button> >>> short_answer('Enter the first name of a member of the Beatles.', ... ['John', 'Paul', 'George', 'Ringo']) #doctest: +SKIP <Enter the first name of a member of the Beatles.> <Input box, Submit button> >>> short_answer('Enter an even number.', ... lambda x: int(x) % 2 == 0) #doctest: +SKIP <Enter an even number.> <Input box, Submit button> ] variable[textbox] assign[=] call[name[widgets].Text, parameter[]] variable[submit_button] assign[=] call[name[widgets].Button, parameter[]] variable[visual_correct] assign[=] call[name[widgets].HTML, parameter[]] variable[error_space] assign[=] call[name[widgets].HTML, parameter[]] variable[explain_space] assign[=] call[name[widgets].HTML, parameter[]] def function[check_answer, parameter[_]]: variable[response] assign[=] name[textbox].value if call[name[isinstance], parameter[name[answers], name[collections].Callable]] begin[:] <ast.Try object at 0x7da1b1803820> name[visual_correct].value assign[=] <ast.IfExp object at 0x7da1b1803790> if <ast.BoolOp object at 0x7da1b1802890> begin[:] name[explain_space].value assign[=] name[explanation] call[name[submit_button].on_click, parameter[name[check_answer]]] variable[question_tag] assign[=] call[name[widgets].HTML, parameter[call[name[TEXT_STYLE].format, 
parameter[name[question]]]]] variable[user_input_line] assign[=] call[name[widgets].HBox, parameter[list[[<ast.Name object at 0x7da1b1803a30>, <ast.Name object at 0x7da1b1803b50>, <ast.Name object at 0x7da1b1933220>]]]] call[name[display], parameter[call[name[widgets].VBox, parameter[list[[<ast.Name object at 0x7da1b1931240>, <ast.Name object at 0x7da1b19334f0>, <ast.Name object at 0x7da1b1930af0>, <ast.Name object at 0x7da1b1931c90>]]]]]]
keyword[def] identifier[short_answer] ( identifier[question] , identifier[answers] , identifier[explanation] = keyword[None] ): literal[string] identifier[textbox] = identifier[widgets] . identifier[Text] ( identifier[placeholder] = literal[string] ) identifier[submit_button] = identifier[widgets] . identifier[Button] ( identifier[description] = literal[string] ) identifier[visual_correct] = identifier[widgets] . identifier[HTML] () identifier[error_space] = identifier[widgets] . identifier[HTML] () identifier[explain_space] = identifier[widgets] . identifier[HTML] () keyword[def] identifier[check_answer] ( identifier[_] ): identifier[response] = identifier[textbox] . identifier[value] keyword[if] identifier[isinstance] ( identifier[answers] , identifier[collections] . identifier[Callable] ): keyword[try] : identifier[error_space] . identifier[value] = literal[string] identifier[correct] = identifier[answers] ( identifier[response] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[correct] = keyword[False] identifier[error_space] . identifier[value] = literal[string] . identifier[format] ( identifier[e] ) keyword[elif] identifier[isinstance] ( identifier[answers] , identifier[str] ): identifier[correct] = identifier[response] == identifier[answers] keyword[elif] identifier[isinstance] ( identifier[answers] , identifier[collections] . identifier[Iterable] ): identifier[correct] = identifier[response] keyword[in] identifier[answers] keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] ) identifier[visual_correct] . identifier[value] = identifier[CHECK_ICON] keyword[if] identifier[correct] keyword[else] identifier[X_ICON] keyword[if] identifier[correct] keyword[and] identifier[explanation] : identifier[explain_space] . identifier[value] = identifier[explanation] identifier[submit_button] . identifier[on_click] ( identifier[check_answer] ) identifier[question_tag] = identifier[widgets] . 
identifier[HTML] ( identifier[TEXT_STYLE] . identifier[format] ( identifier[question] )) identifier[user_input_line] = identifier[widgets] . identifier[HBox] ([ identifier[textbox] , identifier[submit_button] , identifier[visual_correct] ]) identifier[display] ( identifier[widgets] . identifier[VBox] ([ identifier[question_tag] , identifier[user_input_line] , identifier[error_space] , identifier[explain_space] ]) )
def short_answer(question, answers, explanation=None): """ Generates a short answer question that allows user to input an answer in a textbox and a submit button to check the answer. Args: question (str): The question being asked. answers (str | list str | func): If a string, only that string will be marked correct. If a list of string, any string in the list will be marked correct. If a function, any input that causes the function to return True will be marked correct. explanation (str): The explanation to the question is displayed when the user inputs the correct answer. Returns: None >>> short_answer('What is 1 + 1?', '2', ... explanation='1+1 is 2') #doctest: +SKIP <What is 1+1?> <Input box, Submit button> >>> short_answer('Enter the first name of a member of the Beatles.', ... ['John', 'Paul', 'George', 'Ringo']) #doctest: +SKIP <Enter the first name of a member of the Beatles.> <Input box, Submit button> >>> short_answer('Enter an even number.', ... lambda x: int(x) % 2 == 0) #doctest: +SKIP <Enter an even number.> <Input box, Submit button> """ # Input textbox textbox = widgets.Text(placeholder='Write your answer here') # Submit button submit_button = widgets.Button(description='Submit') # Space right of the submit button to show checkmark/x-mark visual_correct = widgets.HTML() # Space below input line to display error if function call errored error_space = widgets.HTML() # Space below input line to display explanation if answer is correct explain_space = widgets.HTML() # correctness function linked to the submit button def check_answer(_): response = textbox.value if isinstance(answers, collections.Callable): try: error_space.value = '' correct = answers(response) # depends on [control=['try'], data=[]] except Exception as e: correct = False error_space.value = 'Error in checking answer: {}'.format(e) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]] elif isinstance(answers, str): correct = response == answers # depends on 
[control=['if'], data=[]] elif isinstance(answers, collections.Iterable): correct = response in answers # depends on [control=['if'], data=[]] else: raise TypeError('The `answers` arg is an incorrect type.') visual_correct.value = CHECK_ICON if correct else X_ICON if correct and explanation: explain_space.value = explanation # depends on [control=['if'], data=[]] submit_button.on_click(check_answer) question_tag = widgets.HTML(TEXT_STYLE.format(question)) user_input_line = widgets.HBox([textbox, submit_button, visual_correct]) display(widgets.VBox([question_tag, user_input_line, error_space, explain_space]))
def create(cls, netlink, ip_range=None, netlink_role='active'): """ Create a multilink member. Multilink members are added to an Outbound Multilink configuration and define the ip range, static netlink to use, and the role. This element can be passed to the Multilink constructor to simplify creation of the outbound multilink. :param StaticNetlink,DynamicNetlink netlink: static netlink element to use as member :param str ip_range: the IP range for source NAT for this member. The IP range should be part of the defined network range used by this netlink. Not required for dynamic netlink :param str netlink_role: role of this netlink, 'active' or 'standby' :raises ElementNotFound: Specified netlink could not be found :rtype: MultilinkMember """ member_def = dict( netlink_ref=netlink.href, netlink_role=netlink_role, ip_range=ip_range if netlink.typeof == 'netlink' else '0.0.0.0') if netlink.typeof == 'netlink': # static netlink vs dynamic netlink member_def.update(network_ref=netlink.network[0].href) return cls(member_def)
def function[create, parameter[cls, netlink, ip_range, netlink_role]]: constant[ Create a multilink member. Multilink members are added to an Outbound Multilink configuration and define the ip range, static netlink to use, and the role. This element can be passed to the Multilink constructor to simplify creation of the outbound multilink. :param StaticNetlink,DynamicNetlink netlink: static netlink element to use as member :param str ip_range: the IP range for source NAT for this member. The IP range should be part of the defined network range used by this netlink. Not required for dynamic netlink :param str netlink_role: role of this netlink, 'active' or 'standby' :raises ElementNotFound: Specified netlink could not be found :rtype: MultilinkMember ] variable[member_def] assign[=] call[name[dict], parameter[]] if compare[name[netlink].typeof equal[==] constant[netlink]] begin[:] call[name[member_def].update, parameter[]] return[call[name[cls], parameter[name[member_def]]]]
keyword[def] identifier[create] ( identifier[cls] , identifier[netlink] , identifier[ip_range] = keyword[None] , identifier[netlink_role] = literal[string] ): literal[string] identifier[member_def] = identifier[dict] ( identifier[netlink_ref] = identifier[netlink] . identifier[href] , identifier[netlink_role] = identifier[netlink_role] , identifier[ip_range] = identifier[ip_range] keyword[if] identifier[netlink] . identifier[typeof] == literal[string] keyword[else] literal[string] ) keyword[if] identifier[netlink] . identifier[typeof] == literal[string] : identifier[member_def] . identifier[update] ( identifier[network_ref] = identifier[netlink] . identifier[network] [ literal[int] ]. identifier[href] ) keyword[return] identifier[cls] ( identifier[member_def] )
def create(cls, netlink, ip_range=None, netlink_role='active'): """ Create a multilink member. Multilink members are added to an Outbound Multilink configuration and define the ip range, static netlink to use, and the role. This element can be passed to the Multilink constructor to simplify creation of the outbound multilink. :param StaticNetlink,DynamicNetlink netlink: static netlink element to use as member :param str ip_range: the IP range for source NAT for this member. The IP range should be part of the defined network range used by this netlink. Not required for dynamic netlink :param str netlink_role: role of this netlink, 'active' or 'standby' :raises ElementNotFound: Specified netlink could not be found :rtype: MultilinkMember """ member_def = dict(netlink_ref=netlink.href, netlink_role=netlink_role, ip_range=ip_range if netlink.typeof == 'netlink' else '0.0.0.0') if netlink.typeof == 'netlink': # static netlink vs dynamic netlink member_def.update(network_ref=netlink.network[0].href) # depends on [control=['if'], data=[]] return cls(member_def)
def sorted(self, key=None, reverse=False): """ Uses python sort and its passed arguments to sort the input. >>> seq([2, 1, 4, 3]).sorted() [1, 2, 3, 4] :param key: sort using key function :param reverse: return list reversed or not :return: sorted sequence """ return self._transform(transformations.sorted_t(key=key, reverse=reverse))
def function[sorted, parameter[self, key, reverse]]: constant[ Uses python sort and its passed arguments to sort the input. >>> seq([2, 1, 4, 3]).sorted() [1, 2, 3, 4] :param key: sort using key function :param reverse: return list reversed or not :return: sorted sequence ] return[call[name[self]._transform, parameter[call[name[transformations].sorted_t, parameter[]]]]]
keyword[def] identifier[sorted] ( identifier[self] , identifier[key] = keyword[None] , identifier[reverse] = keyword[False] ): literal[string] keyword[return] identifier[self] . identifier[_transform] ( identifier[transformations] . identifier[sorted_t] ( identifier[key] = identifier[key] , identifier[reverse] = identifier[reverse] ))
def sorted(self, key=None, reverse=False): """ Uses python sort and its passed arguments to sort the input. >>> seq([2, 1, 4, 3]).sorted() [1, 2, 3, 4] :param key: sort using key function :param reverse: return list reversed or not :return: sorted sequence """ return self._transform(transformations.sorted_t(key=key, reverse=reverse))
def rcm_vertex_order(vertices_resources, nets): """A generator which iterates over the vertices in Reverse-Cuthill-McKee order. For use as a vertex ordering for the sequential placer. """ vertices_neighbours = _get_vertices_neighbours(nets) for subgraph_vertices in _get_connected_subgraphs(vertices_resources, vertices_neighbours): cm_order = _cuthill_mckee(subgraph_vertices, vertices_neighbours) for vertex in reversed(cm_order): yield vertex
def function[rcm_vertex_order, parameter[vertices_resources, nets]]: constant[A generator which iterates over the vertices in Reverse-Cuthill-McKee order. For use as a vertex ordering for the sequential placer. ] variable[vertices_neighbours] assign[=] call[name[_get_vertices_neighbours], parameter[name[nets]]] for taget[name[subgraph_vertices]] in starred[call[name[_get_connected_subgraphs], parameter[name[vertices_resources], name[vertices_neighbours]]]] begin[:] variable[cm_order] assign[=] call[name[_cuthill_mckee], parameter[name[subgraph_vertices], name[vertices_neighbours]]] for taget[name[vertex]] in starred[call[name[reversed], parameter[name[cm_order]]]] begin[:] <ast.Yield object at 0x7da1b1859360>
keyword[def] identifier[rcm_vertex_order] ( identifier[vertices_resources] , identifier[nets] ): literal[string] identifier[vertices_neighbours] = identifier[_get_vertices_neighbours] ( identifier[nets] ) keyword[for] identifier[subgraph_vertices] keyword[in] identifier[_get_connected_subgraphs] ( identifier[vertices_resources] , identifier[vertices_neighbours] ): identifier[cm_order] = identifier[_cuthill_mckee] ( identifier[subgraph_vertices] , identifier[vertices_neighbours] ) keyword[for] identifier[vertex] keyword[in] identifier[reversed] ( identifier[cm_order] ): keyword[yield] identifier[vertex]
def rcm_vertex_order(vertices_resources, nets): """A generator which iterates over the vertices in Reverse-Cuthill-McKee order. For use as a vertex ordering for the sequential placer. """ vertices_neighbours = _get_vertices_neighbours(nets) for subgraph_vertices in _get_connected_subgraphs(vertices_resources, vertices_neighbours): cm_order = _cuthill_mckee(subgraph_vertices, vertices_neighbours) for vertex in reversed(cm_order): yield vertex # depends on [control=['for'], data=['vertex']] # depends on [control=['for'], data=['subgraph_vertices']]
def on_person_update(self, people): """ People have changed Should always include all people (all that were added via on_person_new) :param people: People to update :type people: list[paps.people.People] :rtype: None :raises Exception: On error (for now just an exception) """ try: self.sensor_client.person_update(people) except: self.exception("Failed to update people") raise Exception("Updating people failed")
def function[on_person_update, parameter[self, people]]: constant[ People have changed Should always include all people (all that were added via on_person_new) :param people: People to update :type people: list[paps.people.People] :rtype: None :raises Exception: On error (for now just an exception) ] <ast.Try object at 0x7da20c76d750>
keyword[def] identifier[on_person_update] ( identifier[self] , identifier[people] ): literal[string] keyword[try] : identifier[self] . identifier[sensor_client] . identifier[person_update] ( identifier[people] ) keyword[except] : identifier[self] . identifier[exception] ( literal[string] ) keyword[raise] identifier[Exception] ( literal[string] )
def on_person_update(self, people): """ People have changed Should always include all people (all that were added via on_person_new) :param people: People to update :type people: list[paps.people.People] :rtype: None :raises Exception: On error (for now just an exception) """ try: self.sensor_client.person_update(people) # depends on [control=['try'], data=[]] except: self.exception('Failed to update people') raise Exception('Updating people failed') # depends on [control=['except'], data=[]]
def open_stream(self, class_attr_name=None, fn=None): """ Save an arff structure to a file, leaving the file object open for writing of new data samples. This prevents you from directly accessing the data via Python, but when generating a huge file, this prevents all your data from being stored in memory. """ if fn: self.fout_fn = fn else: fd, self.fout_fn = tempfile.mkstemp() os.close(fd) self.fout = open(self.fout_fn, 'w') if class_attr_name: self.class_attr_name = class_attr_name self.write(fout=self.fout, schema_only=True) self.write(fout=self.fout, data_only=True) self.fout.flush()
def function[open_stream, parameter[self, class_attr_name, fn]]: constant[ Save an arff structure to a file, leaving the file object open for writing of new data samples. This prevents you from directly accessing the data via Python, but when generating a huge file, this prevents all your data from being stored in memory. ] if name[fn] begin[:] name[self].fout_fn assign[=] name[fn] name[self].fout assign[=] call[name[open], parameter[name[self].fout_fn, constant[w]]] if name[class_attr_name] begin[:] name[self].class_attr_name assign[=] name[class_attr_name] call[name[self].write, parameter[]] call[name[self].write, parameter[]] call[name[self].fout.flush, parameter[]]
keyword[def] identifier[open_stream] ( identifier[self] , identifier[class_attr_name] = keyword[None] , identifier[fn] = keyword[None] ): literal[string] keyword[if] identifier[fn] : identifier[self] . identifier[fout_fn] = identifier[fn] keyword[else] : identifier[fd] , identifier[self] . identifier[fout_fn] = identifier[tempfile] . identifier[mkstemp] () identifier[os] . identifier[close] ( identifier[fd] ) identifier[self] . identifier[fout] = identifier[open] ( identifier[self] . identifier[fout_fn] , literal[string] ) keyword[if] identifier[class_attr_name] : identifier[self] . identifier[class_attr_name] = identifier[class_attr_name] identifier[self] . identifier[write] ( identifier[fout] = identifier[self] . identifier[fout] , identifier[schema_only] = keyword[True] ) identifier[self] . identifier[write] ( identifier[fout] = identifier[self] . identifier[fout] , identifier[data_only] = keyword[True] ) identifier[self] . identifier[fout] . identifier[flush] ()
def open_stream(self, class_attr_name=None, fn=None): """ Save an arff structure to a file, leaving the file object open for writing of new data samples. This prevents you from directly accessing the data via Python, but when generating a huge file, this prevents all your data from being stored in memory. """ if fn: self.fout_fn = fn # depends on [control=['if'], data=[]] else: (fd, self.fout_fn) = tempfile.mkstemp() os.close(fd) self.fout = open(self.fout_fn, 'w') if class_attr_name: self.class_attr_name = class_attr_name # depends on [control=['if'], data=[]] self.write(fout=self.fout, schema_only=True) self.write(fout=self.fout, data_only=True) self.fout.flush()
def unwatch(self): """Get rid of any lingering watchers and remove from list""" if self.watchers is not None: unwatched = [] for watcher in self.watchers: watcher.inst.param.unwatch(watcher) unwatched.append(watcher) self.watchers = [w for w in self.watchers if w not in unwatched]
def function[unwatch, parameter[self]]: constant[Get rid of any lingering watchers and remove from list] if compare[name[self].watchers is_not constant[None]] begin[:] variable[unwatched] assign[=] list[[]] for taget[name[watcher]] in starred[name[self].watchers] begin[:] call[name[watcher].inst.param.unwatch, parameter[name[watcher]]] call[name[unwatched].append, parameter[name[watcher]]] name[self].watchers assign[=] <ast.ListComp object at 0x7da1b17f5540>
keyword[def] identifier[unwatch] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[watchers] keyword[is] keyword[not] keyword[None] : identifier[unwatched] =[] keyword[for] identifier[watcher] keyword[in] identifier[self] . identifier[watchers] : identifier[watcher] . identifier[inst] . identifier[param] . identifier[unwatch] ( identifier[watcher] ) identifier[unwatched] . identifier[append] ( identifier[watcher] ) identifier[self] . identifier[watchers] =[ identifier[w] keyword[for] identifier[w] keyword[in] identifier[self] . identifier[watchers] keyword[if] identifier[w] keyword[not] keyword[in] identifier[unwatched] ]
def unwatch(self): """Get rid of any lingering watchers and remove from list""" if self.watchers is not None: unwatched = [] for watcher in self.watchers: watcher.inst.param.unwatch(watcher) unwatched.append(watcher) # depends on [control=['for'], data=['watcher']] self.watchers = [w for w in self.watchers if w not in unwatched] # depends on [control=['if'], data=[]]
def intake_path_dirs(path): """Return a list of directories from the intake path. If a string, perhaps taken from an environment variable, then the list of paths will be split on the character ":" for posix of ";" for windows. Protocol indicators ("protocol://") will be ignored. """ if isinstance(path, (list, tuple)): return path import re pattern = re.compile(";" if os.name == 'nt' else r"(?<!:):(?![:/])") return pattern.split(path)
def function[intake_path_dirs, parameter[path]]: constant[Return a list of directories from the intake path. If a string, perhaps taken from an environment variable, then the list of paths will be split on the character ":" for posix of ";" for windows. Protocol indicators ("protocol://") will be ignored. ] if call[name[isinstance], parameter[name[path], tuple[[<ast.Name object at 0x7da1b1720df0>, <ast.Name object at 0x7da1b1720730>]]]] begin[:] return[name[path]] import module[re] variable[pattern] assign[=] call[name[re].compile, parameter[<ast.IfExp object at 0x7da1b1721120>]] return[call[name[pattern].split, parameter[name[path]]]]
keyword[def] identifier[intake_path_dirs] ( identifier[path] ): literal[string] keyword[if] identifier[isinstance] ( identifier[path] ,( identifier[list] , identifier[tuple] )): keyword[return] identifier[path] keyword[import] identifier[re] identifier[pattern] = identifier[re] . identifier[compile] ( literal[string] keyword[if] identifier[os] . identifier[name] == literal[string] keyword[else] literal[string] ) keyword[return] identifier[pattern] . identifier[split] ( identifier[path] )
def intake_path_dirs(path): """Return a list of directories from the intake path. If a string, perhaps taken from an environment variable, then the list of paths will be split on the character ":" for posix of ";" for windows. Protocol indicators ("protocol://") will be ignored. """ if isinstance(path, (list, tuple)): return path # depends on [control=['if'], data=[]] import re pattern = re.compile(';' if os.name == 'nt' else '(?<!:):(?![:/])') return pattern.split(path)
def load_hpo_term(self, hpo_obj): """Add a hpo object Arguments: hpo_obj(dict) """ LOG.debug("Loading hpo term %s into database", hpo_obj['_id']) try: self.hpo_term_collection.insert_one(hpo_obj) except DuplicateKeyError as err: raise IntegrityError("Hpo term %s already exists in database".format(hpo_obj['_id'])) LOG.debug("Hpo term saved")
def function[load_hpo_term, parameter[self, hpo_obj]]: constant[Add a hpo object Arguments: hpo_obj(dict) ] call[name[LOG].debug, parameter[constant[Loading hpo term %s into database], call[name[hpo_obj]][constant[_id]]]] <ast.Try object at 0x7da204345e40> call[name[LOG].debug, parameter[constant[Hpo term saved]]]
keyword[def] identifier[load_hpo_term] ( identifier[self] , identifier[hpo_obj] ): literal[string] identifier[LOG] . identifier[debug] ( literal[string] , identifier[hpo_obj] [ literal[string] ]) keyword[try] : identifier[self] . identifier[hpo_term_collection] . identifier[insert_one] ( identifier[hpo_obj] ) keyword[except] identifier[DuplicateKeyError] keyword[as] identifier[err] : keyword[raise] identifier[IntegrityError] ( literal[string] . identifier[format] ( identifier[hpo_obj] [ literal[string] ])) identifier[LOG] . identifier[debug] ( literal[string] )
def load_hpo_term(self, hpo_obj): """Add a hpo object Arguments: hpo_obj(dict) """ LOG.debug('Loading hpo term %s into database', hpo_obj['_id']) try: self.hpo_term_collection.insert_one(hpo_obj) # depends on [control=['try'], data=[]] except DuplicateKeyError as err: raise IntegrityError('Hpo term %s already exists in database'.format(hpo_obj['_id'])) # depends on [control=['except'], data=[]] LOG.debug('Hpo term saved')
def get_visual_content(self, id_or_uri): """ Gets a list of visual content objects describing each rack within the data center. The response aggregates data center and rack data with a specified metric (peak24HourTemp) to provide simplified access to display data for the data center. Args: id_or_uri: Can be either the resource ID or the resource URI. Return: list: List of visual content objects. """ uri = self._client.build_uri(id_or_uri) + "/visualContent" return self._client.get(uri)
def function[get_visual_content, parameter[self, id_or_uri]]: constant[ Gets a list of visual content objects describing each rack within the data center. The response aggregates data center and rack data with a specified metric (peak24HourTemp) to provide simplified access to display data for the data center. Args: id_or_uri: Can be either the resource ID or the resource URI. Return: list: List of visual content objects. ] variable[uri] assign[=] binary_operation[call[name[self]._client.build_uri, parameter[name[id_or_uri]]] + constant[/visualContent]] return[call[name[self]._client.get, parameter[name[uri]]]]
keyword[def] identifier[get_visual_content] ( identifier[self] , identifier[id_or_uri] ): literal[string] identifier[uri] = identifier[self] . identifier[_client] . identifier[build_uri] ( identifier[id_or_uri] )+ literal[string] keyword[return] identifier[self] . identifier[_client] . identifier[get] ( identifier[uri] )
def get_visual_content(self, id_or_uri): """ Gets a list of visual content objects describing each rack within the data center. The response aggregates data center and rack data with a specified metric (peak24HourTemp) to provide simplified access to display data for the data center. Args: id_or_uri: Can be either the resource ID or the resource URI. Return: list: List of visual content objects. """ uri = self._client.build_uri(id_or_uri) + '/visualContent' return self._client.get(uri)
def installed(name, # pylint: disable=C0103 ruby=None, gem_bin=None, user=None, version=None, rdoc=False, ri=False, pre_releases=False, proxy=None, source=None): # pylint: disable=C0103 ''' Make sure that a gem is installed. name The name of the gem to install ruby: None Only for RVM or rbenv installations: the ruby version and gemset to target. gem_bin: None Custom ``gem`` command to run instead of the default. Use this to install gems to a non-default ruby install. If you are using rvm or rbenv use the ruby argument instead. user: None The user under which to run the ``gem`` command .. versionadded:: 0.17.0 version : None Specify the version to install for the gem. Doesn't play nice with multiple gems at once rdoc : False Generate RDoc documentation for the gem(s). ri : False Generate RI documentation for the gem(s). pre_releases : False Install pre-release version of gem(s) if available. proxy : None Use the specified HTTP proxy server for all outgoing traffic. Format: http://hostname[:port] source : None Use the specified HTTP gem source server to download gem. 
Format: http://hostname[:port] ''' ret = {'name': name, 'result': None, 'comment': '', 'changes': {}} if ruby is not None and not(__salt__['rvm.is_installed'](runas=user) or __salt__['rbenv.is_installed'](runas=user)): log.warning( 'Use of argument ruby found, but neither rvm or rbenv is installed' ) gems = __salt__['gem.list'](name, ruby, gem_bin=gem_bin, runas=user) if name in gems and version is not None: versions = list([x.replace('default: ', '') for x in gems[name]]) match = re.match(r'(>=|>|<|<=)', version) if match: # Grab the comparison cmpr = match.group() # Clear out 'default:' and any whitespace installed_version = re.sub('default: ', '', gems[name][0]).strip() # Clear out comparison from version and whitespace desired_version = re.sub(cmpr, '', version).strip() if salt.utils.versions.compare(installed_version, cmpr, desired_version): ret['result'] = True ret['comment'] = 'Installed Gem meets version requirements.' return ret elif str(version) in versions: ret['result'] = True ret['comment'] = 'Gem is already installed.' return ret else: if str(version) in gems[name]: ret['result'] = True ret['comment'] = 'Gem is already installed.' return ret elif name in gems and version is None: ret['result'] = True ret['comment'] = 'Gem is already installed.' return ret if __opts__['test']: ret['comment'] = 'The gem {0} would have been installed'.format(name) return ret if __salt__['gem.install'](name, ruby=ruby, gem_bin=gem_bin, runas=user, version=version, rdoc=rdoc, ri=ri, pre_releases=pre_releases, proxy=proxy, source=source): ret['result'] = True ret['changes'][name] = 'Installed' ret['comment'] = 'Gem was successfully installed' else: ret['result'] = False ret['comment'] = 'Could not install gem.' return ret
def function[installed, parameter[name, ruby, gem_bin, user, version, rdoc, ri, pre_releases, proxy, source]]: constant[ Make sure that a gem is installed. name The name of the gem to install ruby: None Only for RVM or rbenv installations: the ruby version and gemset to target. gem_bin: None Custom ``gem`` command to run instead of the default. Use this to install gems to a non-default ruby install. If you are using rvm or rbenv use the ruby argument instead. user: None The user under which to run the ``gem`` command .. versionadded:: 0.17.0 version : None Specify the version to install for the gem. Doesn't play nice with multiple gems at once rdoc : False Generate RDoc documentation for the gem(s). ri : False Generate RI documentation for the gem(s). pre_releases : False Install pre-release version of gem(s) if available. proxy : None Use the specified HTTP proxy server for all outgoing traffic. Format: http://hostname[:port] source : None Use the specified HTTP gem source server to download gem. 
Format: http://hostname[:port] ] variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da18f00e3b0>, <ast.Constant object at 0x7da18f00c460>, <ast.Constant object at 0x7da18f00ee60>, <ast.Constant object at 0x7da18f00dd20>], [<ast.Name object at 0x7da18f00fc40>, <ast.Constant object at 0x7da18f00d2d0>, <ast.Constant object at 0x7da18f00f5e0>, <ast.Dict object at 0x7da18f00e740>]] if <ast.BoolOp object at 0x7da18f00e920> begin[:] call[name[log].warning, parameter[constant[Use of argument ruby found, but neither rvm or rbenv is installed]]] variable[gems] assign[=] call[call[name[__salt__]][constant[gem.list]], parameter[name[name], name[ruby]]] if <ast.BoolOp object at 0x7da204345c90> begin[:] variable[versions] assign[=] call[name[list], parameter[<ast.ListComp object at 0x7da2043467a0>]] variable[match] assign[=] call[name[re].match, parameter[constant[(>=|>|<|<=)], name[version]]] if name[match] begin[:] variable[cmpr] assign[=] call[name[match].group, parameter[]] variable[installed_version] assign[=] call[call[name[re].sub, parameter[constant[default: ], constant[], call[call[name[gems]][name[name]]][constant[0]]]].strip, parameter[]] variable[desired_version] assign[=] call[call[name[re].sub, parameter[name[cmpr], constant[], name[version]]].strip, parameter[]] if call[name[salt].utils.versions.compare, parameter[name[installed_version], name[cmpr], name[desired_version]]] begin[:] call[name[ret]][constant[result]] assign[=] constant[True] call[name[ret]][constant[comment]] assign[=] constant[Installed Gem meets version requirements.] 
return[name[ret]] if call[name[__opts__]][constant[test]] begin[:] call[name[ret]][constant[comment]] assign[=] call[constant[The gem {0} would have been installed].format, parameter[name[name]]] return[name[ret]] if call[call[name[__salt__]][constant[gem.install]], parameter[name[name]]] begin[:] call[name[ret]][constant[result]] assign[=] constant[True] call[call[name[ret]][constant[changes]]][name[name]] assign[=] constant[Installed] call[name[ret]][constant[comment]] assign[=] constant[Gem was successfully installed] return[name[ret]]
keyword[def] identifier[installed] ( identifier[name] , identifier[ruby] = keyword[None] , identifier[gem_bin] = keyword[None] , identifier[user] = keyword[None] , identifier[version] = keyword[None] , identifier[rdoc] = keyword[False] , identifier[ri] = keyword[False] , identifier[pre_releases] = keyword[False] , identifier[proxy] = keyword[None] , identifier[source] = keyword[None] ): literal[string] identifier[ret] ={ literal[string] : identifier[name] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] :{}} keyword[if] identifier[ruby] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] ( identifier[__salt__] [ literal[string] ]( identifier[runas] = identifier[user] ) keyword[or] identifier[__salt__] [ literal[string] ]( identifier[runas] = identifier[user] )): identifier[log] . identifier[warning] ( literal[string] ) identifier[gems] = identifier[__salt__] [ literal[string] ]( identifier[name] , identifier[ruby] , identifier[gem_bin] = identifier[gem_bin] , identifier[runas] = identifier[user] ) keyword[if] identifier[name] keyword[in] identifier[gems] keyword[and] identifier[version] keyword[is] keyword[not] keyword[None] : identifier[versions] = identifier[list] ([ identifier[x] . identifier[replace] ( literal[string] , literal[string] ) keyword[for] identifier[x] keyword[in] identifier[gems] [ identifier[name] ]]) identifier[match] = identifier[re] . identifier[match] ( literal[string] , identifier[version] ) keyword[if] identifier[match] : identifier[cmpr] = identifier[match] . identifier[group] () identifier[installed_version] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[gems] [ identifier[name] ][ literal[int] ]). identifier[strip] () identifier[desired_version] = identifier[re] . identifier[sub] ( identifier[cmpr] , literal[string] , identifier[version] ). identifier[strip] () keyword[if] identifier[salt] . identifier[utils] . identifier[versions] . 
identifier[compare] ( identifier[installed_version] , identifier[cmpr] , identifier[desired_version] ): identifier[ret] [ literal[string] ]= keyword[True] identifier[ret] [ literal[string] ]= literal[string] keyword[return] identifier[ret] keyword[elif] identifier[str] ( identifier[version] ) keyword[in] identifier[versions] : identifier[ret] [ literal[string] ]= keyword[True] identifier[ret] [ literal[string] ]= literal[string] keyword[return] identifier[ret] keyword[else] : keyword[if] identifier[str] ( identifier[version] ) keyword[in] identifier[gems] [ identifier[name] ]: identifier[ret] [ literal[string] ]= keyword[True] identifier[ret] [ literal[string] ]= literal[string] keyword[return] identifier[ret] keyword[elif] identifier[name] keyword[in] identifier[gems] keyword[and] identifier[version] keyword[is] keyword[None] : identifier[ret] [ literal[string] ]= keyword[True] identifier[ret] [ literal[string] ]= literal[string] keyword[return] identifier[ret] keyword[if] identifier[__opts__] [ literal[string] ]: identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] ) keyword[return] identifier[ret] keyword[if] identifier[__salt__] [ literal[string] ]( identifier[name] , identifier[ruby] = identifier[ruby] , identifier[gem_bin] = identifier[gem_bin] , identifier[runas] = identifier[user] , identifier[version] = identifier[version] , identifier[rdoc] = identifier[rdoc] , identifier[ri] = identifier[ri] , identifier[pre_releases] = identifier[pre_releases] , identifier[proxy] = identifier[proxy] , identifier[source] = identifier[source] ): identifier[ret] [ literal[string] ]= keyword[True] identifier[ret] [ literal[string] ][ identifier[name] ]= literal[string] identifier[ret] [ literal[string] ]= literal[string] keyword[else] : identifier[ret] [ literal[string] ]= keyword[False] identifier[ret] [ literal[string] ]= literal[string] keyword[return] identifier[ret]
def installed(name, ruby=None, gem_bin=None, user=None, version=None, rdoc=False, ri=False, pre_releases=False, proxy=None, source=None): # pylint: disable=C0103 # pylint: disable=C0103 "\n Make sure that a gem is installed.\n\n name\n The name of the gem to install\n\n ruby: None\n Only for RVM or rbenv installations: the ruby version and gemset to\n target.\n\n gem_bin: None\n Custom ``gem`` command to run instead of the default.\n Use this to install gems to a non-default ruby install. If you are\n using rvm or rbenv use the ruby argument instead.\n\n user: None\n The user under which to run the ``gem`` command\n\n .. versionadded:: 0.17.0\n\n version : None\n Specify the version to install for the gem.\n Doesn't play nice with multiple gems at once\n\n rdoc : False\n Generate RDoc documentation for the gem(s).\n\n ri : False\n Generate RI documentation for the gem(s).\n\n pre_releases : False\n Install pre-release version of gem(s) if available.\n\n proxy : None\n Use the specified HTTP proxy server for all outgoing traffic.\n Format: http://hostname[:port]\n\n source : None\n Use the specified HTTP gem source server to download gem.\n Format: http://hostname[:port]\n " ret = {'name': name, 'result': None, 'comment': '', 'changes': {}} if ruby is not None and (not (__salt__['rvm.is_installed'](runas=user) or __salt__['rbenv.is_installed'](runas=user))): log.warning('Use of argument ruby found, but neither rvm or rbenv is installed') # depends on [control=['if'], data=[]] gems = __salt__['gem.list'](name, ruby, gem_bin=gem_bin, runas=user) if name in gems and version is not None: versions = list([x.replace('default: ', '') for x in gems[name]]) match = re.match('(>=|>|<|<=)', version) if match: # Grab the comparison cmpr = match.group() # Clear out 'default:' and any whitespace installed_version = re.sub('default: ', '', gems[name][0]).strip() # Clear out comparison from version and whitespace desired_version = re.sub(cmpr, '', version).strip() if 
salt.utils.versions.compare(installed_version, cmpr, desired_version): ret['result'] = True ret['comment'] = 'Installed Gem meets version requirements.' return ret # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif str(version) in versions: ret['result'] = True ret['comment'] = 'Gem is already installed.' return ret # depends on [control=['if'], data=[]] elif str(version) in gems[name]: ret['result'] = True ret['comment'] = 'Gem is already installed.' return ret # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif name in gems and version is None: ret['result'] = True ret['comment'] = 'Gem is already installed.' return ret # depends on [control=['if'], data=[]] if __opts__['test']: ret['comment'] = 'The gem {0} would have been installed'.format(name) return ret # depends on [control=['if'], data=[]] if __salt__['gem.install'](name, ruby=ruby, gem_bin=gem_bin, runas=user, version=version, rdoc=rdoc, ri=ri, pre_releases=pre_releases, proxy=proxy, source=source): ret['result'] = True ret['changes'][name] = 'Installed' ret['comment'] = 'Gem was successfully installed' # depends on [control=['if'], data=[]] else: ret['result'] = False ret['comment'] = 'Could not install gem.' return ret
def JoinPath(stem="", *parts): """A sane version of os.path.join. The intention here is to append the stem to the path. The standard module removes the path if the stem begins with a /. Args: stem: The stem to join to. *parts: parts of the path to join. The first arg is always the root and directory traversal is not allowed. Returns: a normalized path. """ # Ensure all path components are unicode parts = [SmartUnicode(path) for path in parts] result = (stem + NormalizePath(u"/".join(parts))).replace("//", "/") result = result.rstrip("/") return result or "/"
def function[JoinPath, parameter[stem]]: constant[A sane version of os.path.join. The intention here is to append the stem to the path. The standard module removes the path if the stem begins with a /. Args: stem: The stem to join to. *parts: parts of the path to join. The first arg is always the root and directory traversal is not allowed. Returns: a normalized path. ] variable[parts] assign[=] <ast.ListComp object at 0x7da1b1c3d9c0> variable[result] assign[=] call[binary_operation[name[stem] + call[name[NormalizePath], parameter[call[constant[/].join, parameter[name[parts]]]]]].replace, parameter[constant[//], constant[/]]] variable[result] assign[=] call[name[result].rstrip, parameter[constant[/]]] return[<ast.BoolOp object at 0x7da1b1c3c310>]
keyword[def] identifier[JoinPath] ( identifier[stem] = literal[string] ,* identifier[parts] ): literal[string] identifier[parts] =[ identifier[SmartUnicode] ( identifier[path] ) keyword[for] identifier[path] keyword[in] identifier[parts] ] identifier[result] =( identifier[stem] + identifier[NormalizePath] ( literal[string] . identifier[join] ( identifier[parts] ))). identifier[replace] ( literal[string] , literal[string] ) identifier[result] = identifier[result] . identifier[rstrip] ( literal[string] ) keyword[return] identifier[result] keyword[or] literal[string]
def JoinPath(stem='', *parts): """A sane version of os.path.join. The intention here is to append the stem to the path. The standard module removes the path if the stem begins with a /. Args: stem: The stem to join to. *parts: parts of the path to join. The first arg is always the root and directory traversal is not allowed. Returns: a normalized path. """ # Ensure all path components are unicode parts = [SmartUnicode(path) for path in parts] result = (stem + NormalizePath(u'/'.join(parts))).replace('//', '/') result = result.rstrip('/') return result or '/'
def get_article_content(self, url, del_qqmusic=True, del_mpvoice=True, unlock_callback=None, identify_image_callback=None, hosting_callback=None, raw=False): """获取文章原文,避免临时链接失效 Parameters ---------- url : str or unicode 原文链接,临时链接 raw : bool True: 返回原始html False: 返回处理后的html del_qqmusic: bool True:微信原文中有插入的qq音乐,则删除 False:微信源文中有插入的qq音乐,则保留 del_mpvoice: bool True:微信原文中有插入的语音消息,则删除 False:微信源文中有插入的语音消息,则保留 unlock_callback : callable 处理 文章明细 的时候出现验证码的函数,参见 unlock_callback_example identify_image_callback : callable 处理 文章明细 的时候处理验证码函数,输入验证码二进制数据,输出文字,参见 identify_image_callback_example hosting_callback: callable 将微信采集的文章托管到7牛或者阿里云回调函数,输入微信图片源地址,返回托管后地址 Returns ------- content_html 原文内容 content_img_list 文章中图片列表 Raises ------ WechatSogouRequestsException """ resp = self.__get_by_unlock(url, unlock_platform=self.__unlock_wechat, unlock_callback=unlock_callback, identify_image_callback=identify_image_callback) resp.encoding = 'utf-8' if '链接已过期' in resp.text: raise WechatSogouException('get_article_content 链接 [{}] 已过期'.format(url)) if raw: return resp.text content_info = WechatSogouStructuring.get_article_detail(resp.text, del_qqmusic=del_qqmusic, del_voice=del_mpvoice) if hosting_callback: content_info = self.__hosting_wechat_img(content_info, hosting_callback) return content_info
def function[get_article_content, parameter[self, url, del_qqmusic, del_mpvoice, unlock_callback, identify_image_callback, hosting_callback, raw]]: constant[获取文章原文,避免临时链接失效 Parameters ---------- url : str or unicode 原文链接,临时链接 raw : bool True: 返回原始html False: 返回处理后的html del_qqmusic: bool True:微信原文中有插入的qq音乐,则删除 False:微信源文中有插入的qq音乐,则保留 del_mpvoice: bool True:微信原文中有插入的语音消息,则删除 False:微信源文中有插入的语音消息,则保留 unlock_callback : callable 处理 文章明细 的时候出现验证码的函数,参见 unlock_callback_example identify_image_callback : callable 处理 文章明细 的时候处理验证码函数,输入验证码二进制数据,输出文字,参见 identify_image_callback_example hosting_callback: callable 将微信采集的文章托管到7牛或者阿里云回调函数,输入微信图片源地址,返回托管后地址 Returns ------- content_html 原文内容 content_img_list 文章中图片列表 Raises ------ WechatSogouRequestsException ] variable[resp] assign[=] call[name[self].__get_by_unlock, parameter[name[url]]] name[resp].encoding assign[=] constant[utf-8] if compare[constant[链接已过期] in name[resp].text] begin[:] <ast.Raise object at 0x7da1b20470d0> if name[raw] begin[:] return[name[resp].text] variable[content_info] assign[=] call[name[WechatSogouStructuring].get_article_detail, parameter[name[resp].text]] if name[hosting_callback] begin[:] variable[content_info] assign[=] call[name[self].__hosting_wechat_img, parameter[name[content_info], name[hosting_callback]]] return[name[content_info]]
keyword[def] identifier[get_article_content] ( identifier[self] , identifier[url] , identifier[del_qqmusic] = keyword[True] , identifier[del_mpvoice] = keyword[True] , identifier[unlock_callback] = keyword[None] , identifier[identify_image_callback] = keyword[None] , identifier[hosting_callback] = keyword[None] , identifier[raw] = keyword[False] ): literal[string] identifier[resp] = identifier[self] . identifier[__get_by_unlock] ( identifier[url] , identifier[unlock_platform] = identifier[self] . identifier[__unlock_wechat] , identifier[unlock_callback] = identifier[unlock_callback] , identifier[identify_image_callback] = identifier[identify_image_callback] ) identifier[resp] . identifier[encoding] = literal[string] keyword[if] literal[string] keyword[in] identifier[resp] . identifier[text] : keyword[raise] identifier[WechatSogouException] ( literal[string] . identifier[format] ( identifier[url] )) keyword[if] identifier[raw] : keyword[return] identifier[resp] . identifier[text] identifier[content_info] = identifier[WechatSogouStructuring] . identifier[get_article_detail] ( identifier[resp] . identifier[text] , identifier[del_qqmusic] = identifier[del_qqmusic] , identifier[del_voice] = identifier[del_mpvoice] ) keyword[if] identifier[hosting_callback] : identifier[content_info] = identifier[self] . identifier[__hosting_wechat_img] ( identifier[content_info] , identifier[hosting_callback] ) keyword[return] identifier[content_info]
def get_article_content(self, url, del_qqmusic=True, del_mpvoice=True, unlock_callback=None, identify_image_callback=None, hosting_callback=None, raw=False): """获取文章原文,避免临时链接失效 Parameters ---------- url : str or unicode 原文链接,临时链接 raw : bool True: 返回原始html False: 返回处理后的html del_qqmusic: bool True:微信原文中有插入的qq音乐,则删除 False:微信源文中有插入的qq音乐,则保留 del_mpvoice: bool True:微信原文中有插入的语音消息,则删除 False:微信源文中有插入的语音消息,则保留 unlock_callback : callable 处理 文章明细 的时候出现验证码的函数,参见 unlock_callback_example identify_image_callback : callable 处理 文章明细 的时候处理验证码函数,输入验证码二进制数据,输出文字,参见 identify_image_callback_example hosting_callback: callable 将微信采集的文章托管到7牛或者阿里云回调函数,输入微信图片源地址,返回托管后地址 Returns ------- content_html 原文内容 content_img_list 文章中图片列表 Raises ------ WechatSogouRequestsException """ resp = self.__get_by_unlock(url, unlock_platform=self.__unlock_wechat, unlock_callback=unlock_callback, identify_image_callback=identify_image_callback) resp.encoding = 'utf-8' if '链接已过期' in resp.text: raise WechatSogouException('get_article_content 链接 [{}] 已过期'.format(url)) # depends on [control=['if'], data=[]] if raw: return resp.text # depends on [control=['if'], data=[]] content_info = WechatSogouStructuring.get_article_detail(resp.text, del_qqmusic=del_qqmusic, del_voice=del_mpvoice) if hosting_callback: content_info = self.__hosting_wechat_img(content_info, hosting_callback) # depends on [control=['if'], data=[]] return content_info
def _create_sentence_objects(self): '''Returns a list of Sentence objects from the raw text. ''' sentence_objects = [] sent_tokenizer = SentenceTokenizer(locale=self.language.code) seq = Sequence(self.raw) seq = sent_tokenizer.transform(seq) for start_index, end_index in zip(seq.idx[:-1], seq.idx[1:]): # Sentences share the same models as their parent blob sent = seq.text[start_index: end_index].strip() if not sent: continue s = Sentence(sent, start_index=start_index, end_index=end_index) s.detected_languages = self.detected_languages sentence_objects.append(s) return sentence_objects
def function[_create_sentence_objects, parameter[self]]: constant[Returns a list of Sentence objects from the raw text. ] variable[sentence_objects] assign[=] list[[]] variable[sent_tokenizer] assign[=] call[name[SentenceTokenizer], parameter[]] variable[seq] assign[=] call[name[Sequence], parameter[name[self].raw]] variable[seq] assign[=] call[name[sent_tokenizer].transform, parameter[name[seq]]] for taget[tuple[[<ast.Name object at 0x7da20c76d690>, <ast.Name object at 0x7da20c76ee60>]]] in starred[call[name[zip], parameter[call[name[seq].idx][<ast.Slice object at 0x7da20c76e3e0>], call[name[seq].idx][<ast.Slice object at 0x7da20c76fe50>]]]] begin[:] variable[sent] assign[=] call[call[name[seq].text][<ast.Slice object at 0x7da20c76dc30>].strip, parameter[]] if <ast.UnaryOp object at 0x7da20c76c4c0> begin[:] continue variable[s] assign[=] call[name[Sentence], parameter[name[sent]]] name[s].detected_languages assign[=] name[self].detected_languages call[name[sentence_objects].append, parameter[name[s]]] return[name[sentence_objects]]
keyword[def] identifier[_create_sentence_objects] ( identifier[self] ): literal[string] identifier[sentence_objects] =[] identifier[sent_tokenizer] = identifier[SentenceTokenizer] ( identifier[locale] = identifier[self] . identifier[language] . identifier[code] ) identifier[seq] = identifier[Sequence] ( identifier[self] . identifier[raw] ) identifier[seq] = identifier[sent_tokenizer] . identifier[transform] ( identifier[seq] ) keyword[for] identifier[start_index] , identifier[end_index] keyword[in] identifier[zip] ( identifier[seq] . identifier[idx] [:- literal[int] ], identifier[seq] . identifier[idx] [ literal[int] :]): identifier[sent] = identifier[seq] . identifier[text] [ identifier[start_index] : identifier[end_index] ]. identifier[strip] () keyword[if] keyword[not] identifier[sent] : keyword[continue] identifier[s] = identifier[Sentence] ( identifier[sent] , identifier[start_index] = identifier[start_index] , identifier[end_index] = identifier[end_index] ) identifier[s] . identifier[detected_languages] = identifier[self] . identifier[detected_languages] identifier[sentence_objects] . identifier[append] ( identifier[s] ) keyword[return] identifier[sentence_objects]
def _create_sentence_objects(self): """Returns a list of Sentence objects from the raw text. """ sentence_objects = [] sent_tokenizer = SentenceTokenizer(locale=self.language.code) seq = Sequence(self.raw) seq = sent_tokenizer.transform(seq) for (start_index, end_index) in zip(seq.idx[:-1], seq.idx[1:]): # Sentences share the same models as their parent blob sent = seq.text[start_index:end_index].strip() if not sent: continue # depends on [control=['if'], data=[]] s = Sentence(sent, start_index=start_index, end_index=end_index) s.detected_languages = self.detected_languages sentence_objects.append(s) # depends on [control=['for'], data=[]] return sentence_objects
def umi_transform(data): """ transform each read by identifying the barcode and UMI for each read and putting the information in the read name """ fqfiles = data["files"] fqfiles.extend(list(repeat("", 4-len(fqfiles)))) fq1, fq2, fq3, fq4 = fqfiles umi_dir = os.path.join(dd.get_work_dir(data), "umis") safe_makedir(umi_dir) transform = dd.get_umi_type(data) if not transform: logger.info("No UMI transform specified, assuming pre-transformed data.") if is_transformed(fq1): logger.info("%s detected as pre-transformed, passing it on unchanged." % fq1) data["files"] = [fq1] return [[data]] else: logger.error("No UMI transform was specified, but %s does not look " "pre-transformed." % fq1) sys.exit(1) if file_exists(transform): transform_file = transform else: transform_file = get_transform_file(transform) if not file_exists(transform_file): logger.error( "The UMI transform can be specified as either a file or a " "bcbio-supported transform. Either the file %s does not exist " "or the transform is not supported by bcbio. Supported " "transforms are %s." 
%(dd.get_umi_type(data), ", ".join(SUPPORTED_TRANSFORMS))) sys.exit(1) out_base = dd.get_sample_name(data) + ".umitransformed.fq.gz" out_file = os.path.join(umi_dir, out_base) if file_exists(out_file): data["files"] = [out_file] return [[data]] cellular_barcodes = get_cellular_barcodes(data) if len(cellular_barcodes) > 1: split_option = "--separate_cb" else: split_option = "" if dd.get_demultiplexed(data): demuxed_option = "--demuxed_cb %s" % dd.get_sample_name(data) split_option = "" else: demuxed_option = "" cores = dd.get_num_cores(data) # skip transformation if the file already looks transformed with open_fastq(fq1) as in_handle: read = next(in_handle) if "UMI_" in read: data["files"] = [out_file] return [[data]] locale_export = utils.locale_export() umis = _umis_cmd(data) cmd = ("{umis} fastqtransform {split_option} {transform_file} " "--cores {cores} {demuxed_option} " "{fq1} {fq2} {fq3} {fq4}" "| seqtk seq -L 20 - | gzip > {tx_out_file}") message = ("Inserting UMI and barcode information into the read name of %s" % fq1) with file_transaction(out_file) as tx_out_file: do.run(cmd.format(**locals()), message) data["files"] = [out_file] return [[data]]
def function[umi_transform, parameter[data]]: constant[ transform each read by identifying the barcode and UMI for each read and putting the information in the read name ] variable[fqfiles] assign[=] call[name[data]][constant[files]] call[name[fqfiles].extend, parameter[call[name[list], parameter[call[name[repeat], parameter[constant[], binary_operation[constant[4] - call[name[len], parameter[name[fqfiles]]]]]]]]]] <ast.Tuple object at 0x7da20c76e590> assign[=] name[fqfiles] variable[umi_dir] assign[=] call[name[os].path.join, parameter[call[name[dd].get_work_dir, parameter[name[data]]], constant[umis]]] call[name[safe_makedir], parameter[name[umi_dir]]] variable[transform] assign[=] call[name[dd].get_umi_type, parameter[name[data]]] if <ast.UnaryOp object at 0x7da20c76fc10> begin[:] call[name[logger].info, parameter[constant[No UMI transform specified, assuming pre-transformed data.]]] if call[name[is_transformed], parameter[name[fq1]]] begin[:] call[name[logger].info, parameter[binary_operation[constant[%s detected as pre-transformed, passing it on unchanged.] 
<ast.Mod object at 0x7da2590d6920> name[fq1]]]] call[name[data]][constant[files]] assign[=] list[[<ast.Name object at 0x7da1b18bc5e0>]] return[list[[<ast.List object at 0x7da1b18be050>]]] if call[name[file_exists], parameter[name[transform]]] begin[:] variable[transform_file] assign[=] name[transform] variable[out_base] assign[=] binary_operation[call[name[dd].get_sample_name, parameter[name[data]]] + constant[.umitransformed.fq.gz]] variable[out_file] assign[=] call[name[os].path.join, parameter[name[umi_dir], name[out_base]]] if call[name[file_exists], parameter[name[out_file]]] begin[:] call[name[data]][constant[files]] assign[=] list[[<ast.Name object at 0x7da1b18bc100>]] return[list[[<ast.List object at 0x7da1b18bc3a0>]]] variable[cellular_barcodes] assign[=] call[name[get_cellular_barcodes], parameter[name[data]]] if compare[call[name[len], parameter[name[cellular_barcodes]]] greater[>] constant[1]] begin[:] variable[split_option] assign[=] constant[--separate_cb] if call[name[dd].get_demultiplexed, parameter[name[data]]] begin[:] variable[demuxed_option] assign[=] binary_operation[constant[--demuxed_cb %s] <ast.Mod object at 0x7da2590d6920> call[name[dd].get_sample_name, parameter[name[data]]]] variable[split_option] assign[=] constant[] variable[cores] assign[=] call[name[dd].get_num_cores, parameter[name[data]]] with call[name[open_fastq], parameter[name[fq1]]] begin[:] variable[read] assign[=] call[name[next], parameter[name[in_handle]]] if compare[constant[UMI_] in name[read]] begin[:] call[name[data]][constant[files]] assign[=] list[[<ast.Name object at 0x7da1b18bd8d0>]] return[list[[<ast.List object at 0x7da1b18bd030>]]] variable[locale_export] assign[=] call[name[utils].locale_export, parameter[]] variable[umis] assign[=] call[name[_umis_cmd], parameter[name[data]]] variable[cmd] assign[=] constant[{umis} fastqtransform {split_option} {transform_file} --cores {cores} {demuxed_option} {fq1} {fq2} {fq3} {fq4}| seqtk seq -L 20 - | gzip > {tx_out_file}] 
variable[message] assign[=] binary_operation[constant[Inserting UMI and barcode information into the read name of %s] <ast.Mod object at 0x7da2590d6920> name[fq1]] with call[name[file_transaction], parameter[name[out_file]]] begin[:] call[name[do].run, parameter[call[name[cmd].format, parameter[]], name[message]]] call[name[data]][constant[files]] assign[=] list[[<ast.Name object at 0x7da1b1986f50>]] return[list[[<ast.List object at 0x7da1b19867d0>]]]
keyword[def] identifier[umi_transform] ( identifier[data] ): literal[string] identifier[fqfiles] = identifier[data] [ literal[string] ] identifier[fqfiles] . identifier[extend] ( identifier[list] ( identifier[repeat] ( literal[string] , literal[int] - identifier[len] ( identifier[fqfiles] )))) identifier[fq1] , identifier[fq2] , identifier[fq3] , identifier[fq4] = identifier[fqfiles] identifier[umi_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[dd] . identifier[get_work_dir] ( identifier[data] ), literal[string] ) identifier[safe_makedir] ( identifier[umi_dir] ) identifier[transform] = identifier[dd] . identifier[get_umi_type] ( identifier[data] ) keyword[if] keyword[not] identifier[transform] : identifier[logger] . identifier[info] ( literal[string] ) keyword[if] identifier[is_transformed] ( identifier[fq1] ): identifier[logger] . identifier[info] ( literal[string] % identifier[fq1] ) identifier[data] [ literal[string] ]=[ identifier[fq1] ] keyword[return] [[ identifier[data] ]] keyword[else] : identifier[logger] . identifier[error] ( literal[string] literal[string] % identifier[fq1] ) identifier[sys] . identifier[exit] ( literal[int] ) keyword[if] identifier[file_exists] ( identifier[transform] ): identifier[transform_file] = identifier[transform] keyword[else] : identifier[transform_file] = identifier[get_transform_file] ( identifier[transform] ) keyword[if] keyword[not] identifier[file_exists] ( identifier[transform_file] ): identifier[logger] . identifier[error] ( literal[string] literal[string] literal[string] literal[string] %( identifier[dd] . identifier[get_umi_type] ( identifier[data] ), literal[string] . identifier[join] ( identifier[SUPPORTED_TRANSFORMS] ))) identifier[sys] . identifier[exit] ( literal[int] ) identifier[out_base] = identifier[dd] . identifier[get_sample_name] ( identifier[data] )+ literal[string] identifier[out_file] = identifier[os] . identifier[path] . 
identifier[join] ( identifier[umi_dir] , identifier[out_base] ) keyword[if] identifier[file_exists] ( identifier[out_file] ): identifier[data] [ literal[string] ]=[ identifier[out_file] ] keyword[return] [[ identifier[data] ]] identifier[cellular_barcodes] = identifier[get_cellular_barcodes] ( identifier[data] ) keyword[if] identifier[len] ( identifier[cellular_barcodes] )> literal[int] : identifier[split_option] = literal[string] keyword[else] : identifier[split_option] = literal[string] keyword[if] identifier[dd] . identifier[get_demultiplexed] ( identifier[data] ): identifier[demuxed_option] = literal[string] % identifier[dd] . identifier[get_sample_name] ( identifier[data] ) identifier[split_option] = literal[string] keyword[else] : identifier[demuxed_option] = literal[string] identifier[cores] = identifier[dd] . identifier[get_num_cores] ( identifier[data] ) keyword[with] identifier[open_fastq] ( identifier[fq1] ) keyword[as] identifier[in_handle] : identifier[read] = identifier[next] ( identifier[in_handle] ) keyword[if] literal[string] keyword[in] identifier[read] : identifier[data] [ literal[string] ]=[ identifier[out_file] ] keyword[return] [[ identifier[data] ]] identifier[locale_export] = identifier[utils] . identifier[locale_export] () identifier[umis] = identifier[_umis_cmd] ( identifier[data] ) identifier[cmd] =( literal[string] literal[string] literal[string] literal[string] ) identifier[message] =( literal[string] % identifier[fq1] ) keyword[with] identifier[file_transaction] ( identifier[out_file] ) keyword[as] identifier[tx_out_file] : identifier[do] . identifier[run] ( identifier[cmd] . identifier[format] (** identifier[locals] ()), identifier[message] ) identifier[data] [ literal[string] ]=[ identifier[out_file] ] keyword[return] [[ identifier[data] ]]
def umi_transform(data): """ transform each read by identifying the barcode and UMI for each read and putting the information in the read name """ fqfiles = data['files'] fqfiles.extend(list(repeat('', 4 - len(fqfiles)))) (fq1, fq2, fq3, fq4) = fqfiles umi_dir = os.path.join(dd.get_work_dir(data), 'umis') safe_makedir(umi_dir) transform = dd.get_umi_type(data) if not transform: logger.info('No UMI transform specified, assuming pre-transformed data.') if is_transformed(fq1): logger.info('%s detected as pre-transformed, passing it on unchanged.' % fq1) data['files'] = [fq1] return [[data]] # depends on [control=['if'], data=[]] else: logger.error('No UMI transform was specified, but %s does not look pre-transformed.' % fq1) sys.exit(1) # depends on [control=['if'], data=[]] if file_exists(transform): transform_file = transform # depends on [control=['if'], data=[]] else: transform_file = get_transform_file(transform) if not file_exists(transform_file): logger.error('The UMI transform can be specified as either a file or a bcbio-supported transform. Either the file %s does not exist or the transform is not supported by bcbio. Supported transforms are %s.' 
% (dd.get_umi_type(data), ', '.join(SUPPORTED_TRANSFORMS))) sys.exit(1) # depends on [control=['if'], data=[]] out_base = dd.get_sample_name(data) + '.umitransformed.fq.gz' out_file = os.path.join(umi_dir, out_base) if file_exists(out_file): data['files'] = [out_file] return [[data]] # depends on [control=['if'], data=[]] cellular_barcodes = get_cellular_barcodes(data) if len(cellular_barcodes) > 1: split_option = '--separate_cb' # depends on [control=['if'], data=[]] else: split_option = '' if dd.get_demultiplexed(data): demuxed_option = '--demuxed_cb %s' % dd.get_sample_name(data) split_option = '' # depends on [control=['if'], data=[]] else: demuxed_option = '' cores = dd.get_num_cores(data) # skip transformation if the file already looks transformed with open_fastq(fq1) as in_handle: read = next(in_handle) if 'UMI_' in read: data['files'] = [out_file] return [[data]] # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['in_handle']] locale_export = utils.locale_export() umis = _umis_cmd(data) cmd = '{umis} fastqtransform {split_option} {transform_file} --cores {cores} {demuxed_option} {fq1} {fq2} {fq3} {fq4}| seqtk seq -L 20 - | gzip > {tx_out_file}' message = 'Inserting UMI and barcode information into the read name of %s' % fq1 with file_transaction(out_file) as tx_out_file: do.run(cmd.format(**locals()), message) # depends on [control=['with'], data=[]] data['files'] = [out_file] return [[data]]
def snmp_server_v3host_use_vrf(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") snmp_server = ET.SubElement(config, "snmp-server", xmlns="urn:brocade.com:mgmt:brocade-snmp") v3host = ET.SubElement(snmp_server, "v3host") hostip_key = ET.SubElement(v3host, "hostip") hostip_key.text = kwargs.pop('hostip') username_key = ET.SubElement(v3host, "username") username_key.text = kwargs.pop('username') use_vrf = ET.SubElement(v3host, "use-vrf") use_vrf.text = kwargs.pop('use_vrf') callback = kwargs.pop('callback', self._callback) return callback(config)
def function[snmp_server_v3host_use_vrf, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[snmp_server] assign[=] call[name[ET].SubElement, parameter[name[config], constant[snmp-server]]] variable[v3host] assign[=] call[name[ET].SubElement, parameter[name[snmp_server], constant[v3host]]] variable[hostip_key] assign[=] call[name[ET].SubElement, parameter[name[v3host], constant[hostip]]] name[hostip_key].text assign[=] call[name[kwargs].pop, parameter[constant[hostip]]] variable[username_key] assign[=] call[name[ET].SubElement, parameter[name[v3host], constant[username]]] name[username_key].text assign[=] call[name[kwargs].pop, parameter[constant[username]]] variable[use_vrf] assign[=] call[name[ET].SubElement, parameter[name[v3host], constant[use-vrf]]] name[use_vrf].text assign[=] call[name[kwargs].pop, parameter[constant[use_vrf]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[snmp_server_v3host_use_vrf] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[snmp_server] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] ) identifier[v3host] = identifier[ET] . identifier[SubElement] ( identifier[snmp_server] , literal[string] ) identifier[hostip_key] = identifier[ET] . identifier[SubElement] ( identifier[v3host] , literal[string] ) identifier[hostip_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[username_key] = identifier[ET] . identifier[SubElement] ( identifier[v3host] , literal[string] ) identifier[username_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[use_vrf] = identifier[ET] . identifier[SubElement] ( identifier[v3host] , literal[string] ) identifier[use_vrf] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def snmp_server_v3host_use_vrf(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') snmp_server = ET.SubElement(config, 'snmp-server', xmlns='urn:brocade.com:mgmt:brocade-snmp') v3host = ET.SubElement(snmp_server, 'v3host') hostip_key = ET.SubElement(v3host, 'hostip') hostip_key.text = kwargs.pop('hostip') username_key = ET.SubElement(v3host, 'username') username_key.text = kwargs.pop('username') use_vrf = ET.SubElement(v3host, 'use-vrf') use_vrf.text = kwargs.pop('use_vrf') callback = kwargs.pop('callback', self._callback) return callback(config)