code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def extract_cite_history(page, extractors):
    """
    Extracts cites from the history of a `page` (`mwxml.Page`).

    :Parameters:
        page : `iterable`(`mwxml.Revision`)
            The page to extract cites from
        extractors : `list`(`extractor`)
            A list of extractors to apply to the text

    :Returns:
        `iterable` -- a generator of extracted cites
    """
    first_seen = {}      # id -> (rev_id, timestamp) of its first appearance
    current_ids = set()  # ids present in the most recently processed revision
    for rev in page:
        current_ids = set(extract_ids(rev.text, extractors))
        # Remember the revision in which each id was first observed
        for identifier in current_ids:
            first_seen.setdefault(identifier, (rev.id, rev.timestamp))
    # Only ids that survive to the last revision of the page are reported
    for identifier in current_ids:
        first_rev_id, first_timestamp = first_seen[identifier]
        yield (page.id, page.title, first_rev_id, first_timestamp,
               identifier.type, identifier.id)
def function[extract_cite_history, parameter[page, extractors]]: constant[ Extracts cites from the history of a `page` (`mwxml.Page`). :Parameters: page : `iterable`(`mwxml.Revision`) The page to extract cites from extractors : `list`(`extractor`) A list of extractors to apply to the text :Returns: `iterable` -- a generator of extracted cites ] variable[appearances] assign[=] dictionary[[], []] variable[ids] assign[=] call[name[set], parameter[]] for taget[name[revision]] in starred[name[page]] begin[:] variable[ids] assign[=] call[name[set], parameter[call[name[extract_ids], parameter[name[revision].text, name[extractors]]]]] for taget[name[id]] in starred[name[ids]] begin[:] if compare[name[id] <ast.NotIn object at 0x7da2590d7190> name[appearances]] begin[:] call[name[appearances]][name[id]] assign[=] tuple[[<ast.Attribute object at 0x7da18ede45e0>, <ast.Attribute object at 0x7da18ede64d0>]] for taget[name[id]] in starred[name[ids]] begin[:] <ast.Tuple object at 0x7da18ede6d10> assign[=] call[name[appearances]][name[id]] <ast.Yield object at 0x7da18ede4700>
keyword[def] identifier[extract_cite_history] ( identifier[page] , identifier[extractors] ): literal[string] identifier[appearances] ={} identifier[ids] = identifier[set] () keyword[for] identifier[revision] keyword[in] identifier[page] : identifier[ids] = identifier[set] ( identifier[extract_ids] ( identifier[revision] . identifier[text] , identifier[extractors] )) keyword[for] identifier[id] keyword[in] identifier[ids] : keyword[if] identifier[id] keyword[not] keyword[in] identifier[appearances] : identifier[appearances] [ identifier[id] ]=( identifier[revision] . identifier[id] , identifier[revision] . identifier[timestamp] ) keyword[for] identifier[id] keyword[in] identifier[ids] : identifier[rev_id] , identifier[timestamp] = identifier[appearances] [ identifier[id] ] keyword[yield] ( identifier[page] . identifier[id] , identifier[page] . identifier[title] , identifier[rev_id] , identifier[timestamp] , identifier[id] . identifier[type] , identifier[id] . identifier[id] )
def extract_cite_history(page, extractors): """ Extracts cites from the history of a `page` (`mwxml.Page`). :Parameters: page : `iterable`(`mwxml.Revision`) The page to extract cites from extractors : `list`(`extractor`) A list of extractors to apply to the text :Returns: `iterable` -- a generator of extracted cites """ appearances = {} # For tracking the first appearance of an ID ids = set() # For holding onto the ids in the last revision. for revision in page: ids = set(extract_ids(revision.text, extractors)) # For each ID, check to see if we have seen it before for id in ids: if id not in appearances: appearances[id] = (revision.id, revision.timestamp) # depends on [control=['if'], data=['id', 'appearances']] # depends on [control=['for'], data=['id']] # depends on [control=['for'], data=['revision']] for id in ids: #For the ids in the last version of the page (rev_id, timestamp) = appearances[id] yield (page.id, page.title, rev_id, timestamp, id.type, id.id) # depends on [control=['for'], data=['id']]
def linkcode_resolve(domain, info):
    """
    Determine the URL corresponding to Python object

    This code is from
    https://github.com/numpy/numpy/blob/master/doc/source/conf.py#L290
    and https://github.com/Lasagne/Lasagne/pull/262

    Parameters
    ----------
    domain : str
        The Sphinx domain of the object (only 'py' is handled).
    info : dict
        Must contain 'module' (dotted module name) and 'fullname'
        (dotted attribute path within the module).

    Returns
    -------
    str or None
        GitHub URL pointing at the object's source (with a #L..-L..
        line anchor when the source lines can be found), or None when
        the object cannot be resolved.
    """
    if domain != 'py':
        return None

    modname = info['module']
    fullname = info['fullname']

    submod = sys.modules.get(modname)
    if submod is None:
        return None

    # Walk the dotted attribute path down from the module object.
    obj = submod
    for part in fullname.split('.'):
        try:
            obj = getattr(obj, part)
        except AttributeError:
            # Path does not resolve (e.g. a dynamically generated member).
            return None

    try:
        fn = inspect.getsourcefile(obj)
    except TypeError:
        # Built-in / C-level objects have no Python source file.
        fn = None
    if not fn:
        return None

    try:
        source, lineno = inspect.getsourcelines(obj)
    except (OSError, TypeError):
        # Source text unavailable; link to the file without a line anchor.
        lineno = None

    if lineno:
        linespec = "#L%d-L%d" % (lineno, lineno + len(source) - 1)
    else:
        linespec = ""

    filename = info['module'].replace('.', '/')
    return "http://github.com/allenai/allennlp/blob/master/%s.py%s" % (filename, linespec)
def function[linkcode_resolve, parameter[domain, info]]: constant[ Determine the URL corresponding to Python object This code is from https://github.com/numpy/numpy/blob/master/doc/source/conf.py#L290 and https://github.com/Lasagne/Lasagne/pull/262 ] if compare[name[domain] not_equal[!=] constant[py]] begin[:] return[constant[None]] variable[modname] assign[=] call[name[info]][constant[module]] variable[fullname] assign[=] call[name[info]][constant[fullname]] variable[submod] assign[=] call[name[sys].modules.get, parameter[name[modname]]] if compare[name[submod] is constant[None]] begin[:] return[constant[None]] variable[obj] assign[=] name[submod] for taget[name[part]] in starred[call[name[fullname].split, parameter[constant[.]]]] begin[:] <ast.Try object at 0x7da2043458a0> <ast.Try object at 0x7da204344520> if <ast.UnaryOp object at 0x7da2043452a0> begin[:] return[constant[None]] <ast.Try object at 0x7da2043449a0> if name[lineno] begin[:] variable[linespec] assign[=] binary_operation[constant[#L%d-L%d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da204347a30>, <ast.BinOp object at 0x7da204344d90>]]] variable[filename] assign[=] call[call[name[info]][constant[module]].replace, parameter[constant[.], constant[/]]] return[binary_operation[constant[http://github.com/allenai/allennlp/blob/master/%s.py%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c6e78e0>, <ast.Name object at 0x7da20c6e7a60>]]]]
keyword[def] identifier[linkcode_resolve] ( identifier[domain] , identifier[info] ): literal[string] keyword[if] identifier[domain] != literal[string] : keyword[return] keyword[None] identifier[modname] = identifier[info] [ literal[string] ] identifier[fullname] = identifier[info] [ literal[string] ] identifier[submod] = identifier[sys] . identifier[modules] . identifier[get] ( identifier[modname] ) keyword[if] identifier[submod] keyword[is] keyword[None] : keyword[return] keyword[None] identifier[obj] = identifier[submod] keyword[for] identifier[part] keyword[in] identifier[fullname] . identifier[split] ( literal[string] ): keyword[try] : identifier[obj] = identifier[getattr] ( identifier[obj] , identifier[part] ) keyword[except] : keyword[return] keyword[None] keyword[try] : identifier[fn] = identifier[inspect] . identifier[getsourcefile] ( identifier[obj] ) keyword[except] : identifier[fn] = keyword[None] keyword[if] keyword[not] identifier[fn] : keyword[return] keyword[None] keyword[try] : identifier[source] , identifier[lineno] = identifier[inspect] . identifier[getsourcelines] ( identifier[obj] ) keyword[except] : identifier[lineno] = keyword[None] keyword[if] identifier[lineno] : identifier[linespec] = literal[string] %( identifier[lineno] , identifier[lineno] + identifier[len] ( identifier[source] )- literal[int] ) keyword[else] : identifier[linespec] = literal[string] identifier[filename] = identifier[info] [ literal[string] ]. identifier[replace] ( literal[string] , literal[string] ) keyword[return] literal[string] %( identifier[filename] , identifier[linespec] )
def linkcode_resolve(domain, info): """ Determine the URL corresponding to Python object This code is from https://github.com/numpy/numpy/blob/master/doc/source/conf.py#L290 and https://github.com/Lasagne/Lasagne/pull/262 """ if domain != 'py': return None # depends on [control=['if'], data=[]] modname = info['module'] fullname = info['fullname'] submod = sys.modules.get(modname) if submod is None: return None # depends on [control=['if'], data=[]] obj = submod for part in fullname.split('.'): try: obj = getattr(obj, part) # depends on [control=['try'], data=[]] except: return None # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['part']] try: fn = inspect.getsourcefile(obj) # depends on [control=['try'], data=[]] except: fn = None # depends on [control=['except'], data=[]] if not fn: return None # depends on [control=['if'], data=[]] try: (source, lineno) = inspect.getsourcelines(obj) # depends on [control=['try'], data=[]] except: lineno = None # depends on [control=['except'], data=[]] if lineno: linespec = '#L%d-L%d' % (lineno, lineno + len(source) - 1) # depends on [control=['if'], data=[]] else: linespec = '' filename = info['module'].replace('.', '/') return 'http://github.com/allenai/allennlp/blob/master/%s.py%s' % (filename, linespec)
def _parse_pot(pot, potforactions=False, potfortorus=False):
    """Parse the potential so it can be fed to C

    Flattens a galpy Potential instance (or a list of them) into the
    parallel arrays the C extension expects.

    Parameters
    ----------
    pot : Potential instance or list of Potential instances
        The potential(s) to parse.
    potforactions : bool, optional
        Whether the parse is for the action routines; only forwarded to
        recursive calls for wrapped potentials.
    potfortorus : bool, optional
        Whether the parse is for the torus machinery; only forwarded to
        recursive calls for wrapped potentials.

    Returns
    -------
    (npot, pot_type, pot_args) : tuple
        npot : int -- number of C-level potential components (some Python
            potentials, e.g. MN3ExponentialDiskPotential, expand into
            several components)
        pot_type : numpy int32 array -- C potential-type code per component
        pot_args : numpy float64 array -- flattened parameters, in the
            order the corresponding C implementations read them
    """
    # Figure out what's in pot
    if not isinstance(pot, list):
        pot = [pot]
    # Initialize everything
    pot_type = []
    pot_args = []
    npot = len(pot)
    for p in pot:
        if isinstance(p, potential.LogarithmicHaloPotential):
            pot_type.append(0)
            if p.isNonAxi:
                pot_args.extend([p._amp, p._q, p._core2, p._1m1overb2])
            else:
                pot_args.extend([p._amp, p._q, p._core2, 2.])  # 1m1overb2 > 1: axi
        elif isinstance(p, potential.DehnenBarPotential):
            pot_type.append(1)
            pot_args.extend([p._amp * p._af, p._tform, p._tsteady, p._rb,
                             p._omegab, p._barphi])
        elif isinstance(p, potential.MiyamotoNagaiPotential):
            pot_type.append(5)
            pot_args.extend([p._amp, p._a, p._b])
        elif isinstance(p, potential.PowerSphericalPotential):
            pot_type.append(7)
            pot_args.extend([p._amp, p.alpha])
        elif isinstance(p, potential.HernquistPotential):
            pot_type.append(8)
            pot_args.extend([p._amp, p.a])
        elif isinstance(p, potential.NFWPotential):
            pot_type.append(9)
            pot_args.extend([p._amp, p.a])
        elif isinstance(p, potential.JaffePotential):
            pot_type.append(10)
            pot_args.extend([p._amp, p.a])
        elif isinstance(p, potential.DoubleExponentialDiskPotential):
            pot_type.append(11)
            pot_args.extend([p._amp, p._alpha, p._beta, p._kmaxFac,
                             p._nzeros, p._glorder])
            # Gauss-Legendre abscissae/weights followed by the Bessel-zero
            # tables the C integrator needs
            pot_args.extend([p._glx[ii] for ii in range(p._glorder)])
            pot_args.extend([p._glw[ii] for ii in range(p._glorder)])
            pot_args.extend([p._j0zeros[ii] for ii in range(p._nzeros + 1)])
            pot_args.extend([p._dj0zeros[ii] for ii in range(p._nzeros + 1)])
            pot_args.extend([p._j1zeros[ii] for ii in range(p._nzeros + 1)])
            pot_args.extend([p._dj1zeros[ii] for ii in range(p._nzeros + 1)])
            pot_args.extend([p._kp._amp, p._kp.alpha])
        elif isinstance(p, potential.FlattenedPowerPotential):
            pot_type.append(12)
            pot_args.extend([p._amp, p.alpha, p.q2, p.core2])
        elif isinstance(p, potential.interpRZPotential):
            pot_type.append(13)
            pot_args.extend([len(p._rgrid), len(p._zgrid)])
            if p._logR:
                pot_args.extend([p._logrgrid[ii]
                                 for ii in range(len(p._rgrid))])
            else:
                pot_args.extend([p._rgrid[ii]
                                 for ii in range(len(p._rgrid))])
            pot_args.extend([p._zgrid[ii] for ii in range(len(p._zgrid))])
            # Spline coefficients for the potential, Rforce and zforce grids;
            # if any was not interpolated, warn and pad with ones so the C
            # side still receives a correctly-sized array
            if hasattr(p, '_potGrid_splinecoeffs'):
                pot_args.extend([x for x in
                                 p._potGrid_splinecoeffs.flatten(order='C')])
            else:  # pragma: no cover
                warnings.warn("You are attempting to use the C implementation of interpRZPotential, but have not interpolated the potential itself; if you think this is needed for what you want to do, initialize the interpRZPotential instance with interpPot=True", galpyWarning)
                pot_args.extend(list(nu.ones(len(p._rgrid) * len(p._zgrid))))
            if hasattr(p, '_rforceGrid_splinecoeffs'):
                pot_args.extend([x for x in
                                 p._rforceGrid_splinecoeffs.flatten(order='C')])
            else:  # pragma: no cover
                warnings.warn("You are attempting to use the C implementation of interpRZPotential, but have not interpolated the Rforce; if you think this is needed for what you want to do, initialize the interpRZPotential instance with interpRforce=True", galpyWarning)
                pot_args.extend(list(nu.ones(len(p._rgrid) * len(p._zgrid))))
            if hasattr(p, '_zforceGrid_splinecoeffs'):
                pot_args.extend([x for x in
                                 p._zforceGrid_splinecoeffs.flatten(order='C')])
            else:  # pragma: no cover
                warnings.warn("You are attempting to use the C implementation of interpRZPotential, but have not interpolated the zforce; if you think this is needed for what you want to do, initialize the interpRZPotential instance with interpzforce=True", galpyWarning)
                pot_args.extend(list(nu.ones(len(p._rgrid) * len(p._zgrid))))
            pot_args.extend([p._amp, int(p._logR)])
        elif isinstance(p, potential.IsochronePotential):
            pot_type.append(14)
            pot_args.extend([p._amp, p.b])
        elif isinstance(p, potential.PowerSphericalPotentialwCutoff):
            pot_type.append(15)
            pot_args.extend([p._amp, p.alpha, p.rc])
        elif isinstance(p, potential.MN3ExponentialDiskPotential):
            # Three Miyamoto-Nagai disks
            npot += 2
            pot_type.extend([5, 5, 5])
            pot_args.extend([p._amp * p._mn3[0]._amp,
                             p._mn3[0]._a, p._mn3[0]._b,
                             p._amp * p._mn3[1]._amp,
                             p._mn3[1]._a, p._mn3[1]._b,
                             p._amp * p._mn3[2]._amp,
                             p._mn3[2]._a, p._mn3[2]._b])
        elif isinstance(p, potential.KuzminKutuzovStaeckelPotential):
            pot_type.append(16)
            pot_args.extend([p._amp, p._ac, p._Delta])
        elif isinstance(p, potential.PlummerPotential):
            pot_type.append(17)
            pot_args.extend([p._amp, p._b])
        elif isinstance(p, potential.PseudoIsothermalPotential):
            pot_type.append(18)
            pot_args.extend([p._amp, p._a])
        elif isinstance(p, potential.KuzminDiskPotential):
            pot_type.append(19)
            pot_args.extend([p._amp, p._a])
        elif isinstance(p, potential.BurkertPotential):
            pot_type.append(20)
            pot_args.extend([p._amp, p.a])
        elif isinstance(p, potential.EllipsoidalPotential.EllipsoidalPotential):
            pot_args.append(p._amp)
            pot_args.extend([0., 0., 0., 0., 0., 0.])  # for caching
            # Potential specific parameters
            if isinstance(p, potential.TriaxialHernquistPotential):
                pot_type.append(21)
                pot_args.extend([2, p.a, p.a4])  # for psi, mdens, mdens_deriv
            elif isinstance(p, potential.TriaxialNFWPotential):
                pot_type.append(22)
                pot_args.extend([2, p.a, p.a3])  # for psi, mdens, mdens_deriv
            elif isinstance(p, potential.TriaxialJaffePotential):
                pot_type.append(23)
                pot_args.extend([2, p.a, p.a2])  # for psi, mdens, mdens_deriv
            elif isinstance(p, potential.PerfectEllipsoidPotential):
                pot_type.append(30)
                pot_args.extend([1, p.a2])  # for psi, mdens, mdens_deriv
            pot_args.extend([p._b2, p._c2, int(p._aligned)])  # Reg. Ellipsoidal
            if not p._aligned:
                pot_args.extend(list(p._rot.flatten()))
            else:
                pot_args.extend(list(nu.eye(3).flatten()))  # not actually used
            pot_args.append(p._glorder)
            pot_args.extend([p._glx[ii] for ii in range(p._glorder)])
            # this adds some common factors to the integration weights
            pot_args.extend([-4. * nu.pi * p._glw[ii] * p._b * p._c
                             / nu.sqrt((1. + (p._b2 - 1.) * p._glx[ii]**2.)
                                       * (1. + (p._c2 - 1.) * p._glx[ii]**2.))
                             for ii in range(p._glorder)])
        elif isinstance(p, potential.SCFPotential):
            # Type 24, see stand-alone parser below
            pt, pa = _parse_scf_pot(p)
            pot_type.append(pt)
            pot_args.extend(pa)
        elif isinstance(p, potential.SoftenedNeedleBarPotential):
            pot_type.append(25)
            pot_args.extend([p._amp, p._a, p._b, p._c2, p._pa, p._omegab])
            pot_args.extend([0., 0., 0., 0., 0., 0., 0.])  # for caching
        elif isinstance(p, potential.DiskSCFPotential):
            # Need to pull this apart into: (a) SCF part, (b) constituent
            # [Sigma_i,h_i] parts
            # (a) SCF, multiply in any add'l amp
            pt, pa = _parse_scf_pot(p._scf, extra_amp=p._amp)
            pot_type.append(pt)
            pot_args.extend(pa)
            # (b) constituent [Sigma_i,h_i] parts
            for Sigma, hz in zip(p._Sigma_dict, p._hz_dict):
                npot += 1
                pot_type.append(26)
                stype = Sigma.get('type', 'exp')
                # BUGFIX: was "stype == 'exp' or (stype == 'exp' and
                # 'Rhole' in Sigma)", which reduces to stype == 'exp' and
                # made the 'exp'+'Rhole' case of the elif unreachable; an
                # exponential disk with a hole must take the expwhole path
                if stype == 'exp' and not 'Rhole' in Sigma:
                    pot_args.extend([3, 0,
                                     4. * nu.pi * Sigma.get('amp', 1.) * p._amp,
                                     Sigma.get('h', 1. / 3.)])
                elif stype == 'expwhole' \
                        or (stype == 'exp' and 'Rhole' in Sigma):
                    pot_args.extend([4, 1,
                                     4. * nu.pi * Sigma.get('amp', 1.) * p._amp,
                                     Sigma.get('h', 1. / 3.),
                                     Sigma.get('Rhole', 0.5)])
                hztype = hz.get('type', 'exp')
                if hztype == 'exp':
                    pot_args.extend([0, hz.get('h', 0.0375)])
                elif hztype == 'sech2':
                    pot_args.extend([1, hz.get('h', 0.0375)])
        elif isinstance(p, potential.SpiralArmsPotential):
            pot_type.append(27)
            pot_args.extend([len(p._Cs), p._amp, p._N, p._sin_alpha,
                             p._tan_alpha, p._r_ref, p._phi_ref, p._Rs,
                             p._H, p._omega])
            pot_args.extend(p._Cs)
        # 30: PerfectEllipsoidPotential, done with others above
        ############################## WRAPPERS ###############################
        elif isinstance(p, potential.DehnenSmoothWrapperPotential):
            pot_type.append(-1)
            wrap_npot, wrap_pot_type, wrap_pot_args = \
                _parse_pot(p._pot,
                           potforactions=potforactions,
                           potfortorus=potfortorus)
            pot_args.append(wrap_npot)
            pot_type.extend(wrap_pot_type)
            pot_args.extend(wrap_pot_args)
            pot_args.extend([p._amp, p._tform, p._tsteady, int(p._grow)])
        elif isinstance(p, potential.SolidBodyRotationWrapperPotential):
            pot_type.append(-2)
            # Not sure how to easily avoid this duplication
            wrap_npot, wrap_pot_type, wrap_pot_args = \
                _parse_pot(p._pot,
                           potforactions=potforactions,
                           potfortorus=potfortorus)
            pot_args.append(wrap_npot)
            pot_type.extend(wrap_pot_type)
            pot_args.extend(wrap_pot_args)
            pot_args.extend([p._amp, p._omega, p._pa])
        elif isinstance(p, potential.CorotatingRotationWrapperPotential):
            pot_type.append(-4)
            # Not sure how to easily avoid this duplication
            wrap_npot, wrap_pot_type, wrap_pot_args = \
                _parse_pot(p._pot,
                           potforactions=potforactions,
                           potfortorus=potfortorus)
            pot_args.append(wrap_npot)
            pot_type.extend(wrap_pot_type)
            pot_args.extend(wrap_pot_args)
            pot_args.extend([p._amp, p._vpo, p._beta, p._pa, p._to])
        elif isinstance(p, potential.GaussianAmplitudeWrapperPotential):
            pot_type.append(-5)
            wrap_npot, wrap_pot_type, wrap_pot_args = \
                _parse_pot(p._pot,
                           potforactions=potforactions,
                           potfortorus=potfortorus)
            pot_args.append(wrap_npot)
            pot_type.extend(wrap_pot_type)
            pot_args.extend(wrap_pot_args)
            pot_args.extend([p._amp, p._to, p._sigma2])
    pot_type = nu.array(pot_type, dtype=nu.int32, order='C')
    pot_args = nu.array(pot_args, dtype=nu.float64, order='C')
    return (npot, pot_type, pot_args)
def function[_parse_pot, parameter[pot, potforactions, potfortorus]]: constant[Parse the potential so it can be fed to C] if <ast.UnaryOp object at 0x7da1b0c5b160> begin[:] variable[pot] assign[=] list[[<ast.Name object at 0x7da1b0c5afe0>]] variable[pot_type] assign[=] list[[]] variable[pot_args] assign[=] list[[]] variable[npot] assign[=] call[name[len], parameter[name[pot]]] for taget[name[p]] in starred[name[pot]] begin[:] if call[name[isinstance], parameter[name[p], name[potential].LogarithmicHaloPotential]] begin[:] call[name[pot_type].append, parameter[constant[0]]] if name[p].isNonAxi begin[:] call[name[pot_args].extend, parameter[list[[<ast.Attribute object at 0x7da1b0c5a950>, <ast.Attribute object at 0x7da1b0c5a8f0>, <ast.Attribute object at 0x7da1b0c5a890>, <ast.Attribute object at 0x7da1b0c5a830>]]]] variable[pot_type] assign[=] call[name[nu].array, parameter[name[pot_type]]] variable[pot_args] assign[=] call[name[nu].array, parameter[name[pot_args]]] return[tuple[[<ast.Name object at 0x7da1b0da2ce0>, <ast.Name object at 0x7da1b0da1150>, <ast.Name object at 0x7da1b0da10c0>]]]
keyword[def] identifier[_parse_pot] ( identifier[pot] , identifier[potforactions] = keyword[False] , identifier[potfortorus] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[pot] , identifier[list] ): identifier[pot] =[ identifier[pot] ] identifier[pot_type] =[] identifier[pot_args] =[] identifier[npot] = identifier[len] ( identifier[pot] ) keyword[for] identifier[p] keyword[in] identifier[pot] : keyword[if] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[LogarithmicHaloPotential] ): identifier[pot_type] . identifier[append] ( literal[int] ) keyword[if] identifier[p] . identifier[isNonAxi] : identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_amp] , identifier[p] . identifier[_q] , identifier[p] . identifier[_core2] , identifier[p] . identifier[_1m1overb2] ]) keyword[else] : identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_amp] , identifier[p] . identifier[_q] , identifier[p] . identifier[_core2] , literal[int] ]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[DehnenBarPotential] ): identifier[pot_type] . identifier[append] ( literal[int] ) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_amp] * identifier[p] . identifier[_af] , identifier[p] . identifier[_tform] , identifier[p] . identifier[_tsteady] , identifier[p] . identifier[_rb] , identifier[p] . identifier[_omegab] , identifier[p] . identifier[_barphi] ]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[MiyamotoNagaiPotential] ): identifier[pot_type] . identifier[append] ( literal[int] ) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_amp] , identifier[p] . identifier[_a] , identifier[p] . identifier[_b] ]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[PowerSphericalPotential] ): identifier[pot_type] . 
identifier[append] ( literal[int] ) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_amp] , identifier[p] . identifier[alpha] ]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[HernquistPotential] ): identifier[pot_type] . identifier[append] ( literal[int] ) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_amp] , identifier[p] . identifier[a] ]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[NFWPotential] ): identifier[pot_type] . identifier[append] ( literal[int] ) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_amp] , identifier[p] . identifier[a] ]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[JaffePotential] ): identifier[pot_type] . identifier[append] ( literal[int] ) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_amp] , identifier[p] . identifier[a] ]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[DoubleExponentialDiskPotential] ): identifier[pot_type] . identifier[append] ( literal[int] ) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_amp] , identifier[p] . identifier[_alpha] , identifier[p] . identifier[_beta] , identifier[p] . identifier[_kmaxFac] , identifier[p] . identifier[_nzeros] , identifier[p] . identifier[_glorder] ]) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_glx] [ identifier[ii] ] keyword[for] identifier[ii] keyword[in] identifier[range] ( identifier[p] . identifier[_glorder] )]) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_glw] [ identifier[ii] ] keyword[for] identifier[ii] keyword[in] identifier[range] ( identifier[p] . identifier[_glorder] )]) identifier[pot_args] . identifier[extend] ([ identifier[p] . 
identifier[_j0zeros] [ identifier[ii] ] keyword[for] identifier[ii] keyword[in] identifier[range] ( identifier[p] . identifier[_nzeros] + literal[int] )]) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_dj0zeros] [ identifier[ii] ] keyword[for] identifier[ii] keyword[in] identifier[range] ( identifier[p] . identifier[_nzeros] + literal[int] )]) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_j1zeros] [ identifier[ii] ] keyword[for] identifier[ii] keyword[in] identifier[range] ( identifier[p] . identifier[_nzeros] + literal[int] )]) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_dj1zeros] [ identifier[ii] ] keyword[for] identifier[ii] keyword[in] identifier[range] ( identifier[p] . identifier[_nzeros] + literal[int] )]) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_kp] . identifier[_amp] , identifier[p] . identifier[_kp] . identifier[alpha] ]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[FlattenedPowerPotential] ): identifier[pot_type] . identifier[append] ( literal[int] ) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_amp] , identifier[p] . identifier[alpha] , identifier[p] . identifier[q2] , identifier[p] . identifier[core2] ]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[interpRZPotential] ): identifier[pot_type] . identifier[append] ( literal[int] ) identifier[pot_args] . identifier[extend] ([ identifier[len] ( identifier[p] . identifier[_rgrid] ), identifier[len] ( identifier[p] . identifier[_zgrid] )]) keyword[if] identifier[p] . identifier[_logR] : identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_logrgrid] [ identifier[ii] ] keyword[for] identifier[ii] keyword[in] identifier[range] ( identifier[len] ( identifier[p] . identifier[_rgrid] ))]) keyword[else] : identifier[pot_args] . identifier[extend] ([ identifier[p] . 
identifier[_rgrid] [ identifier[ii] ] keyword[for] identifier[ii] keyword[in] identifier[range] ( identifier[len] ( identifier[p] . identifier[_rgrid] ))]) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_zgrid] [ identifier[ii] ] keyword[for] identifier[ii] keyword[in] identifier[range] ( identifier[len] ( identifier[p] . identifier[_zgrid] ))]) keyword[if] identifier[hasattr] ( identifier[p] , literal[string] ): identifier[pot_args] . identifier[extend] ([ identifier[x] keyword[for] identifier[x] keyword[in] identifier[p] . identifier[_potGrid_splinecoeffs] . identifier[flatten] ( identifier[order] = literal[string] )]) keyword[else] : identifier[warnings] . identifier[warn] ( literal[string] , identifier[galpyWarning] ) identifier[pot_args] . identifier[extend] ( identifier[list] ( identifier[nu] . identifier[ones] ( identifier[len] ( identifier[p] . identifier[_rgrid] )* identifier[len] ( identifier[p] . identifier[_zgrid] )))) keyword[if] identifier[hasattr] ( identifier[p] , literal[string] ): identifier[pot_args] . identifier[extend] ([ identifier[x] keyword[for] identifier[x] keyword[in] identifier[p] . identifier[_rforceGrid_splinecoeffs] . identifier[flatten] ( identifier[order] = literal[string] )]) keyword[else] : identifier[warnings] . identifier[warn] ( literal[string] , identifier[galpyWarning] ) identifier[pot_args] . identifier[extend] ( identifier[list] ( identifier[nu] . identifier[ones] ( identifier[len] ( identifier[p] . identifier[_rgrid] )* identifier[len] ( identifier[p] . identifier[_zgrid] )))) keyword[if] identifier[hasattr] ( identifier[p] , literal[string] ): identifier[pot_args] . identifier[extend] ([ identifier[x] keyword[for] identifier[x] keyword[in] identifier[p] . identifier[_zforceGrid_splinecoeffs] . identifier[flatten] ( identifier[order] = literal[string] )]) keyword[else] : identifier[warnings] . identifier[warn] ( literal[string] , identifier[galpyWarning] ) identifier[pot_args] . 
identifier[extend] ( identifier[list] ( identifier[nu] . identifier[ones] ( identifier[len] ( identifier[p] . identifier[_rgrid] )* identifier[len] ( identifier[p] . identifier[_zgrid] )))) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_amp] , identifier[int] ( identifier[p] . identifier[_logR] )]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[IsochronePotential] ): identifier[pot_type] . identifier[append] ( literal[int] ) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_amp] , identifier[p] . identifier[b] ]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[PowerSphericalPotentialwCutoff] ): identifier[pot_type] . identifier[append] ( literal[int] ) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_amp] , identifier[p] . identifier[alpha] , identifier[p] . identifier[rc] ]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[MN3ExponentialDiskPotential] ): identifier[npot] += literal[int] identifier[pot_type] . identifier[extend] ([ literal[int] , literal[int] , literal[int] ]) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_amp] * identifier[p] . identifier[_mn3] [ literal[int] ]. identifier[_amp] , identifier[p] . identifier[_mn3] [ literal[int] ]. identifier[_a] , identifier[p] . identifier[_mn3] [ literal[int] ]. identifier[_b] , identifier[p] . identifier[_amp] * identifier[p] . identifier[_mn3] [ literal[int] ]. identifier[_amp] , identifier[p] . identifier[_mn3] [ literal[int] ]. identifier[_a] , identifier[p] . identifier[_mn3] [ literal[int] ]. identifier[_b] , identifier[p] . identifier[_amp] * identifier[p] . identifier[_mn3] [ literal[int] ]. identifier[_amp] , identifier[p] . identifier[_mn3] [ literal[int] ]. identifier[_a] , identifier[p] . identifier[_mn3] [ literal[int] ]. 
identifier[_b] ]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[KuzminKutuzovStaeckelPotential] ): identifier[pot_type] . identifier[append] ( literal[int] ) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_amp] , identifier[p] . identifier[_ac] , identifier[p] . identifier[_Delta] ]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[PlummerPotential] ): identifier[pot_type] . identifier[append] ( literal[int] ) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_amp] , identifier[p] . identifier[_b] ]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[PseudoIsothermalPotential] ): identifier[pot_type] . identifier[append] ( literal[int] ) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_amp] , identifier[p] . identifier[_a] ]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[KuzminDiskPotential] ): identifier[pot_type] . identifier[append] ( literal[int] ) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_amp] , identifier[p] . identifier[_a] ]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[BurkertPotential] ): identifier[pot_type] . identifier[append] ( literal[int] ) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_amp] , identifier[p] . identifier[a] ]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[EllipsoidalPotential] . identifier[EllipsoidalPotential] ): identifier[pot_args] . identifier[append] ( identifier[p] . identifier[_amp] ) identifier[pot_args] . identifier[extend] ([ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ]) keyword[if] identifier[isinstance] ( identifier[p] , identifier[potential] . 
identifier[TriaxialHernquistPotential] ): identifier[pot_type] . identifier[append] ( literal[int] ) identifier[pot_args] . identifier[extend] ([ literal[int] , identifier[p] . identifier[a] , identifier[p] . identifier[a4] ]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[TriaxialNFWPotential] ): identifier[pot_type] . identifier[append] ( literal[int] ) identifier[pot_args] . identifier[extend] ([ literal[int] , identifier[p] . identifier[a] , identifier[p] . identifier[a3] ]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[TriaxialJaffePotential] ): identifier[pot_type] . identifier[append] ( literal[int] ) identifier[pot_args] . identifier[extend] ([ literal[int] , identifier[p] . identifier[a] , identifier[p] . identifier[a2] ]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[PerfectEllipsoidPotential] ): identifier[pot_type] . identifier[append] ( literal[int] ) identifier[pot_args] . identifier[extend] ([ literal[int] , identifier[p] . identifier[a2] ]) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_b2] , identifier[p] . identifier[_c2] , identifier[int] ( identifier[p] . identifier[_aligned] )]) keyword[if] keyword[not] identifier[p] . identifier[_aligned] : identifier[pot_args] . identifier[extend] ( identifier[list] ( identifier[p] . identifier[_rot] . identifier[flatten] ())) keyword[else] : identifier[pot_args] . identifier[extend] ( identifier[list] ( identifier[nu] . identifier[eye] ( literal[int] ). identifier[flatten] ())) identifier[pot_args] . identifier[append] ( identifier[p] . identifier[_glorder] ) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_glx] [ identifier[ii] ] keyword[for] identifier[ii] keyword[in] identifier[range] ( identifier[p] . identifier[_glorder] )]) identifier[pot_args] . identifier[extend] ([- literal[int] * identifier[nu] . 
identifier[pi] * identifier[p] . identifier[_glw] [ identifier[ii] ]* identifier[p] . identifier[_b] * identifier[p] . identifier[_c] / identifier[nu] . identifier[sqrt] (( literal[int] +( identifier[p] . identifier[_b2] - literal[int] )* identifier[p] . identifier[_glx] [ identifier[ii] ]** literal[int] ) *( literal[int] +( identifier[p] . identifier[_c2] - literal[int] )* identifier[p] . identifier[_glx] [ identifier[ii] ]** literal[int] )) keyword[for] identifier[ii] keyword[in] identifier[range] ( identifier[p] . identifier[_glorder] )]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[SCFPotential] ): identifier[pt] , identifier[pa] = identifier[_parse_scf_pot] ( identifier[p] ) identifier[pot_type] . identifier[append] ( identifier[pt] ) identifier[pot_args] . identifier[extend] ( identifier[pa] ) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[SoftenedNeedleBarPotential] ): identifier[pot_type] . identifier[append] ( literal[int] ) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_amp] , identifier[p] . identifier[_a] , identifier[p] . identifier[_b] , identifier[p] . identifier[_c2] , identifier[p] . identifier[_pa] , identifier[p] . identifier[_omegab] ]) identifier[pot_args] . identifier[extend] ([ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[DiskSCFPotential] ): identifier[pt] , identifier[pa] = identifier[_parse_scf_pot] ( identifier[p] . identifier[_scf] , identifier[extra_amp] = identifier[p] . identifier[_amp] ) identifier[pot_type] . identifier[append] ( identifier[pt] ) identifier[pot_args] . identifier[extend] ( identifier[pa] ) keyword[for] identifier[Sigma] , identifier[hz] keyword[in] identifier[zip] ( identifier[p] . identifier[_Sigma_dict] , identifier[p] . 
identifier[_hz_dict] ): identifier[npot] += literal[int] identifier[pot_type] . identifier[append] ( literal[int] ) identifier[stype] = identifier[Sigma] . identifier[get] ( literal[string] , literal[string] ) keyword[if] identifier[stype] == literal[string] keyword[or] ( identifier[stype] == literal[string] keyword[and] literal[string] keyword[in] identifier[Sigma] ): identifier[pot_args] . identifier[extend] ([ literal[int] , literal[int] , literal[int] * identifier[nu] . identifier[pi] * identifier[Sigma] . identifier[get] ( literal[string] , literal[int] )* identifier[p] . identifier[_amp] , identifier[Sigma] . identifier[get] ( literal[string] , literal[int] / literal[int] )]) keyword[elif] identifier[stype] == literal[string] keyword[or] ( identifier[stype] == literal[string] keyword[and] literal[string] keyword[in] identifier[Sigma] ): identifier[pot_args] . identifier[extend] ([ literal[int] , literal[int] , literal[int] * identifier[nu] . identifier[pi] * identifier[Sigma] . identifier[get] ( literal[string] , literal[int] )* identifier[p] . identifier[_amp] , identifier[Sigma] . identifier[get] ( literal[string] , literal[int] / literal[int] ), identifier[Sigma] . identifier[get] ( literal[string] , literal[int] )]) identifier[hztype] = identifier[hz] . identifier[get] ( literal[string] , literal[string] ) keyword[if] identifier[hztype] == literal[string] : identifier[pot_args] . identifier[extend] ([ literal[int] , identifier[hz] . identifier[get] ( literal[string] , literal[int] )]) keyword[elif] identifier[hztype] == literal[string] : identifier[pot_args] . identifier[extend] ([ literal[int] , identifier[hz] . identifier[get] ( literal[string] , literal[int] )]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[SpiralArmsPotential] ): identifier[pot_type] . identifier[append] ( literal[int] ) identifier[pot_args] . identifier[extend] ([ identifier[len] ( identifier[p] . identifier[_Cs] ), identifier[p] . 
identifier[_amp] , identifier[p] . identifier[_N] , identifier[p] . identifier[_sin_alpha] , identifier[p] . identifier[_tan_alpha] , identifier[p] . identifier[_r_ref] , identifier[p] . identifier[_phi_ref] , identifier[p] . identifier[_Rs] , identifier[p] . identifier[_H] , identifier[p] . identifier[_omega] ]) identifier[pot_args] . identifier[extend] ( identifier[p] . identifier[_Cs] ) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[DehnenSmoothWrapperPotential] ): identifier[pot_type] . identifier[append] (- literal[int] ) identifier[wrap_npot] , identifier[wrap_pot_type] , identifier[wrap_pot_args] = identifier[_parse_pot] ( identifier[p] . identifier[_pot] , identifier[potforactions] = identifier[potforactions] , identifier[potfortorus] = identifier[potfortorus] ) identifier[pot_args] . identifier[append] ( identifier[wrap_npot] ) identifier[pot_type] . identifier[extend] ( identifier[wrap_pot_type] ) identifier[pot_args] . identifier[extend] ( identifier[wrap_pot_args] ) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_amp] , identifier[p] . identifier[_tform] , identifier[p] . identifier[_tsteady] , identifier[int] ( identifier[p] . identifier[_grow] )]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[SolidBodyRotationWrapperPotential] ): identifier[pot_type] . identifier[append] (- literal[int] ) identifier[wrap_npot] , identifier[wrap_pot_type] , identifier[wrap_pot_args] = identifier[_parse_pot] ( identifier[p] . identifier[_pot] , identifier[potforactions] = identifier[potforactions] , identifier[potfortorus] = identifier[potfortorus] ) identifier[pot_args] . identifier[append] ( identifier[wrap_npot] ) identifier[pot_type] . identifier[extend] ( identifier[wrap_pot_type] ) identifier[pot_args] . identifier[extend] ( identifier[wrap_pot_args] ) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_amp] , identifier[p] . 
identifier[_omega] , identifier[p] . identifier[_pa] ]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[CorotatingRotationWrapperPotential] ): identifier[pot_type] . identifier[append] (- literal[int] ) identifier[wrap_npot] , identifier[wrap_pot_type] , identifier[wrap_pot_args] = identifier[_parse_pot] ( identifier[p] . identifier[_pot] , identifier[potforactions] = identifier[potforactions] , identifier[potfortorus] = identifier[potfortorus] ) identifier[pot_args] . identifier[append] ( identifier[wrap_npot] ) identifier[pot_type] . identifier[extend] ( identifier[wrap_pot_type] ) identifier[pot_args] . identifier[extend] ( identifier[wrap_pot_args] ) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_amp] , identifier[p] . identifier[_vpo] , identifier[p] . identifier[_beta] , identifier[p] . identifier[_pa] , identifier[p] . identifier[_to] ]) keyword[elif] identifier[isinstance] ( identifier[p] , identifier[potential] . identifier[GaussianAmplitudeWrapperPotential] ): identifier[pot_type] . identifier[append] (- literal[int] ) identifier[wrap_npot] , identifier[wrap_pot_type] , identifier[wrap_pot_args] = identifier[_parse_pot] ( identifier[p] . identifier[_pot] , identifier[potforactions] = identifier[potforactions] , identifier[potfortorus] = identifier[potfortorus] ) identifier[pot_args] . identifier[append] ( identifier[wrap_npot] ) identifier[pot_type] . identifier[extend] ( identifier[wrap_pot_type] ) identifier[pot_args] . identifier[extend] ( identifier[wrap_pot_args] ) identifier[pot_args] . identifier[extend] ([ identifier[p] . identifier[_amp] , identifier[p] . identifier[_to] , identifier[p] . identifier[_sigma2] ]) identifier[pot_type] = identifier[nu] . identifier[array] ( identifier[pot_type] , identifier[dtype] = identifier[nu] . identifier[int32] , identifier[order] = literal[string] ) identifier[pot_args] = identifier[nu] . 
identifier[array] ( identifier[pot_args] , identifier[dtype] = identifier[nu] . identifier[float64] , identifier[order] = literal[string] ) keyword[return] ( identifier[npot] , identifier[pot_type] , identifier[pot_args] )
def _parse_pot(pot, potforactions=False, potfortorus=False): """Parse the potential so it can be fed to C""" #Figure out what's in pot if not isinstance(pot, list): pot = [pot] # depends on [control=['if'], data=[]] #Initialize everything pot_type = [] pot_args = [] npot = len(pot) for p in pot: if isinstance(p, potential.LogarithmicHaloPotential): pot_type.append(0) if p.isNonAxi: pot_args.extend([p._amp, p._q, p._core2, p._1m1overb2]) # depends on [control=['if'], data=[]] else: pot_args.extend([p._amp, p._q, p._core2, 2.0]) # 1m1overb2 > 1: axi # depends on [control=['if'], data=[]] elif isinstance(p, potential.DehnenBarPotential): pot_type.append(1) pot_args.extend([p._amp * p._af, p._tform, p._tsteady, p._rb, p._omegab, p._barphi]) # depends on [control=['if'], data=[]] elif isinstance(p, potential.MiyamotoNagaiPotential): pot_type.append(5) pot_args.extend([p._amp, p._a, p._b]) # depends on [control=['if'], data=[]] elif isinstance(p, potential.PowerSphericalPotential): pot_type.append(7) pot_args.extend([p._amp, p.alpha]) # depends on [control=['if'], data=[]] elif isinstance(p, potential.HernquistPotential): pot_type.append(8) pot_args.extend([p._amp, p.a]) # depends on [control=['if'], data=[]] elif isinstance(p, potential.NFWPotential): pot_type.append(9) pot_args.extend([p._amp, p.a]) # depends on [control=['if'], data=[]] elif isinstance(p, potential.JaffePotential): pot_type.append(10) pot_args.extend([p._amp, p.a]) # depends on [control=['if'], data=[]] elif isinstance(p, potential.DoubleExponentialDiskPotential): pot_type.append(11) pot_args.extend([p._amp, p._alpha, p._beta, p._kmaxFac, p._nzeros, p._glorder]) pot_args.extend([p._glx[ii] for ii in range(p._glorder)]) pot_args.extend([p._glw[ii] for ii in range(p._glorder)]) pot_args.extend([p._j0zeros[ii] for ii in range(p._nzeros + 1)]) pot_args.extend([p._dj0zeros[ii] for ii in range(p._nzeros + 1)]) pot_args.extend([p._j1zeros[ii] for ii in range(p._nzeros + 1)]) pot_args.extend([p._dj1zeros[ii] 
for ii in range(p._nzeros + 1)]) pot_args.extend([p._kp._amp, p._kp.alpha]) # depends on [control=['if'], data=[]] elif isinstance(p, potential.FlattenedPowerPotential): pot_type.append(12) pot_args.extend([p._amp, p.alpha, p.q2, p.core2]) # depends on [control=['if'], data=[]] elif isinstance(p, potential.interpRZPotential): pot_type.append(13) pot_args.extend([len(p._rgrid), len(p._zgrid)]) if p._logR: pot_args.extend([p._logrgrid[ii] for ii in range(len(p._rgrid))]) # depends on [control=['if'], data=[]] else: pot_args.extend([p._rgrid[ii] for ii in range(len(p._rgrid))]) pot_args.extend([p._zgrid[ii] for ii in range(len(p._zgrid))]) if hasattr(p, '_potGrid_splinecoeffs'): pot_args.extend([x for x in p._potGrid_splinecoeffs.flatten(order='C')]) # depends on [control=['if'], data=[]] else: # pragma: no cover warnings.warn('You are attempting to use the C implementation of interpRZPotential, but have not interpolated the potential itself; if you think this is needed for what you want to do, initialize the interpRZPotential instance with interpPot=True', galpyWarning) pot_args.extend(list(nu.ones(len(p._rgrid) * len(p._zgrid)))) if hasattr(p, '_rforceGrid_splinecoeffs'): pot_args.extend([x for x in p._rforceGrid_splinecoeffs.flatten(order='C')]) # depends on [control=['if'], data=[]] else: # pragma: no cover warnings.warn('You are attempting to use the C implementation of interpRZPotential, but have not interpolated the Rforce; if you think this is needed for what you want to do, initialize the interpRZPotential instance with interpRforce=True', galpyWarning) pot_args.extend(list(nu.ones(len(p._rgrid) * len(p._zgrid)))) if hasattr(p, '_zforceGrid_splinecoeffs'): pot_args.extend([x for x in p._zforceGrid_splinecoeffs.flatten(order='C')]) # depends on [control=['if'], data=[]] else: # pragma: no cover warnings.warn('You are attempting to use the C implementation of interpRZPotential, but have not interpolated the zforce; if you think this is needed for what you want 
to do, initialize the interpRZPotential instance with interpzforce=True', galpyWarning) pot_args.extend(list(nu.ones(len(p._rgrid) * len(p._zgrid)))) pot_args.extend([p._amp, int(p._logR)]) # depends on [control=['if'], data=[]] elif isinstance(p, potential.IsochronePotential): pot_type.append(14) pot_args.extend([p._amp, p.b]) # depends on [control=['if'], data=[]] elif isinstance(p, potential.PowerSphericalPotentialwCutoff): pot_type.append(15) pot_args.extend([p._amp, p.alpha, p.rc]) # depends on [control=['if'], data=[]] elif isinstance(p, potential.MN3ExponentialDiskPotential): # Three Miyamoto-Nagai disks npot += 2 pot_type.extend([5, 5, 5]) pot_args.extend([p._amp * p._mn3[0]._amp, p._mn3[0]._a, p._mn3[0]._b, p._amp * p._mn3[1]._amp, p._mn3[1]._a, p._mn3[1]._b, p._amp * p._mn3[2]._amp, p._mn3[2]._a, p._mn3[2]._b]) # depends on [control=['if'], data=[]] elif isinstance(p, potential.KuzminKutuzovStaeckelPotential): pot_type.append(16) pot_args.extend([p._amp, p._ac, p._Delta]) # depends on [control=['if'], data=[]] elif isinstance(p, potential.PlummerPotential): pot_type.append(17) pot_args.extend([p._amp, p._b]) # depends on [control=['if'], data=[]] elif isinstance(p, potential.PseudoIsothermalPotential): pot_type.append(18) pot_args.extend([p._amp, p._a]) # depends on [control=['if'], data=[]] elif isinstance(p, potential.KuzminDiskPotential): pot_type.append(19) pot_args.extend([p._amp, p._a]) # depends on [control=['if'], data=[]] elif isinstance(p, potential.BurkertPotential): pot_type.append(20) pot_args.extend([p._amp, p.a]) # depends on [control=['if'], data=[]] elif isinstance(p, potential.EllipsoidalPotential.EllipsoidalPotential): pot_args.append(p._amp) pot_args.extend([0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) # for caching # Potential specific parameters if isinstance(p, potential.TriaxialHernquistPotential): pot_type.append(21) pot_args.extend([2, p.a, p.a4]) # for psi, mdens, mdens_deriv # depends on [control=['if'], data=[]] elif isinstance(p, 
potential.TriaxialNFWPotential): pot_type.append(22) pot_args.extend([2, p.a, p.a3]) # for psi, mdens, mdens_deriv # depends on [control=['if'], data=[]] elif isinstance(p, potential.TriaxialJaffePotential): pot_type.append(23) pot_args.extend([2, p.a, p.a2]) # for psi, mdens, mdens_deriv # depends on [control=['if'], data=[]] elif isinstance(p, potential.PerfectEllipsoidPotential): pot_type.append(30) pot_args.extend([1, p.a2]) # for psi, mdens, mdens_deriv # depends on [control=['if'], data=[]] pot_args.extend([p._b2, p._c2, int(p._aligned)]) # Reg. Ellipsoidal if not p._aligned: pot_args.extend(list(p._rot.flatten())) # depends on [control=['if'], data=[]] else: pot_args.extend(list(nu.eye(3).flatten())) # not actually used pot_args.append(p._glorder) pot_args.extend([p._glx[ii] for ii in range(p._glorder)]) # this adds some common factors to the integration weights pot_args.extend([-4.0 * nu.pi * p._glw[ii] * p._b * p._c / nu.sqrt((1.0 + (p._b2 - 1.0) * p._glx[ii] ** 2.0) * (1.0 + (p._c2 - 1.0) * p._glx[ii] ** 2.0)) for ii in range(p._glorder)]) # depends on [control=['if'], data=[]] elif isinstance(p, potential.SCFPotential): # Type 24, see stand-alone parser below (pt, pa) = _parse_scf_pot(p) pot_type.append(pt) pot_args.extend(pa) # depends on [control=['if'], data=[]] elif isinstance(p, potential.SoftenedNeedleBarPotential): pot_type.append(25) pot_args.extend([p._amp, p._a, p._b, p._c2, p._pa, p._omegab]) pot_args.extend([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) # for caching # depends on [control=['if'], data=[]] elif isinstance(p, potential.DiskSCFPotential): # Need to pull this apart into: (a) SCF part, (b) constituent # [Sigma_i,h_i] parts # (a) SCF, multiply in any add'l amp (pt, pa) = _parse_scf_pot(p._scf, extra_amp=p._amp) pot_type.append(pt) pot_args.extend(pa) # (b) constituent [Sigma_i,h_i] parts for (Sigma, hz) in zip(p._Sigma_dict, p._hz_dict): npot += 1 pot_type.append(26) stype = Sigma.get('type', 'exp') if stype == 'exp' or (stype == 'exp' and 
'Rhole' in Sigma): pot_args.extend([3, 0, 4.0 * nu.pi * Sigma.get('amp', 1.0) * p._amp, Sigma.get('h', 1.0 / 3.0)]) # depends on [control=['if'], data=[]] elif stype == 'expwhole' or (stype == 'exp' and 'Rhole' in Sigma): pot_args.extend([4, 1, 4.0 * nu.pi * Sigma.get('amp', 1.0) * p._amp, Sigma.get('h', 1.0 / 3.0), Sigma.get('Rhole', 0.5)]) # depends on [control=['if'], data=[]] hztype = hz.get('type', 'exp') if hztype == 'exp': pot_args.extend([0, hz.get('h', 0.0375)]) # depends on [control=['if'], data=[]] elif hztype == 'sech2': pot_args.extend([1, hz.get('h', 0.0375)]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] elif isinstance(p, potential.SpiralArmsPotential): pot_type.append(27) pot_args.extend([len(p._Cs), p._amp, p._N, p._sin_alpha, p._tan_alpha, p._r_ref, p._phi_ref, p._Rs, p._H, p._omega]) pot_args.extend(p._Cs) # depends on [control=['if'], data=[]] # 30: PerfectEllipsoidPotential, done with others above ############################## WRAPPERS ############################### elif isinstance(p, potential.DehnenSmoothWrapperPotential): pot_type.append(-1) (wrap_npot, wrap_pot_type, wrap_pot_args) = _parse_pot(p._pot, potforactions=potforactions, potfortorus=potfortorus) pot_args.append(wrap_npot) pot_type.extend(wrap_pot_type) pot_args.extend(wrap_pot_args) pot_args.extend([p._amp, p._tform, p._tsteady, int(p._grow)]) # depends on [control=['if'], data=[]] elif isinstance(p, potential.SolidBodyRotationWrapperPotential): pot_type.append(-2) # Not sure how to easily avoid this duplication (wrap_npot, wrap_pot_type, wrap_pot_args) = _parse_pot(p._pot, potforactions=potforactions, potfortorus=potfortorus) pot_args.append(wrap_npot) pot_type.extend(wrap_pot_type) pot_args.extend(wrap_pot_args) pot_args.extend([p._amp, p._omega, p._pa]) # depends on [control=['if'], data=[]] elif isinstance(p, potential.CorotatingRotationWrapperPotential): pot_type.append(-4) # Not sure how to easily 
avoid this duplication (wrap_npot, wrap_pot_type, wrap_pot_args) = _parse_pot(p._pot, potforactions=potforactions, potfortorus=potfortorus) pot_args.append(wrap_npot) pot_type.extend(wrap_pot_type) pot_args.extend(wrap_pot_args) pot_args.extend([p._amp, p._vpo, p._beta, p._pa, p._to]) # depends on [control=['if'], data=[]] elif isinstance(p, potential.GaussianAmplitudeWrapperPotential): pot_type.append(-5) (wrap_npot, wrap_pot_type, wrap_pot_args) = _parse_pot(p._pot, potforactions=potforactions, potfortorus=potfortorus) pot_args.append(wrap_npot) pot_type.extend(wrap_pot_type) pot_args.extend(wrap_pot_args) pot_args.extend([p._amp, p._to, p._sigma2]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['p']] pot_type = nu.array(pot_type, dtype=nu.int32, order='C') pot_args = nu.array(pot_args, dtype=nu.float64, order='C') return (npot, pot_type, pot_args)
def get_jids(): ''' List all the jobs that we have.. ''' options = _get_options(ret=None) _response = _request("GET", options['url'] + options['db'] + "/_all_docs?include_docs=true") # Make sure the 'total_rows' is returned.. if not error out. if 'total_rows' not in _response: log.error('Didn\'t get valid response from requesting all docs: %s', _response) return {} # Return the rows. ret = {} for row in _response['rows']: # Because this shows all the documents in the database, including the # design documents, verify the id is salt jid jid = row['id'] if not salt.utils.jid.is_jid(jid): continue ret[jid] = salt.utils.jid.format_jid_instance(jid, row['doc']) return ret
def function[get_jids, parameter[]]: constant[ List all the jobs that we have.. ] variable[options] assign[=] call[name[_get_options], parameter[]] variable[_response] assign[=] call[name[_request], parameter[constant[GET], binary_operation[binary_operation[call[name[options]][constant[url]] + call[name[options]][constant[db]]] + constant[/_all_docs?include_docs=true]]]] if compare[constant[total_rows] <ast.NotIn object at 0x7da2590d7190> name[_response]] begin[:] call[name[log].error, parameter[constant[Didn't get valid response from requesting all docs: %s], name[_response]]] return[dictionary[[], []]] variable[ret] assign[=] dictionary[[], []] for taget[name[row]] in starred[call[name[_response]][constant[rows]]] begin[:] variable[jid] assign[=] call[name[row]][constant[id]] if <ast.UnaryOp object at 0x7da1b23441f0> begin[:] continue call[name[ret]][name[jid]] assign[=] call[name[salt].utils.jid.format_jid_instance, parameter[name[jid], call[name[row]][constant[doc]]]] return[name[ret]]
keyword[def] identifier[get_jids] (): literal[string] identifier[options] = identifier[_get_options] ( identifier[ret] = keyword[None] ) identifier[_response] = identifier[_request] ( literal[string] , identifier[options] [ literal[string] ]+ identifier[options] [ literal[string] ]+ literal[string] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[_response] : identifier[log] . identifier[error] ( literal[string] , identifier[_response] ) keyword[return] {} identifier[ret] ={} keyword[for] identifier[row] keyword[in] identifier[_response] [ literal[string] ]: identifier[jid] = identifier[row] [ literal[string] ] keyword[if] keyword[not] identifier[salt] . identifier[utils] . identifier[jid] . identifier[is_jid] ( identifier[jid] ): keyword[continue] identifier[ret] [ identifier[jid] ]= identifier[salt] . identifier[utils] . identifier[jid] . identifier[format_jid_instance] ( identifier[jid] , identifier[row] [ literal[string] ]) keyword[return] identifier[ret]
def get_jids(): """ List all the jobs that we have.. """ options = _get_options(ret=None) _response = _request('GET', options['url'] + options['db'] + '/_all_docs?include_docs=true') # Make sure the 'total_rows' is returned.. if not error out. if 'total_rows' not in _response: log.error("Didn't get valid response from requesting all docs: %s", _response) return {} # depends on [control=['if'], data=['_response']] # Return the rows. ret = {} for row in _response['rows']: # Because this shows all the documents in the database, including the # design documents, verify the id is salt jid jid = row['id'] if not salt.utils.jid.is_jid(jid): continue # depends on [control=['if'], data=[]] ret[jid] = salt.utils.jid.format_jid_instance(jid, row['doc']) # depends on [control=['for'], data=['row']] return ret
def p_variable_declaration_list_noin(self, p): """ variable_declaration_list_noin \ : variable_declaration_noin | variable_declaration_list_noin COMMA variable_declaration_noin """ if len(p) == 2: p[0] = [p[1]] else: p[1].append(p[3]) p[0] = p[1]
def function[p_variable_declaration_list_noin, parameter[self, p]]: constant[ variable_declaration_list_noin : variable_declaration_noin | variable_declaration_list_noin COMMA variable_declaration_noin ] if compare[call[name[len], parameter[name[p]]] equal[==] constant[2]] begin[:] call[name[p]][constant[0]] assign[=] list[[<ast.Subscript object at 0x7da20e9b3d60>]]
keyword[def] identifier[p_variable_declaration_list_noin] ( identifier[self] , identifier[p] ): literal[string] keyword[if] identifier[len] ( identifier[p] )== literal[int] : identifier[p] [ literal[int] ]=[ identifier[p] [ literal[int] ]] keyword[else] : identifier[p] [ literal[int] ]. identifier[append] ( identifier[p] [ literal[int] ]) identifier[p] [ literal[int] ]= identifier[p] [ literal[int] ]
def p_variable_declaration_list_noin(self, p): """ variable_declaration_list_noin : variable_declaration_noin | variable_declaration_list_noin COMMA variable_declaration_noin """ if len(p) == 2: p[0] = [p[1]] # depends on [control=['if'], data=[]] else: p[1].append(p[3]) p[0] = p[1]
def stop_cluster(self, profile): """Stop a cluster for a given profile.""" self.check_profile(profile) data = self.profiles[profile] if data['status'] == 'stopped': raise web.HTTPError(409, u'cluster not running') data = self.profiles[profile] cl = data['controller_launcher'] esl = data['engine_set_launcher'] if cl.running: cl.stop() if esl.running: esl.stop() # Return a temp info dict, the real one is updated in the on_stop # logic above. result = { 'profile': data['profile'], 'profile_dir': data['profile_dir'], 'status': 'stopped' } return result
def function[stop_cluster, parameter[self, profile]]: constant[Stop a cluster for a given profile.] call[name[self].check_profile, parameter[name[profile]]] variable[data] assign[=] call[name[self].profiles][name[profile]] if compare[call[name[data]][constant[status]] equal[==] constant[stopped]] begin[:] <ast.Raise object at 0x7da20c795bd0> variable[data] assign[=] call[name[self].profiles][name[profile]] variable[cl] assign[=] call[name[data]][constant[controller_launcher]] variable[esl] assign[=] call[name[data]][constant[engine_set_launcher]] if name[cl].running begin[:] call[name[cl].stop, parameter[]] if name[esl].running begin[:] call[name[esl].stop, parameter[]] variable[result] assign[=] dictionary[[<ast.Constant object at 0x7da20c7962c0>, <ast.Constant object at 0x7da20c7957b0>, <ast.Constant object at 0x7da20c794610>], [<ast.Subscript object at 0x7da20c795d80>, <ast.Subscript object at 0x7da20c794d30>, <ast.Constant object at 0x7da20c795b10>]] return[name[result]]
keyword[def] identifier[stop_cluster] ( identifier[self] , identifier[profile] ): literal[string] identifier[self] . identifier[check_profile] ( identifier[profile] ) identifier[data] = identifier[self] . identifier[profiles] [ identifier[profile] ] keyword[if] identifier[data] [ literal[string] ]== literal[string] : keyword[raise] identifier[web] . identifier[HTTPError] ( literal[int] , literal[string] ) identifier[data] = identifier[self] . identifier[profiles] [ identifier[profile] ] identifier[cl] = identifier[data] [ literal[string] ] identifier[esl] = identifier[data] [ literal[string] ] keyword[if] identifier[cl] . identifier[running] : identifier[cl] . identifier[stop] () keyword[if] identifier[esl] . identifier[running] : identifier[esl] . identifier[stop] () identifier[result] ={ literal[string] : identifier[data] [ literal[string] ], literal[string] : identifier[data] [ literal[string] ], literal[string] : literal[string] } keyword[return] identifier[result]
def stop_cluster(self, profile): """Stop a cluster for a given profile.""" self.check_profile(profile) data = self.profiles[profile] if data['status'] == 'stopped': raise web.HTTPError(409, u'cluster not running') # depends on [control=['if'], data=[]] data = self.profiles[profile] cl = data['controller_launcher'] esl = data['engine_set_launcher'] if cl.running: cl.stop() # depends on [control=['if'], data=[]] if esl.running: esl.stop() # depends on [control=['if'], data=[]] # Return a temp info dict, the real one is updated in the on_stop # logic above. result = {'profile': data['profile'], 'profile_dir': data['profile_dir'], 'status': 'stopped'} return result
def _string_from_cmd_list(cmd_list): """Takes a list of command line arguments and returns a pretty representation for printing.""" cl = [] for arg in map(str, cmd_list): if ' ' in arg or '\t' in arg: arg = '"' + arg + '"' cl.append(arg) return ' '.join(cl)
def function[_string_from_cmd_list, parameter[cmd_list]]: constant[Takes a list of command line arguments and returns a pretty representation for printing.] variable[cl] assign[=] list[[]] for taget[name[arg]] in starred[call[name[map], parameter[name[str], name[cmd_list]]]] begin[:] if <ast.BoolOp object at 0x7da20c6c70d0> begin[:] variable[arg] assign[=] binary_operation[binary_operation[constant["] + name[arg]] + constant["]] call[name[cl].append, parameter[name[arg]]] return[call[constant[ ].join, parameter[name[cl]]]]
keyword[def] identifier[_string_from_cmd_list] ( identifier[cmd_list] ): literal[string] identifier[cl] =[] keyword[for] identifier[arg] keyword[in] identifier[map] ( identifier[str] , identifier[cmd_list] ): keyword[if] literal[string] keyword[in] identifier[arg] keyword[or] literal[string] keyword[in] identifier[arg] : identifier[arg] = literal[string] + identifier[arg] + literal[string] identifier[cl] . identifier[append] ( identifier[arg] ) keyword[return] literal[string] . identifier[join] ( identifier[cl] )
def _string_from_cmd_list(cmd_list): """Takes a list of command line arguments and returns a pretty representation for printing.""" cl = [] for arg in map(str, cmd_list): if ' ' in arg or '\t' in arg: arg = '"' + arg + '"' # depends on [control=['if'], data=[]] cl.append(arg) # depends on [control=['for'], data=['arg']] return ' '.join(cl)
def edge_lengths(self): """ Compute the edge-lengths of each triangle in the triangulation. """ simplex = self.simplices.T # simplex is vectors a, b, c defining the corners a = self.points[simplex[0]] b = self.points[simplex[1]] c = self.points[simplex[2]] ## dot products to obtain angles ab = np.arccos((a * b).sum(axis=1)) bc = np.arccos((b * c).sum(axis=1)) ac = np.arccos((a * c).sum(axis=1)) ## As this is a unit sphere, angle = length so ... return ab, bc, ac
def function[edge_lengths, parameter[self]]: constant[ Compute the edge-lengths of each triangle in the triangulation. ] variable[simplex] assign[=] name[self].simplices.T variable[a] assign[=] call[name[self].points][call[name[simplex]][constant[0]]] variable[b] assign[=] call[name[self].points][call[name[simplex]][constant[1]]] variable[c] assign[=] call[name[self].points][call[name[simplex]][constant[2]]] variable[ab] assign[=] call[name[np].arccos, parameter[call[binary_operation[name[a] * name[b]].sum, parameter[]]]] variable[bc] assign[=] call[name[np].arccos, parameter[call[binary_operation[name[b] * name[c]].sum, parameter[]]]] variable[ac] assign[=] call[name[np].arccos, parameter[call[binary_operation[name[a] * name[c]].sum, parameter[]]]] return[tuple[[<ast.Name object at 0x7da1b24b3550>, <ast.Name object at 0x7da1b24b24a0>, <ast.Name object at 0x7da1b24b2cb0>]]]
keyword[def] identifier[edge_lengths] ( identifier[self] ): literal[string] identifier[simplex] = identifier[self] . identifier[simplices] . identifier[T] identifier[a] = identifier[self] . identifier[points] [ identifier[simplex] [ literal[int] ]] identifier[b] = identifier[self] . identifier[points] [ identifier[simplex] [ literal[int] ]] identifier[c] = identifier[self] . identifier[points] [ identifier[simplex] [ literal[int] ]] identifier[ab] = identifier[np] . identifier[arccos] (( identifier[a] * identifier[b] ). identifier[sum] ( identifier[axis] = literal[int] )) identifier[bc] = identifier[np] . identifier[arccos] (( identifier[b] * identifier[c] ). identifier[sum] ( identifier[axis] = literal[int] )) identifier[ac] = identifier[np] . identifier[arccos] (( identifier[a] * identifier[c] ). identifier[sum] ( identifier[axis] = literal[int] )) keyword[return] identifier[ab] , identifier[bc] , identifier[ac]
def edge_lengths(self): """ Compute the edge-lengths of each triangle in the triangulation. """ simplex = self.simplices.T # simplex is vectors a, b, c defining the corners a = self.points[simplex[0]] b = self.points[simplex[1]] c = self.points[simplex[2]] ## dot products to obtain angles ab = np.arccos((a * b).sum(axis=1)) bc = np.arccos((b * c).sum(axis=1)) ac = np.arccos((a * c).sum(axis=1)) ## As this is a unit sphere, angle = length so ... return (ab, bc, ac)
def extract_indexes(coords): """Yields the name & index of valid indexes from a mapping of coords""" for name, variable in coords.items(): variable = as_variable(variable, name=name) if variable.dims == (name,): yield name, variable.to_index()
def function[extract_indexes, parameter[coords]]: constant[Yields the name & index of valid indexes from a mapping of coords] for taget[tuple[[<ast.Name object at 0x7da1b26ada80>, <ast.Name object at 0x7da1b26af670>]]] in starred[call[name[coords].items, parameter[]]] begin[:] variable[variable] assign[=] call[name[as_variable], parameter[name[variable]]] if compare[name[variable].dims equal[==] tuple[[<ast.Name object at 0x7da1b1f95c30>]]] begin[:] <ast.Yield object at 0x7da1b1f955d0>
keyword[def] identifier[extract_indexes] ( identifier[coords] ): literal[string] keyword[for] identifier[name] , identifier[variable] keyword[in] identifier[coords] . identifier[items] (): identifier[variable] = identifier[as_variable] ( identifier[variable] , identifier[name] = identifier[name] ) keyword[if] identifier[variable] . identifier[dims] ==( identifier[name] ,): keyword[yield] identifier[name] , identifier[variable] . identifier[to_index] ()
def extract_indexes(coords): """Yields the name & index of valid indexes from a mapping of coords""" for (name, variable) in coords.items(): variable = as_variable(variable, name=name) if variable.dims == (name,): yield (name, variable.to_index()) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
def get_msg(self): """This method is used to prepare the preamble text to display to the user in non-batch mode. If your policy sets self.distro that text will be substituted accordingly. You can also override this method to do something more complicated.""" width = 72 _msg = self.msg % {'distro': self.distro, 'vendor': self.vendor, 'vendor_url': self.vendor_url, 'vendor_text': self.vendor_text, 'tmpdir': self.commons['tmpdir']} _fmt = "" for line in _msg.splitlines(): _fmt = _fmt + fill(line, width, replace_whitespace=False) + '\n' return _fmt
def function[get_msg, parameter[self]]: constant[This method is used to prepare the preamble text to display to the user in non-batch mode. If your policy sets self.distro that text will be substituted accordingly. You can also override this method to do something more complicated.] variable[width] assign[=] constant[72] variable[_msg] assign[=] binary_operation[name[self].msg <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da18c4cd810>, <ast.Constant object at 0x7da18c4cf340>, <ast.Constant object at 0x7da18c4cc340>, <ast.Constant object at 0x7da18c4cc130>, <ast.Constant object at 0x7da18c4cc370>], [<ast.Attribute object at 0x7da18c4cd300>, <ast.Attribute object at 0x7da18c4ccbb0>, <ast.Attribute object at 0x7da18c4cdd50>, <ast.Attribute object at 0x7da18c4cc5b0>, <ast.Subscript object at 0x7da18c4cffa0>]]] variable[_fmt] assign[=] constant[] for taget[name[line]] in starred[call[name[_msg].splitlines, parameter[]]] begin[:] variable[_fmt] assign[=] binary_operation[binary_operation[name[_fmt] + call[name[fill], parameter[name[line], name[width]]]] + constant[ ]] return[name[_fmt]]
keyword[def] identifier[get_msg] ( identifier[self] ): literal[string] identifier[width] = literal[int] identifier[_msg] = identifier[self] . identifier[msg] %{ literal[string] : identifier[self] . identifier[distro] , literal[string] : identifier[self] . identifier[vendor] , literal[string] : identifier[self] . identifier[vendor_url] , literal[string] : identifier[self] . identifier[vendor_text] , literal[string] : identifier[self] . identifier[commons] [ literal[string] ]} identifier[_fmt] = literal[string] keyword[for] identifier[line] keyword[in] identifier[_msg] . identifier[splitlines] (): identifier[_fmt] = identifier[_fmt] + identifier[fill] ( identifier[line] , identifier[width] , identifier[replace_whitespace] = keyword[False] )+ literal[string] keyword[return] identifier[_fmt]
def get_msg(self): """This method is used to prepare the preamble text to display to the user in non-batch mode. If your policy sets self.distro that text will be substituted accordingly. You can also override this method to do something more complicated.""" width = 72 _msg = self.msg % {'distro': self.distro, 'vendor': self.vendor, 'vendor_url': self.vendor_url, 'vendor_text': self.vendor_text, 'tmpdir': self.commons['tmpdir']} _fmt = '' for line in _msg.splitlines(): _fmt = _fmt + fill(line, width, replace_whitespace=False) + '\n' # depends on [control=['for'], data=['line']] return _fmt
def mad(y_true, y_pred): """Median absolute deviation """ y_true, y_pred = _mask_nan(y_true, y_pred) return np.mean(np.abs(y_true - y_pred))
def function[mad, parameter[y_true, y_pred]]: constant[Median absolute deviation ] <ast.Tuple object at 0x7da1b0415990> assign[=] call[name[_mask_nan], parameter[name[y_true], name[y_pred]]] return[call[name[np].mean, parameter[call[name[np].abs, parameter[binary_operation[name[y_true] - name[y_pred]]]]]]]
keyword[def] identifier[mad] ( identifier[y_true] , identifier[y_pred] ): literal[string] identifier[y_true] , identifier[y_pred] = identifier[_mask_nan] ( identifier[y_true] , identifier[y_pred] ) keyword[return] identifier[np] . identifier[mean] ( identifier[np] . identifier[abs] ( identifier[y_true] - identifier[y_pred] ))
def mad(y_true, y_pred): """Median absolute deviation """ (y_true, y_pred) = _mask_nan(y_true, y_pred) return np.mean(np.abs(y_true - y_pred))
def get_dependencies(variable, context, only_unprotected=False): ''' Args: variable context (Contract|Function) only_unprotected (bool): True only unprotected function are considered Returns: list(Variable) ''' assert isinstance(context, (Contract, Function)) assert isinstance(only_unprotected, bool) if only_unprotected: return context.context[KEY_NON_SSA].get(variable, []) return context.context[KEY_NON_SSA_UNPROTECTED].get(variable, [])
def function[get_dependencies, parameter[variable, context, only_unprotected]]: constant[ Args: variable context (Contract|Function) only_unprotected (bool): True only unprotected function are considered Returns: list(Variable) ] assert[call[name[isinstance], parameter[name[context], tuple[[<ast.Name object at 0x7da204345180>, <ast.Name object at 0x7da204347280>]]]]] assert[call[name[isinstance], parameter[name[only_unprotected], name[bool]]]] if name[only_unprotected] begin[:] return[call[call[name[context].context][name[KEY_NON_SSA]].get, parameter[name[variable], list[[]]]]] return[call[call[name[context].context][name[KEY_NON_SSA_UNPROTECTED]].get, parameter[name[variable], list[[]]]]]
keyword[def] identifier[get_dependencies] ( identifier[variable] , identifier[context] , identifier[only_unprotected] = keyword[False] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[context] ,( identifier[Contract] , identifier[Function] )) keyword[assert] identifier[isinstance] ( identifier[only_unprotected] , identifier[bool] ) keyword[if] identifier[only_unprotected] : keyword[return] identifier[context] . identifier[context] [ identifier[KEY_NON_SSA] ]. identifier[get] ( identifier[variable] ,[]) keyword[return] identifier[context] . identifier[context] [ identifier[KEY_NON_SSA_UNPROTECTED] ]. identifier[get] ( identifier[variable] ,[])
def get_dependencies(variable, context, only_unprotected=False): """ Args: variable context (Contract|Function) only_unprotected (bool): True only unprotected function are considered Returns: list(Variable) """ assert isinstance(context, (Contract, Function)) assert isinstance(only_unprotected, bool) if only_unprotected: return context.context[KEY_NON_SSA].get(variable, []) # depends on [control=['if'], data=[]] return context.context[KEY_NON_SSA_UNPROTECTED].get(variable, [])
def yield_param_completion(self, param, last_word): """ yields a parameter """ return Completion(param, -len(last_word), display_meta=self.param_description.get( self.current_command + " " + str(param), '').replace(os.linesep, ''))
def function[yield_param_completion, parameter[self, param, last_word]]: constant[ yields a parameter ] return[call[name[Completion], parameter[name[param], <ast.UnaryOp object at 0x7da20c6c78e0>]]]
keyword[def] identifier[yield_param_completion] ( identifier[self] , identifier[param] , identifier[last_word] ): literal[string] keyword[return] identifier[Completion] ( identifier[param] ,- identifier[len] ( identifier[last_word] ), identifier[display_meta] = identifier[self] . identifier[param_description] . identifier[get] ( identifier[self] . identifier[current_command] + literal[string] + identifier[str] ( identifier[param] ), literal[string] ). identifier[replace] ( identifier[os] . identifier[linesep] , literal[string] ))
def yield_param_completion(self, param, last_word): """ yields a parameter """ return Completion(param, -len(last_word), display_meta=self.param_description.get(self.current_command + ' ' + str(param), '').replace(os.linesep, ''))
def bumpversion(self, part, **kwargs): """ Run bump2version.main() with the specified arguments. """ import bumpversion args = ['--verbose'] if self.verbose > 1 else [] for k, v in kwargs.items(): arg = "--{}".format(k.replace("_", "-")) if isinstance(v, bool): if v is False: continue args.append(arg) else: args.extend([arg, str(v)]) args.append(part) log.debug( "$ bumpversion %s" % " ".join(a.replace(" ", "\\ ") for a in args)) bumpversion.main(args)
def function[bumpversion, parameter[self, part]]: constant[ Run bump2version.main() with the specified arguments. ] import module[bumpversion] variable[args] assign[=] <ast.IfExp object at 0x7da18ede77f0> for taget[tuple[[<ast.Name object at 0x7da18ede5e40>, <ast.Name object at 0x7da18ede4490>]]] in starred[call[name[kwargs].items, parameter[]]] begin[:] variable[arg] assign[=] call[constant[--{}].format, parameter[call[name[k].replace, parameter[constant[_], constant[-]]]]] if call[name[isinstance], parameter[name[v], name[bool]]] begin[:] if compare[name[v] is constant[False]] begin[:] continue call[name[args].append, parameter[name[arg]]] call[name[args].append, parameter[name[part]]] call[name[log].debug, parameter[binary_operation[constant[$ bumpversion %s] <ast.Mod object at 0x7da2590d6920> call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da2041db2e0>]]]]] call[name[bumpversion].main, parameter[name[args]]]
keyword[def] identifier[bumpversion] ( identifier[self] , identifier[part] ,** identifier[kwargs] ): literal[string] keyword[import] identifier[bumpversion] identifier[args] =[ literal[string] ] keyword[if] identifier[self] . identifier[verbose] > literal[int] keyword[else] [] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[kwargs] . identifier[items] (): identifier[arg] = literal[string] . identifier[format] ( identifier[k] . identifier[replace] ( literal[string] , literal[string] )) keyword[if] identifier[isinstance] ( identifier[v] , identifier[bool] ): keyword[if] identifier[v] keyword[is] keyword[False] : keyword[continue] identifier[args] . identifier[append] ( identifier[arg] ) keyword[else] : identifier[args] . identifier[extend] ([ identifier[arg] , identifier[str] ( identifier[v] )]) identifier[args] . identifier[append] ( identifier[part] ) identifier[log] . identifier[debug] ( literal[string] % literal[string] . identifier[join] ( identifier[a] . identifier[replace] ( literal[string] , literal[string] ) keyword[for] identifier[a] keyword[in] identifier[args] )) identifier[bumpversion] . identifier[main] ( identifier[args] )
def bumpversion(self, part, **kwargs): """ Run bump2version.main() with the specified arguments. """ import bumpversion args = ['--verbose'] if self.verbose > 1 else [] for (k, v) in kwargs.items(): arg = '--{}'.format(k.replace('_', '-')) if isinstance(v, bool): if v is False: continue # depends on [control=['if'], data=[]] args.append(arg) # depends on [control=['if'], data=[]] else: args.extend([arg, str(v)]) # depends on [control=['for'], data=[]] args.append(part) log.debug('$ bumpversion %s' % ' '.join((a.replace(' ', '\\ ') for a in args))) bumpversion.main(args)
def isocurve(data, level, connected=False, extend_to_edge=False): """ Generate isocurve from 2D data using marching squares algorithm. Parameters ---------- data : ndarray 2D numpy array of scalar values level : float The level at which to generate an isosurface connected : bool If False, return a single long list of point pairs If True, return multiple long lists of connected point locations. (This is slower but better for drawing continuous lines) extend_to_edge : bool If True, extend the curves to reach the exact edges of the data. """ # This function is SLOW; plenty of room for optimization here. if extend_to_edge: d2 = np.empty((data.shape[0]+2, data.shape[1]+2), dtype=data.dtype) d2[1:-1, 1:-1] = data d2[0, 1:-1] = data[0] d2[-1, 1:-1] = data[-1] d2[1:-1, 0] = data[:, 0] d2[1:-1, -1] = data[:, -1] d2[0, 0] = d2[0, 1] d2[0, -1] = d2[1, -1] d2[-1, 0] = d2[-1, 1] d2[-1, -1] = d2[-1, -2] data = d2 side_table = [ [], [0, 1], [1, 2], [0, 2], [0, 3], [1, 3], [0, 1, 2, 3], [2, 3], [2, 3], [0, 1, 2, 3], [1, 3], [0, 3], [0, 2], [1, 2], [0, 1], [] ] edge_key = [ [(0, 1), (0, 0)], [(0, 0), (1, 0)], [(1, 0), (1, 1)], [(1, 1), (0, 1)] ] level = float(level) lines = [] # mark everything below the isosurface level mask = data < level ## make four sub-fields and compute indexes for grid cells index = np.zeros([x-1 for x in data.shape], dtype=np.ubyte) fields = np.empty((2, 2), dtype=object) slices = [slice(0, -1), slice(1, None)] for i in [0, 1]: for j in [0, 1]: fields[i, j] = mask[slices[i], slices[j]] vertIndex = i+2*j index += (fields[i, j] * 2**vertIndex).astype(np.ubyte) # add lines for i in range(index.shape[0]): # data x-axis for j in range(index.shape[1]): # data y-axis sides = side_table[index[i, j]] for l in range(0, len(sides), 2): # faces for this grid cell edges = sides[l:l+2] pts = [] for m in [0, 1]: # points in this face # p1, p2 are points at either side of an edge p1 = edge_key[edges[m]][0] p2 = edge_key[edges[m]][1] # v1 and v2 are the values at p1 and p2 
v1 = data[i+p1[0], j+p1[1]] v2 = data[i+p2[0], j+p2[1]] f = (level-v1) / (v2-v1) fi = 1.0 - f # interpolate between corners p = (p1[0]*fi + p2[0]*f + i + 0.5, p1[1]*fi + p2[1]*f + j + 0.5) if extend_to_edge: # check bounds p = (min(data.shape[0]-2, max(0, p[0]-1)), min(data.shape[1]-2, max(0, p[1]-1))) if connected: gridKey = (i + (1 if edges[m] == 2 else 0), j + (1 if edges[m] == 3 else 0), edges[m] % 2) # give the actual position and a key identifying the # grid location (for connecting segments) pts.append((p, gridKey)) else: pts.append(p) lines.append(pts) if not connected: return lines # turn disjoint list of segments into continuous lines points = {} # maps each point to its connections for a, b in lines: if a[1] not in points: points[a[1]] = [] points[a[1]].append([a, b]) if b[1] not in points: points[b[1]] = [] points[b[1]].append([b, a]) # rearrange into chains for k in list(points.keys()): try: chains = points[k] except KeyError: # already used this point elsewhere continue for chain in chains: x = None while True: if x == chain[-1][1]: break # nothing left to do on this chain x = chain[-1][1] if x == k: # chain has looped; we're done and can ignore the opposite # chain break y = chain[-2][1] connects = points[x] for conn in connects[:]: if conn[1][1] != y: chain.extend(conn[1:]) del points[x] if chain[0][1] == chain[-1][1]: # looped chain; no need to continue the other direction chains.pop() break # extract point locations lines = [] for chain in points.values(): if len(chain) == 2: # join together ends of chain chain = chain[1][1:][::-1] + chain[0] else: chain = chain[0] lines.append([pt[0] for pt in chain]) return lines
def function[isocurve, parameter[data, level, connected, extend_to_edge]]: constant[ Generate isocurve from 2D data using marching squares algorithm. Parameters ---------- data : ndarray 2D numpy array of scalar values level : float The level at which to generate an isosurface connected : bool If False, return a single long list of point pairs If True, return multiple long lists of connected point locations. (This is slower but better for drawing continuous lines) extend_to_edge : bool If True, extend the curves to reach the exact edges of the data. ] if name[extend_to_edge] begin[:] variable[d2] assign[=] call[name[np].empty, parameter[tuple[[<ast.BinOp object at 0x7da1b0e9bc10>, <ast.BinOp object at 0x7da1b0e9baf0>]]]] call[name[d2]][tuple[[<ast.Slice object at 0x7da1b0e9b880>, <ast.Slice object at 0x7da1b0e9b7c0>]]] assign[=] name[data] call[name[d2]][tuple[[<ast.Constant object at 0x7da1b0e9b610>, <ast.Slice object at 0x7da1b0e9b5e0>]]] assign[=] call[name[data]][constant[0]] call[name[d2]][tuple[[<ast.UnaryOp object at 0x7da1b0e9b3d0>, <ast.Slice object at 0x7da1b0e9b370>]]] assign[=] call[name[data]][<ast.UnaryOp object at 0x7da1b0e9b250>] call[name[d2]][tuple[[<ast.Slice object at 0x7da1b0e9b130>, <ast.Constant object at 0x7da1b0e9b070>]]] assign[=] call[name[data]][tuple[[<ast.Slice object at 0x7da1b0e9afb0>, <ast.Constant object at 0x7da1b0e9af80>]]] call[name[d2]][tuple[[<ast.Slice object at 0x7da1b0e9ae90>, <ast.UnaryOp object at 0x7da1b0e9add0>]]] assign[=] call[name[data]][tuple[[<ast.Slice object at 0x7da1b0e9ace0>, <ast.UnaryOp object at 0x7da1b0e9acb0>]]] call[name[d2]][tuple[[<ast.Constant object at 0x7da1b0e9ab90>, <ast.Constant object at 0x7da1b0e9ab60>]]] assign[=] call[name[d2]][tuple[[<ast.Constant object at 0x7da1b0e9aaa0>, <ast.Constant object at 0x7da1b0e9aa70>]]] call[name[d2]][tuple[[<ast.Constant object at 0x7da1b0eadb40>, <ast.UnaryOp object at 0x7da1b0eadf30>]]] assign[=] call[name[d2]][tuple[[<ast.Constant object at 0x7da1b0eaddb0>, 
<ast.UnaryOp object at 0x7da1b0eae0b0>]]] call[name[d2]][tuple[[<ast.UnaryOp object at 0x7da1b0eadea0>, <ast.Constant object at 0x7da1b0eadfc0>]]] assign[=] call[name[d2]][tuple[[<ast.UnaryOp object at 0x7da1b0febdf0>, <ast.Constant object at 0x7da1b0feb970>]]] call[name[d2]][tuple[[<ast.UnaryOp object at 0x7da18c4cc970>, <ast.UnaryOp object at 0x7da18c4cdb70>]]] assign[=] call[name[d2]][tuple[[<ast.UnaryOp object at 0x7da18c4cf700>, <ast.UnaryOp object at 0x7da18c4ce380>]]] variable[data] assign[=] name[d2] variable[side_table] assign[=] list[[<ast.List object at 0x7da1b0e9a980>, <ast.List object at 0x7da1b0e9a950>, <ast.List object at 0x7da1b0e9a8c0>, <ast.List object at 0x7da1b0e9a830>, <ast.List object at 0x7da1b0e9a7a0>, <ast.List object at 0x7da1b0e9a710>, <ast.List object at 0x7da1b0e9a680>, <ast.List object at 0x7da1b0e9a590>, <ast.List object at 0x7da1b0e9a500>, <ast.List object at 0x7da1b0e9a470>, <ast.List object at 0x7da1b0e9a380>, <ast.List object at 0x7da1b0e9a2f0>, <ast.List object at 0x7da1b0e9a260>, <ast.List object at 0x7da1b0e9a1d0>, <ast.List object at 0x7da1b0e9a140>, <ast.List object at 0x7da1b0e9a0b0>]] variable[edge_key] assign[=] list[[<ast.List object at 0x7da1b0e99ff0>, <ast.List object at 0x7da1b0e99ea0>, <ast.List object at 0x7da1b0e99d50>, <ast.List object at 0x7da1b0e99c00>]] variable[level] assign[=] call[name[float], parameter[name[level]]] variable[lines] assign[=] list[[]] variable[mask] assign[=] compare[name[data] less[<] name[level]] variable[index] assign[=] call[name[np].zeros, parameter[<ast.ListComp object at 0x7da1b0e99750>]] variable[fields] assign[=] call[name[np].empty, parameter[tuple[[<ast.Constant object at 0x7da1b0e993f0>, <ast.Constant object at 0x7da1b0e993c0>]]]] variable[slices] assign[=] list[[<ast.Call object at 0x7da1b0fb3730>, <ast.Call object at 0x7da1b0fb2e90>]] for taget[name[i]] in starred[list[[<ast.Constant object at 0x7da1b0fb07f0>, <ast.Constant object at 0x7da1b0fb3850>]]] begin[:] for 
taget[name[j]] in starred[list[[<ast.Constant object at 0x7da1b0fb3a90>, <ast.Constant object at 0x7da1b0fb1390>]]] begin[:] call[name[fields]][tuple[[<ast.Name object at 0x7da1b0fb1d80>, <ast.Name object at 0x7da1b0fb2770>]]] assign[=] call[name[mask]][tuple[[<ast.Subscript object at 0x7da1b0fb29e0>, <ast.Subscript object at 0x7da1b0fb2c80>]]] variable[vertIndex] assign[=] binary_operation[name[i] + binary_operation[constant[2] * name[j]]] <ast.AugAssign object at 0x7da1b0fb2c50> for taget[name[i]] in starred[call[name[range], parameter[call[name[index].shape][constant[0]]]]] begin[:] for taget[name[j]] in starred[call[name[range], parameter[call[name[index].shape][constant[1]]]]] begin[:] variable[sides] assign[=] call[name[side_table]][call[name[index]][tuple[[<ast.Name object at 0x7da1b0e40400>, <ast.Name object at 0x7da1b0e40a30>]]]] for taget[name[l]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[sides]]], constant[2]]]] begin[:] variable[edges] assign[=] call[name[sides]][<ast.Slice object at 0x7da1b0e42380>] variable[pts] assign[=] list[[]] for taget[name[m]] in starred[list[[<ast.Constant object at 0x7da1b0e41840>, <ast.Constant object at 0x7da1b0e40a90>]]] begin[:] variable[p1] assign[=] call[call[name[edge_key]][call[name[edges]][name[m]]]][constant[0]] variable[p2] assign[=] call[call[name[edge_key]][call[name[edges]][name[m]]]][constant[1]] variable[v1] assign[=] call[name[data]][tuple[[<ast.BinOp object at 0x7da1b0e426b0>, <ast.BinOp object at 0x7da1b0e42a40>]]] variable[v2] assign[=] call[name[data]][tuple[[<ast.BinOp object at 0x7da1b0e41030>, <ast.BinOp object at 0x7da1b0e42b60>]]] variable[f] assign[=] binary_operation[binary_operation[name[level] - name[v1]] / binary_operation[name[v2] - name[v1]]] variable[fi] assign[=] binary_operation[constant[1.0] - name[f]] variable[p] assign[=] tuple[[<ast.BinOp object at 0x7da1b0e40820>, <ast.BinOp object at 0x7da1b0e401f0>]] if name[extend_to_edge] begin[:] variable[p] 
assign[=] tuple[[<ast.Call object at 0x7da1b0e41720>, <ast.Call object at 0x7da1b0e40460>]] if name[connected] begin[:] variable[gridKey] assign[=] tuple[[<ast.BinOp object at 0x7da1b0e435b0>, <ast.BinOp object at 0x7da1b0e43220>, <ast.BinOp object at 0x7da1b0e43e50>]] call[name[pts].append, parameter[tuple[[<ast.Name object at 0x7da1b0e42110>, <ast.Name object at 0x7da1b0e40040>]]]] call[name[lines].append, parameter[name[pts]]] if <ast.UnaryOp object at 0x7da1b0e42a10> begin[:] return[name[lines]] variable[points] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da1b0e42260>, <ast.Name object at 0x7da1b0e41c00>]]] in starred[name[lines]] begin[:] if compare[call[name[a]][constant[1]] <ast.NotIn object at 0x7da2590d7190> name[points]] begin[:] call[name[points]][call[name[a]][constant[1]]] assign[=] list[[]] call[call[name[points]][call[name[a]][constant[1]]].append, parameter[list[[<ast.Name object at 0x7da1b0e40ee0>, <ast.Name object at 0x7da1b0e402b0>]]]] if compare[call[name[b]][constant[1]] <ast.NotIn object at 0x7da2590d7190> name[points]] begin[:] call[name[points]][call[name[b]][constant[1]]] assign[=] list[[]] call[call[name[points]][call[name[b]][constant[1]]].append, parameter[list[[<ast.Name object at 0x7da1b0e41090>, <ast.Name object at 0x7da1b0e41210>]]]] for taget[name[k]] in starred[call[name[list], parameter[call[name[points].keys, parameter[]]]]] begin[:] <ast.Try object at 0x7da1b0e42470> for taget[name[chain]] in starred[name[chains]] begin[:] variable[x] assign[=] constant[None] while constant[True] begin[:] if compare[name[x] equal[==] call[call[name[chain]][<ast.UnaryOp object at 0x7da1b0e43130>]][constant[1]]] begin[:] break variable[x] assign[=] call[call[name[chain]][<ast.UnaryOp object at 0x7da1b0e43b80>]][constant[1]] if compare[name[x] equal[==] name[k]] begin[:] break variable[y] assign[=] call[call[name[chain]][<ast.UnaryOp object at 0x7da18dc9b550>]][constant[1]] variable[connects] assign[=] 
call[name[points]][name[x]] for taget[name[conn]] in starred[call[name[connects]][<ast.Slice object at 0x7da18dc99db0>]] begin[:] if compare[call[call[name[conn]][constant[1]]][constant[1]] not_equal[!=] name[y]] begin[:] call[name[chain].extend, parameter[call[name[conn]][<ast.Slice object at 0x7da18dc99570>]]] <ast.Delete object at 0x7da18dc9be50> if compare[call[call[name[chain]][constant[0]]][constant[1]] equal[==] call[call[name[chain]][<ast.UnaryOp object at 0x7da18dc9a170>]][constant[1]]] begin[:] call[name[chains].pop, parameter[]] break variable[lines] assign[=] list[[]] for taget[name[chain]] in starred[call[name[points].values, parameter[]]] begin[:] if compare[call[name[len], parameter[name[chain]]] equal[==] constant[2]] begin[:] variable[chain] assign[=] binary_operation[call[call[call[name[chain]][constant[1]]][<ast.Slice object at 0x7da18dc99930>]][<ast.Slice object at 0x7da18dc99f00>] + call[name[chain]][constant[0]]] call[name[lines].append, parameter[<ast.ListComp object at 0x7da18dc9b3a0>]] return[name[lines]]
keyword[def] identifier[isocurve] ( identifier[data] , identifier[level] , identifier[connected] = keyword[False] , identifier[extend_to_edge] = keyword[False] ): literal[string] keyword[if] identifier[extend_to_edge] : identifier[d2] = identifier[np] . identifier[empty] (( identifier[data] . identifier[shape] [ literal[int] ]+ literal[int] , identifier[data] . identifier[shape] [ literal[int] ]+ literal[int] ), identifier[dtype] = identifier[data] . identifier[dtype] ) identifier[d2] [ literal[int] :- literal[int] , literal[int] :- literal[int] ]= identifier[data] identifier[d2] [ literal[int] , literal[int] :- literal[int] ]= identifier[data] [ literal[int] ] identifier[d2] [- literal[int] , literal[int] :- literal[int] ]= identifier[data] [- literal[int] ] identifier[d2] [ literal[int] :- literal[int] , literal[int] ]= identifier[data] [:, literal[int] ] identifier[d2] [ literal[int] :- literal[int] ,- literal[int] ]= identifier[data] [:,- literal[int] ] identifier[d2] [ literal[int] , literal[int] ]= identifier[d2] [ literal[int] , literal[int] ] identifier[d2] [ literal[int] ,- literal[int] ]= identifier[d2] [ literal[int] ,- literal[int] ] identifier[d2] [- literal[int] , literal[int] ]= identifier[d2] [- literal[int] , literal[int] ] identifier[d2] [- literal[int] ,- literal[int] ]= identifier[d2] [- literal[int] ,- literal[int] ] identifier[data] = identifier[d2] identifier[side_table] =[ [], [ literal[int] , literal[int] ], [ literal[int] , literal[int] ], [ literal[int] , literal[int] ], [ literal[int] , literal[int] ], [ literal[int] , literal[int] ], [ literal[int] , literal[int] , literal[int] , literal[int] ], [ literal[int] , literal[int] ], [ literal[int] , literal[int] ], [ literal[int] , literal[int] , literal[int] , literal[int] ], [ literal[int] , literal[int] ], [ literal[int] , literal[int] ], [ literal[int] , literal[int] ], [ literal[int] , literal[int] ], [ literal[int] , literal[int] ], [] ] identifier[edge_key] =[ [( literal[int] , 
literal[int] ),( literal[int] , literal[int] )], [( literal[int] , literal[int] ),( literal[int] , literal[int] )], [( literal[int] , literal[int] ),( literal[int] , literal[int] )], [( literal[int] , literal[int] ),( literal[int] , literal[int] )] ] identifier[level] = identifier[float] ( identifier[level] ) identifier[lines] =[] identifier[mask] = identifier[data] < identifier[level] identifier[index] = identifier[np] . identifier[zeros] ([ identifier[x] - literal[int] keyword[for] identifier[x] keyword[in] identifier[data] . identifier[shape] ], identifier[dtype] = identifier[np] . identifier[ubyte] ) identifier[fields] = identifier[np] . identifier[empty] (( literal[int] , literal[int] ), identifier[dtype] = identifier[object] ) identifier[slices] =[ identifier[slice] ( literal[int] ,- literal[int] ), identifier[slice] ( literal[int] , keyword[None] )] keyword[for] identifier[i] keyword[in] [ literal[int] , literal[int] ]: keyword[for] identifier[j] keyword[in] [ literal[int] , literal[int] ]: identifier[fields] [ identifier[i] , identifier[j] ]= identifier[mask] [ identifier[slices] [ identifier[i] ], identifier[slices] [ identifier[j] ]] identifier[vertIndex] = identifier[i] + literal[int] * identifier[j] identifier[index] +=( identifier[fields] [ identifier[i] , identifier[j] ]* literal[int] ** identifier[vertIndex] ). identifier[astype] ( identifier[np] . identifier[ubyte] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[index] . identifier[shape] [ literal[int] ]): keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[index] . 
identifier[shape] [ literal[int] ]): identifier[sides] = identifier[side_table] [ identifier[index] [ identifier[i] , identifier[j] ]] keyword[for] identifier[l] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[sides] ), literal[int] ): identifier[edges] = identifier[sides] [ identifier[l] : identifier[l] + literal[int] ] identifier[pts] =[] keyword[for] identifier[m] keyword[in] [ literal[int] , literal[int] ]: identifier[p1] = identifier[edge_key] [ identifier[edges] [ identifier[m] ]][ literal[int] ] identifier[p2] = identifier[edge_key] [ identifier[edges] [ identifier[m] ]][ literal[int] ] identifier[v1] = identifier[data] [ identifier[i] + identifier[p1] [ literal[int] ], identifier[j] + identifier[p1] [ literal[int] ]] identifier[v2] = identifier[data] [ identifier[i] + identifier[p2] [ literal[int] ], identifier[j] + identifier[p2] [ literal[int] ]] identifier[f] =( identifier[level] - identifier[v1] )/( identifier[v2] - identifier[v1] ) identifier[fi] = literal[int] - identifier[f] identifier[p] =( identifier[p1] [ literal[int] ]* identifier[fi] + identifier[p2] [ literal[int] ]* identifier[f] + identifier[i] + literal[int] , identifier[p1] [ literal[int] ]* identifier[fi] + identifier[p2] [ literal[int] ]* identifier[f] + identifier[j] + literal[int] ) keyword[if] identifier[extend_to_edge] : identifier[p] =( identifier[min] ( identifier[data] . identifier[shape] [ literal[int] ]- literal[int] , identifier[max] ( literal[int] , identifier[p] [ literal[int] ]- literal[int] )), identifier[min] ( identifier[data] . 
identifier[shape] [ literal[int] ]- literal[int] , identifier[max] ( literal[int] , identifier[p] [ literal[int] ]- literal[int] ))) keyword[if] identifier[connected] : identifier[gridKey] =( identifier[i] +( literal[int] keyword[if] identifier[edges] [ identifier[m] ]== literal[int] keyword[else] literal[int] ), identifier[j] +( literal[int] keyword[if] identifier[edges] [ identifier[m] ]== literal[int] keyword[else] literal[int] ), identifier[edges] [ identifier[m] ]% literal[int] ) identifier[pts] . identifier[append] (( identifier[p] , identifier[gridKey] )) keyword[else] : identifier[pts] . identifier[append] ( identifier[p] ) identifier[lines] . identifier[append] ( identifier[pts] ) keyword[if] keyword[not] identifier[connected] : keyword[return] identifier[lines] identifier[points] ={} keyword[for] identifier[a] , identifier[b] keyword[in] identifier[lines] : keyword[if] identifier[a] [ literal[int] ] keyword[not] keyword[in] identifier[points] : identifier[points] [ identifier[a] [ literal[int] ]]=[] identifier[points] [ identifier[a] [ literal[int] ]]. identifier[append] ([ identifier[a] , identifier[b] ]) keyword[if] identifier[b] [ literal[int] ] keyword[not] keyword[in] identifier[points] : identifier[points] [ identifier[b] [ literal[int] ]]=[] identifier[points] [ identifier[b] [ literal[int] ]]. identifier[append] ([ identifier[b] , identifier[a] ]) keyword[for] identifier[k] keyword[in] identifier[list] ( identifier[points] . 
identifier[keys] ()): keyword[try] : identifier[chains] = identifier[points] [ identifier[k] ] keyword[except] identifier[KeyError] : keyword[continue] keyword[for] identifier[chain] keyword[in] identifier[chains] : identifier[x] = keyword[None] keyword[while] keyword[True] : keyword[if] identifier[x] == identifier[chain] [- literal[int] ][ literal[int] ]: keyword[break] identifier[x] = identifier[chain] [- literal[int] ][ literal[int] ] keyword[if] identifier[x] == identifier[k] : keyword[break] identifier[y] = identifier[chain] [- literal[int] ][ literal[int] ] identifier[connects] = identifier[points] [ identifier[x] ] keyword[for] identifier[conn] keyword[in] identifier[connects] [:]: keyword[if] identifier[conn] [ literal[int] ][ literal[int] ]!= identifier[y] : identifier[chain] . identifier[extend] ( identifier[conn] [ literal[int] :]) keyword[del] identifier[points] [ identifier[x] ] keyword[if] identifier[chain] [ literal[int] ][ literal[int] ]== identifier[chain] [- literal[int] ][ literal[int] ]: identifier[chains] . identifier[pop] () keyword[break] identifier[lines] =[] keyword[for] identifier[chain] keyword[in] identifier[points] . identifier[values] (): keyword[if] identifier[len] ( identifier[chain] )== literal[int] : identifier[chain] = identifier[chain] [ literal[int] ][ literal[int] :][::- literal[int] ]+ identifier[chain] [ literal[int] ] keyword[else] : identifier[chain] = identifier[chain] [ literal[int] ] identifier[lines] . identifier[append] ([ identifier[pt] [ literal[int] ] keyword[for] identifier[pt] keyword[in] identifier[chain] ]) keyword[return] identifier[lines]
def isocurve(data, level, connected=False, extend_to_edge=False): """ Generate isocurve from 2D data using marching squares algorithm. Parameters ---------- data : ndarray 2D numpy array of scalar values level : float The level at which to generate an isosurface connected : bool If False, return a single long list of point pairs If True, return multiple long lists of connected point locations. (This is slower but better for drawing continuous lines) extend_to_edge : bool If True, extend the curves to reach the exact edges of the data. """ # This function is SLOW; plenty of room for optimization here. if extend_to_edge: d2 = np.empty((data.shape[0] + 2, data.shape[1] + 2), dtype=data.dtype) d2[1:-1, 1:-1] = data d2[0, 1:-1] = data[0] d2[-1, 1:-1] = data[-1] d2[1:-1, 0] = data[:, 0] d2[1:-1, -1] = data[:, -1] d2[0, 0] = d2[0, 1] d2[0, -1] = d2[1, -1] d2[-1, 0] = d2[-1, 1] d2[-1, -1] = d2[-1, -2] data = d2 # depends on [control=['if'], data=[]] side_table = [[], [0, 1], [1, 2], [0, 2], [0, 3], [1, 3], [0, 1, 2, 3], [2, 3], [2, 3], [0, 1, 2, 3], [1, 3], [0, 3], [0, 2], [1, 2], [0, 1], []] edge_key = [[(0, 1), (0, 0)], [(0, 0), (1, 0)], [(1, 0), (1, 1)], [(1, 1), (0, 1)]] level = float(level) lines = [] # mark everything below the isosurface level mask = data < level ## make four sub-fields and compute indexes for grid cells index = np.zeros([x - 1 for x in data.shape], dtype=np.ubyte) fields = np.empty((2, 2), dtype=object) slices = [slice(0, -1), slice(1, None)] for i in [0, 1]: for j in [0, 1]: fields[i, j] = mask[slices[i], slices[j]] vertIndex = i + 2 * j index += (fields[i, j] * 2 ** vertIndex).astype(np.ubyte) # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] # add lines for i in range(index.shape[0]): # data x-axis for j in range(index.shape[1]): # data y-axis sides = side_table[index[i, j]] for l in range(0, len(sides), 2): # faces for this grid cell edges = sides[l:l + 2] pts = [] for m in [0, 1]: # points in this face # p1, 
p2 are points at either side of an edge p1 = edge_key[edges[m]][0] p2 = edge_key[edges[m]][1] # v1 and v2 are the values at p1 and p2 v1 = data[i + p1[0], j + p1[1]] v2 = data[i + p2[0], j + p2[1]] f = (level - v1) / (v2 - v1) fi = 1.0 - f # interpolate between corners p = (p1[0] * fi + p2[0] * f + i + 0.5, p1[1] * fi + p2[1] * f + j + 0.5) if extend_to_edge: # check bounds p = (min(data.shape[0] - 2, max(0, p[0] - 1)), min(data.shape[1] - 2, max(0, p[1] - 1))) # depends on [control=['if'], data=[]] if connected: gridKey = (i + (1 if edges[m] == 2 else 0), j + (1 if edges[m] == 3 else 0), edges[m] % 2) # give the actual position and a key identifying the # grid location (for connecting segments) pts.append((p, gridKey)) # depends on [control=['if'], data=[]] else: pts.append(p) # depends on [control=['for'], data=['m']] lines.append(pts) # depends on [control=['for'], data=['l']] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] if not connected: return lines # depends on [control=['if'], data=[]] # turn disjoint list of segments into continuous lines points = {} # maps each point to its connections for (a, b) in lines: if a[1] not in points: points[a[1]] = [] # depends on [control=['if'], data=['points']] points[a[1]].append([a, b]) if b[1] not in points: points[b[1]] = [] # depends on [control=['if'], data=['points']] points[b[1]].append([b, a]) # depends on [control=['for'], data=[]] # rearrange into chains for k in list(points.keys()): try: chains = points[k] # depends on [control=['try'], data=[]] except KeyError: # already used this point elsewhere continue # depends on [control=['except'], data=[]] for chain in chains: x = None while True: if x == chain[-1][1]: break # nothing left to do on this chain # depends on [control=['if'], data=[]] x = chain[-1][1] if x == k: # chain has looped; we're done and can ignore the opposite # chain break # depends on [control=['if'], data=[]] y = chain[-2][1] connects = points[x] for conn 
in connects[:]: if conn[1][1] != y: chain.extend(conn[1:]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['conn']] del points[x] # depends on [control=['while'], data=[]] if chain[0][1] == chain[-1][1]: # looped chain; no need to continue the other direction chains.pop() break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['chain']] # depends on [control=['for'], data=['k']] # extract point locations lines = [] for chain in points.values(): if len(chain) == 2: # join together ends of chain chain = chain[1][1:][::-1] + chain[0] # depends on [control=['if'], data=[]] else: chain = chain[0] lines.append([pt[0] for pt in chain]) # depends on [control=['for'], data=['chain']] return lines
def match(self, dom, act): """ Check if the given `domain` and `act` are allowed by this capability """ return self.match_domain(dom) and self.match_action(act)
def function[match, parameter[self, dom, act]]: constant[ Check if the given `domain` and `act` are allowed by this capability ] return[<ast.BoolOp object at 0x7da1b26aef80>]
keyword[def] identifier[match] ( identifier[self] , identifier[dom] , identifier[act] ): literal[string] keyword[return] identifier[self] . identifier[match_domain] ( identifier[dom] ) keyword[and] identifier[self] . identifier[match_action] ( identifier[act] )
def match(self, dom, act): """ Check if the given `domain` and `act` are allowed by this capability """ return self.match_domain(dom) and self.match_action(act)
def get_events(object_key: str) -> List[Event]: """Get list of events for the object with the specified key.""" events_data = _get_events_data(object_key) return [Event.from_config(event_dict) for event_dict in events_data]
def function[get_events, parameter[object_key]]: constant[Get list of events for the object with the specified key.] variable[events_data] assign[=] call[name[_get_events_data], parameter[name[object_key]]] return[<ast.ListComp object at 0x7da18f00fca0>]
keyword[def] identifier[get_events] ( identifier[object_key] : identifier[str] )-> identifier[List] [ identifier[Event] ]: literal[string] identifier[events_data] = identifier[_get_events_data] ( identifier[object_key] ) keyword[return] [ identifier[Event] . identifier[from_config] ( identifier[event_dict] ) keyword[for] identifier[event_dict] keyword[in] identifier[events_data] ]
def get_events(object_key: str) -> List[Event]: """Get list of events for the object with the specified key.""" events_data = _get_events_data(object_key) return [Event.from_config(event_dict) for event_dict in events_data]
def Write(packer_type, buf, head, n): """ Write encodes `n` at buf[head] using `packer_type`. """ packer_type.pack_into(buf, head, n)
def function[Write, parameter[packer_type, buf, head, n]]: constant[ Write encodes `n` at buf[head] using `packer_type`. ] call[name[packer_type].pack_into, parameter[name[buf], name[head], name[n]]]
keyword[def] identifier[Write] ( identifier[packer_type] , identifier[buf] , identifier[head] , identifier[n] ): literal[string] identifier[packer_type] . identifier[pack_into] ( identifier[buf] , identifier[head] , identifier[n] )
def Write(packer_type, buf, head, n): """ Write encodes `n` at buf[head] using `packer_type`. """ packer_type.pack_into(buf, head, n)
def add_info_widget(self, widget): ''' add right panel widget ''' if not self.screen: self.log.debug("No screen instance to add widget") else: self.screen.add_info_widget(widget)
def function[add_info_widget, parameter[self, widget]]: constant[ add right panel widget ] if <ast.UnaryOp object at 0x7da1b0352cb0> begin[:] call[name[self].log.debug, parameter[constant[No screen instance to add widget]]]
keyword[def] identifier[add_info_widget] ( identifier[self] , identifier[widget] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[screen] : identifier[self] . identifier[log] . identifier[debug] ( literal[string] ) keyword[else] : identifier[self] . identifier[screen] . identifier[add_info_widget] ( identifier[widget] )
def add_info_widget(self, widget): """ add right panel widget """ if not self.screen: self.log.debug('No screen instance to add widget') # depends on [control=['if'], data=[]] else: self.screen.add_info_widget(widget)
def _normalize_check_url(self, check_url): """ Normalizes check_url by: * Adding the `http` scheme if missing * Adding or replacing port with `self.port` """ # TODO: Write tests for this method split_url = urlsplit(check_url) host = splitport(split_url.path or split_url.netloc)[0] return '{0}://{1}:{2}'.format(self.scheme, host, self.port)
def function[_normalize_check_url, parameter[self, check_url]]: constant[ Normalizes check_url by: * Adding the `http` scheme if missing * Adding or replacing port with `self.port` ] variable[split_url] assign[=] call[name[urlsplit], parameter[name[check_url]]] variable[host] assign[=] call[call[name[splitport], parameter[<ast.BoolOp object at 0x7da1b0aa70a0>]]][constant[0]] return[call[constant[{0}://{1}:{2}].format, parameter[name[self].scheme, name[host], name[self].port]]]
keyword[def] identifier[_normalize_check_url] ( identifier[self] , identifier[check_url] ): literal[string] identifier[split_url] = identifier[urlsplit] ( identifier[check_url] ) identifier[host] = identifier[splitport] ( identifier[split_url] . identifier[path] keyword[or] identifier[split_url] . identifier[netloc] )[ literal[int] ] keyword[return] literal[string] . identifier[format] ( identifier[self] . identifier[scheme] , identifier[host] , identifier[self] . identifier[port] )
def _normalize_check_url(self, check_url): """ Normalizes check_url by: * Adding the `http` scheme if missing * Adding or replacing port with `self.port` """ # TODO: Write tests for this method split_url = urlsplit(check_url) host = splitport(split_url.path or split_url.netloc)[0] return '{0}://{1}:{2}'.format(self.scheme, host, self.port)
def get_object(self, request, object_id): """ Returns an instance matching the primary key provided. ``None`` is returned if no match is found (or the object_id failed validation against the primary key field). """ queryset = self.queryset(request) model = queryset.model try: object_id = model._meta.pk.to_python(object_id) return queryset.get(pk=object_id) except (model.DoesNotExist, ValidationError): return None
def function[get_object, parameter[self, request, object_id]]: constant[ Returns an instance matching the primary key provided. ``None`` is returned if no match is found (or the object_id failed validation against the primary key field). ] variable[queryset] assign[=] call[name[self].queryset, parameter[name[request]]] variable[model] assign[=] name[queryset].model <ast.Try object at 0x7da18dc04130>
keyword[def] identifier[get_object] ( identifier[self] , identifier[request] , identifier[object_id] ): literal[string] identifier[queryset] = identifier[self] . identifier[queryset] ( identifier[request] ) identifier[model] = identifier[queryset] . identifier[model] keyword[try] : identifier[object_id] = identifier[model] . identifier[_meta] . identifier[pk] . identifier[to_python] ( identifier[object_id] ) keyword[return] identifier[queryset] . identifier[get] ( identifier[pk] = identifier[object_id] ) keyword[except] ( identifier[model] . identifier[DoesNotExist] , identifier[ValidationError] ): keyword[return] keyword[None]
def get_object(self, request, object_id): """ Returns an instance matching the primary key provided. ``None`` is returned if no match is found (or the object_id failed validation against the primary key field). """ queryset = self.queryset(request) model = queryset.model try: object_id = model._meta.pk.to_python(object_id) return queryset.get(pk=object_id) # depends on [control=['try'], data=[]] except (model.DoesNotExist, ValidationError): return None # depends on [control=['except'], data=[]]
def encode(self, object_): """ Encodes an object. Args: object_ (object): Object to encode. Returns: object: Encoding of the object. """ if self.enforce_reversible: self.enforce_reversible = False if self.decode(self.encode(object_)) != object_: raise ValueError('Encoding is not reversible for "%s"' % object_) self.enforce_reversible = True return object_
def function[encode, parameter[self, object_]]: constant[ Encodes an object. Args: object_ (object): Object to encode. Returns: object: Encoding of the object. ] if name[self].enforce_reversible begin[:] name[self].enforce_reversible assign[=] constant[False] if compare[call[name[self].decode, parameter[call[name[self].encode, parameter[name[object_]]]]] not_equal[!=] name[object_]] begin[:] <ast.Raise object at 0x7da18dc05270> name[self].enforce_reversible assign[=] constant[True] return[name[object_]]
keyword[def] identifier[encode] ( identifier[self] , identifier[object_] ): literal[string] keyword[if] identifier[self] . identifier[enforce_reversible] : identifier[self] . identifier[enforce_reversible] = keyword[False] keyword[if] identifier[self] . identifier[decode] ( identifier[self] . identifier[encode] ( identifier[object_] ))!= identifier[object_] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[object_] ) identifier[self] . identifier[enforce_reversible] = keyword[True] keyword[return] identifier[object_]
def encode(self, object_): """ Encodes an object. Args: object_ (object): Object to encode. Returns: object: Encoding of the object. """ if self.enforce_reversible: self.enforce_reversible = False if self.decode(self.encode(object_)) != object_: raise ValueError('Encoding is not reversible for "%s"' % object_) # depends on [control=['if'], data=['object_']] self.enforce_reversible = True # depends on [control=['if'], data=[]] return object_
def update_atom_members(old, new): """ Update an atom member """ old_keys = old.members().keys() new_keys = new.members().keys() for key in old_keys: old_obj = getattr(old, key) try: new_obj = getattr(new, key) if old_obj == new_obj: continue except AttributeError: # Remove any obsolete members try: delattr(old, key) except (AttributeError, TypeError): pass continue try: #: Update any changed members #: TODO: We have to somehow know if this was changed by the user or the code! #: and ONLY update if it's due to the code changing! Without this, the entire concept #: is broken and useless... setattr(old, key, getattr(new, key)) except (AttributeError, TypeError): pass # skip non-writable attributes #: Add any new members for key in set(new_keys)-set(old_keys): try: setattr(old, key, getattr(new, key)) except (AttributeError, TypeError): pass
def function[update_atom_members, parameter[old, new]]: constant[ Update an atom member ] variable[old_keys] assign[=] call[call[name[old].members, parameter[]].keys, parameter[]] variable[new_keys] assign[=] call[call[name[new].members, parameter[]].keys, parameter[]] for taget[name[key]] in starred[name[old_keys]] begin[:] variable[old_obj] assign[=] call[name[getattr], parameter[name[old], name[key]]] <ast.Try object at 0x7da2041db670> <ast.Try object at 0x7da2041d8a30> for taget[name[key]] in starred[binary_operation[call[name[set], parameter[name[new_keys]]] - call[name[set], parameter[name[old_keys]]]]] begin[:] <ast.Try object at 0x7da2041dae90>
keyword[def] identifier[update_atom_members] ( identifier[old] , identifier[new] ): literal[string] identifier[old_keys] = identifier[old] . identifier[members] (). identifier[keys] () identifier[new_keys] = identifier[new] . identifier[members] (). identifier[keys] () keyword[for] identifier[key] keyword[in] identifier[old_keys] : identifier[old_obj] = identifier[getattr] ( identifier[old] , identifier[key] ) keyword[try] : identifier[new_obj] = identifier[getattr] ( identifier[new] , identifier[key] ) keyword[if] identifier[old_obj] == identifier[new_obj] : keyword[continue] keyword[except] identifier[AttributeError] : keyword[try] : identifier[delattr] ( identifier[old] , identifier[key] ) keyword[except] ( identifier[AttributeError] , identifier[TypeError] ): keyword[pass] keyword[continue] keyword[try] : identifier[setattr] ( identifier[old] , identifier[key] , identifier[getattr] ( identifier[new] , identifier[key] )) keyword[except] ( identifier[AttributeError] , identifier[TypeError] ): keyword[pass] keyword[for] identifier[key] keyword[in] identifier[set] ( identifier[new_keys] )- identifier[set] ( identifier[old_keys] ): keyword[try] : identifier[setattr] ( identifier[old] , identifier[key] , identifier[getattr] ( identifier[new] , identifier[key] )) keyword[except] ( identifier[AttributeError] , identifier[TypeError] ): keyword[pass]
def update_atom_members(old, new): """ Update an atom member """ old_keys = old.members().keys() new_keys = new.members().keys() for key in old_keys: old_obj = getattr(old, key) try: new_obj = getattr(new, key) if old_obj == new_obj: continue # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except AttributeError: # Remove any obsolete members try: delattr(old, key) # depends on [control=['try'], data=[]] except (AttributeError, TypeError): pass # depends on [control=['except'], data=[]] continue # depends on [control=['except'], data=[]] try: #: Update any changed members #: TODO: We have to somehow know if this was changed by the user or the code! #: and ONLY update if it's due to the code changing! Without this, the entire concept #: is broken and useless... setattr(old, key, getattr(new, key)) # depends on [control=['try'], data=[]] except (AttributeError, TypeError): pass # skip non-writable attributes # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['key']] #: Add any new members for key in set(new_keys) - set(old_keys): try: setattr(old, key, getattr(new, key)) # depends on [control=['try'], data=[]] except (AttributeError, TypeError): pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['key']]
def one_step(self, current_state, previous_kernel_results): """Takes one step of the TransitionKernel. Args: current_state: `Tensor` or Python `list` of `Tensor`s representing the current state(s) of the Markov chain(s). previous_kernel_results: A (possibly nested) `tuple`, `namedtuple` or `list` of `Tensor`s representing internal calculations made within the previous call to this function (or as returned by `bootstrap_results`). Returns: next_state: `Tensor` or Python `list` of `Tensor`s representing the next state(s) of the Markov chain(s). kernel_results: A (possibly nested) `tuple`, `namedtuple` or `list` of `Tensor`s representing internal calculations made within this function. Raises: ValueError: if `inner_kernel` results doesn't contain the member "target_log_prob". """ with tf.compat.v1.name_scope( name=mcmc_util.make_name(self.name, 'mh', 'one_step'), values=[current_state, previous_kernel_results]): # Take one inner step. [ proposed_state, proposed_results, ] = self.inner_kernel.one_step( current_state, previous_kernel_results.accepted_results) if (not has_target_log_prob(proposed_results) or not has_target_log_prob(previous_kernel_results.accepted_results)): raise ValueError('"target_log_prob" must be a member of ' '`inner_kernel` results.') # Compute log(acceptance_ratio). to_sum = [proposed_results.target_log_prob, -previous_kernel_results.accepted_results.target_log_prob] try: if (not mcmc_util.is_list_like( proposed_results.log_acceptance_correction) or proposed_results.log_acceptance_correction): to_sum.append(proposed_results.log_acceptance_correction) except AttributeError: warnings.warn('Supplied inner `TransitionKernel` does not have a ' '`log_acceptance_correction`. Assuming its value is `0.`') log_accept_ratio = mcmc_util.safe_sum( to_sum, name='compute_log_accept_ratio') # If proposed state reduces likelihood: randomly accept. # If proposed state increases likelihood: always accept. 
# I.e., u < min(1, accept_ratio), where u ~ Uniform[0,1) # ==> log(u) < log_accept_ratio log_uniform = tf.math.log( tf.random.uniform( shape=tf.shape(input=proposed_results.target_log_prob), dtype=proposed_results.target_log_prob.dtype.base_dtype, seed=self._seed_stream())) is_accepted = log_uniform < log_accept_ratio next_state = mcmc_util.choose( is_accepted, proposed_state, current_state, name='choose_next_state') kernel_results = MetropolisHastingsKernelResults( accepted_results=mcmc_util.choose( is_accepted, proposed_results, previous_kernel_results.accepted_results, name='choose_inner_results'), is_accepted=is_accepted, log_accept_ratio=log_accept_ratio, proposed_state=proposed_state, proposed_results=proposed_results, extra=[], ) return next_state, kernel_results
def function[one_step, parameter[self, current_state, previous_kernel_results]]: constant[Takes one step of the TransitionKernel. Args: current_state: `Tensor` or Python `list` of `Tensor`s representing the current state(s) of the Markov chain(s). previous_kernel_results: A (possibly nested) `tuple`, `namedtuple` or `list` of `Tensor`s representing internal calculations made within the previous call to this function (or as returned by `bootstrap_results`). Returns: next_state: `Tensor` or Python `list` of `Tensor`s representing the next state(s) of the Markov chain(s). kernel_results: A (possibly nested) `tuple`, `namedtuple` or `list` of `Tensor`s representing internal calculations made within this function. Raises: ValueError: if `inner_kernel` results doesn't contain the member "target_log_prob". ] with call[name[tf].compat.v1.name_scope, parameter[]] begin[:] <ast.List object at 0x7da1b022a9b0> assign[=] call[name[self].inner_kernel.one_step, parameter[name[current_state], name[previous_kernel_results].accepted_results]] if <ast.BoolOp object at 0x7da1b02296c0> begin[:] <ast.Raise object at 0x7da1b0228a60> variable[to_sum] assign[=] list[[<ast.Attribute object at 0x7da1b022b670>, <ast.UnaryOp object at 0x7da1b0229a80>]] <ast.Try object at 0x7da1b022b520> variable[log_accept_ratio] assign[=] call[name[mcmc_util].safe_sum, parameter[name[to_sum]]] variable[log_uniform] assign[=] call[name[tf].math.log, parameter[call[name[tf].random.uniform, parameter[]]]] variable[is_accepted] assign[=] compare[name[log_uniform] less[<] name[log_accept_ratio]] variable[next_state] assign[=] call[name[mcmc_util].choose, parameter[name[is_accepted], name[proposed_state], name[current_state]]] variable[kernel_results] assign[=] call[name[MetropolisHastingsKernelResults], parameter[]] return[tuple[[<ast.Name object at 0x7da1b02c8850>, <ast.Name object at 0x7da1b02ca170>]]]
keyword[def] identifier[one_step] ( identifier[self] , identifier[current_state] , identifier[previous_kernel_results] ): literal[string] keyword[with] identifier[tf] . identifier[compat] . identifier[v1] . identifier[name_scope] ( identifier[name] = identifier[mcmc_util] . identifier[make_name] ( identifier[self] . identifier[name] , literal[string] , literal[string] ), identifier[values] =[ identifier[current_state] , identifier[previous_kernel_results] ]): [ identifier[proposed_state] , identifier[proposed_results] , ]= identifier[self] . identifier[inner_kernel] . identifier[one_step] ( identifier[current_state] , identifier[previous_kernel_results] . identifier[accepted_results] ) keyword[if] ( keyword[not] identifier[has_target_log_prob] ( identifier[proposed_results] ) keyword[or] keyword[not] identifier[has_target_log_prob] ( identifier[previous_kernel_results] . identifier[accepted_results] )): keyword[raise] identifier[ValueError] ( literal[string] literal[string] ) identifier[to_sum] =[ identifier[proposed_results] . identifier[target_log_prob] , - identifier[previous_kernel_results] . identifier[accepted_results] . identifier[target_log_prob] ] keyword[try] : keyword[if] ( keyword[not] identifier[mcmc_util] . identifier[is_list_like] ( identifier[proposed_results] . identifier[log_acceptance_correction] ) keyword[or] identifier[proposed_results] . identifier[log_acceptance_correction] ): identifier[to_sum] . identifier[append] ( identifier[proposed_results] . identifier[log_acceptance_correction] ) keyword[except] identifier[AttributeError] : identifier[warnings] . identifier[warn] ( literal[string] literal[string] ) identifier[log_accept_ratio] = identifier[mcmc_util] . identifier[safe_sum] ( identifier[to_sum] , identifier[name] = literal[string] ) identifier[log_uniform] = identifier[tf] . identifier[math] . identifier[log] ( identifier[tf] . identifier[random] . identifier[uniform] ( identifier[shape] = identifier[tf] . 
identifier[shape] ( identifier[input] = identifier[proposed_results] . identifier[target_log_prob] ), identifier[dtype] = identifier[proposed_results] . identifier[target_log_prob] . identifier[dtype] . identifier[base_dtype] , identifier[seed] = identifier[self] . identifier[_seed_stream] ())) identifier[is_accepted] = identifier[log_uniform] < identifier[log_accept_ratio] identifier[next_state] = identifier[mcmc_util] . identifier[choose] ( identifier[is_accepted] , identifier[proposed_state] , identifier[current_state] , identifier[name] = literal[string] ) identifier[kernel_results] = identifier[MetropolisHastingsKernelResults] ( identifier[accepted_results] = identifier[mcmc_util] . identifier[choose] ( identifier[is_accepted] , identifier[proposed_results] , identifier[previous_kernel_results] . identifier[accepted_results] , identifier[name] = literal[string] ), identifier[is_accepted] = identifier[is_accepted] , identifier[log_accept_ratio] = identifier[log_accept_ratio] , identifier[proposed_state] = identifier[proposed_state] , identifier[proposed_results] = identifier[proposed_results] , identifier[extra] =[], ) keyword[return] identifier[next_state] , identifier[kernel_results]
def one_step(self, current_state, previous_kernel_results): """Takes one step of the TransitionKernel. Args: current_state: `Tensor` or Python `list` of `Tensor`s representing the current state(s) of the Markov chain(s). previous_kernel_results: A (possibly nested) `tuple`, `namedtuple` or `list` of `Tensor`s representing internal calculations made within the previous call to this function (or as returned by `bootstrap_results`). Returns: next_state: `Tensor` or Python `list` of `Tensor`s representing the next state(s) of the Markov chain(s). kernel_results: A (possibly nested) `tuple`, `namedtuple` or `list` of `Tensor`s representing internal calculations made within this function. Raises: ValueError: if `inner_kernel` results doesn't contain the member "target_log_prob". """ with tf.compat.v1.name_scope(name=mcmc_util.make_name(self.name, 'mh', 'one_step'), values=[current_state, previous_kernel_results]): # Take one inner step. [proposed_state, proposed_results] = self.inner_kernel.one_step(current_state, previous_kernel_results.accepted_results) if not has_target_log_prob(proposed_results) or not has_target_log_prob(previous_kernel_results.accepted_results): raise ValueError('"target_log_prob" must be a member of `inner_kernel` results.') # depends on [control=['if'], data=[]] # Compute log(acceptance_ratio). to_sum = [proposed_results.target_log_prob, -previous_kernel_results.accepted_results.target_log_prob] try: if not mcmc_util.is_list_like(proposed_results.log_acceptance_correction) or proposed_results.log_acceptance_correction: to_sum.append(proposed_results.log_acceptance_correction) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except AttributeError: warnings.warn('Supplied inner `TransitionKernel` does not have a `log_acceptance_correction`. 
Assuming its value is `0.`') # depends on [control=['except'], data=[]] log_accept_ratio = mcmc_util.safe_sum(to_sum, name='compute_log_accept_ratio') # If proposed state reduces likelihood: randomly accept. # If proposed state increases likelihood: always accept. # I.e., u < min(1, accept_ratio), where u ~ Uniform[0,1) # ==> log(u) < log_accept_ratio log_uniform = tf.math.log(tf.random.uniform(shape=tf.shape(input=proposed_results.target_log_prob), dtype=proposed_results.target_log_prob.dtype.base_dtype, seed=self._seed_stream())) is_accepted = log_uniform < log_accept_ratio next_state = mcmc_util.choose(is_accepted, proposed_state, current_state, name='choose_next_state') kernel_results = MetropolisHastingsKernelResults(accepted_results=mcmc_util.choose(is_accepted, proposed_results, previous_kernel_results.accepted_results, name='choose_inner_results'), is_accepted=is_accepted, log_accept_ratio=log_accept_ratio, proposed_state=proposed_state, proposed_results=proposed_results, extra=[]) return (next_state, kernel_results) # depends on [control=['with'], data=[]]
def build_engine_session(connection: str, echo: bool = False, autoflush: Optional[bool] = None, autocommit: Optional[bool] = None, expire_on_commit: Optional[bool] = None, scopefunc=None) -> Tuple: """Build an engine and a session. :param connection: An RFC-1738 database connection string :param echo: Turn on echoing SQL :param autoflush: Defaults to True if not specified in kwargs or configuration. :param autocommit: Defaults to False if not specified in kwargs or configuration. :param expire_on_commit: Defaults to False if not specified in kwargs or configuration. :param scopefunc: Scoped function to pass to :func:`sqlalchemy.orm.scoped_session` :rtype: tuple[Engine,Session] From the Flask-SQLAlchemy documentation: An extra key ``'scopefunc'`` can be set on the ``options`` dict to specify a custom scope function. If it's not provided, Flask's app context stack identity is used. This will ensure that sessions are created and removed with the request/response cycle, and should be fine in most cases. """ if connection is None: raise ValueError('can not build engine when connection is None') engine = create_engine(connection, echo=echo) if autoflush is None: autoflush = config.get('PYBEL_MANAGER_AUTOFLUSH', False) if autocommit is None: autocommit = config.get('PYBEL_MANAGER_AUTOCOMMIT', False) if expire_on_commit is None: expire_on_commit = config.get('PYBEL_MANAGER_AUTOEXPIRE', True) log.debug('auto flush: %s, auto commit: %s, expire on commmit: %s', autoflush, autocommit, expire_on_commit) #: A SQLAlchemy session maker session_maker = sessionmaker( bind=engine, autoflush=autoflush, autocommit=autocommit, expire_on_commit=expire_on_commit, ) #: A SQLAlchemy session object session = scoped_session( session_maker, scopefunc=scopefunc, ) return engine, session
def function[build_engine_session, parameter[connection, echo, autoflush, autocommit, expire_on_commit, scopefunc]]: constant[Build an engine and a session. :param connection: An RFC-1738 database connection string :param echo: Turn on echoing SQL :param autoflush: Defaults to True if not specified in kwargs or configuration. :param autocommit: Defaults to False if not specified in kwargs or configuration. :param expire_on_commit: Defaults to False if not specified in kwargs or configuration. :param scopefunc: Scoped function to pass to :func:`sqlalchemy.orm.scoped_session` :rtype: tuple[Engine,Session] From the Flask-SQLAlchemy documentation: An extra key ``'scopefunc'`` can be set on the ``options`` dict to specify a custom scope function. If it's not provided, Flask's app context stack identity is used. This will ensure that sessions are created and removed with the request/response cycle, and should be fine in most cases. ] if compare[name[connection] is constant[None]] begin[:] <ast.Raise object at 0x7da1b0c42980> variable[engine] assign[=] call[name[create_engine], parameter[name[connection]]] if compare[name[autoflush] is constant[None]] begin[:] variable[autoflush] assign[=] call[name[config].get, parameter[constant[PYBEL_MANAGER_AUTOFLUSH], constant[False]]] if compare[name[autocommit] is constant[None]] begin[:] variable[autocommit] assign[=] call[name[config].get, parameter[constant[PYBEL_MANAGER_AUTOCOMMIT], constant[False]]] if compare[name[expire_on_commit] is constant[None]] begin[:] variable[expire_on_commit] assign[=] call[name[config].get, parameter[constant[PYBEL_MANAGER_AUTOEXPIRE], constant[True]]] call[name[log].debug, parameter[constant[auto flush: %s, auto commit: %s, expire on commmit: %s], name[autoflush], name[autocommit], name[expire_on_commit]]] variable[session_maker] assign[=] call[name[sessionmaker], parameter[]] variable[session] assign[=] call[name[scoped_session], parameter[name[session_maker]]] return[tuple[[<ast.Name object at 
0x7da20c76d540>, <ast.Name object at 0x7da20c76df30>]]]
keyword[def] identifier[build_engine_session] ( identifier[connection] : identifier[str] , identifier[echo] : identifier[bool] = keyword[False] , identifier[autoflush] : identifier[Optional] [ identifier[bool] ]= keyword[None] , identifier[autocommit] : identifier[Optional] [ identifier[bool] ]= keyword[None] , identifier[expire_on_commit] : identifier[Optional] [ identifier[bool] ]= keyword[None] , identifier[scopefunc] = keyword[None] )-> identifier[Tuple] : literal[string] keyword[if] identifier[connection] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[engine] = identifier[create_engine] ( identifier[connection] , identifier[echo] = identifier[echo] ) keyword[if] identifier[autoflush] keyword[is] keyword[None] : identifier[autoflush] = identifier[config] . identifier[get] ( literal[string] , keyword[False] ) keyword[if] identifier[autocommit] keyword[is] keyword[None] : identifier[autocommit] = identifier[config] . identifier[get] ( literal[string] , keyword[False] ) keyword[if] identifier[expire_on_commit] keyword[is] keyword[None] : identifier[expire_on_commit] = identifier[config] . identifier[get] ( literal[string] , keyword[True] ) identifier[log] . identifier[debug] ( literal[string] , identifier[autoflush] , identifier[autocommit] , identifier[expire_on_commit] ) identifier[session_maker] = identifier[sessionmaker] ( identifier[bind] = identifier[engine] , identifier[autoflush] = identifier[autoflush] , identifier[autocommit] = identifier[autocommit] , identifier[expire_on_commit] = identifier[expire_on_commit] , ) identifier[session] = identifier[scoped_session] ( identifier[session_maker] , identifier[scopefunc] = identifier[scopefunc] , ) keyword[return] identifier[engine] , identifier[session]
def build_engine_session(connection: str, echo: bool=False, autoflush: Optional[bool]=None, autocommit: Optional[bool]=None, expire_on_commit: Optional[bool]=None, scopefunc=None) -> Tuple: """Build an engine and a session. :param connection: An RFC-1738 database connection string :param echo: Turn on echoing SQL :param autoflush: Defaults to True if not specified in kwargs or configuration. :param autocommit: Defaults to False if not specified in kwargs or configuration. :param expire_on_commit: Defaults to False if not specified in kwargs or configuration. :param scopefunc: Scoped function to pass to :func:`sqlalchemy.orm.scoped_session` :rtype: tuple[Engine,Session] From the Flask-SQLAlchemy documentation: An extra key ``'scopefunc'`` can be set on the ``options`` dict to specify a custom scope function. If it's not provided, Flask's app context stack identity is used. This will ensure that sessions are created and removed with the request/response cycle, and should be fine in most cases. """ if connection is None: raise ValueError('can not build engine when connection is None') # depends on [control=['if'], data=[]] engine = create_engine(connection, echo=echo) if autoflush is None: autoflush = config.get('PYBEL_MANAGER_AUTOFLUSH', False) # depends on [control=['if'], data=['autoflush']] if autocommit is None: autocommit = config.get('PYBEL_MANAGER_AUTOCOMMIT', False) # depends on [control=['if'], data=['autocommit']] if expire_on_commit is None: expire_on_commit = config.get('PYBEL_MANAGER_AUTOEXPIRE', True) # depends on [control=['if'], data=['expire_on_commit']] log.debug('auto flush: %s, auto commit: %s, expire on commmit: %s', autoflush, autocommit, expire_on_commit) #: A SQLAlchemy session maker session_maker = sessionmaker(bind=engine, autoflush=autoflush, autocommit=autocommit, expire_on_commit=expire_on_commit) #: A SQLAlchemy session object session = scoped_session(session_maker, scopefunc=scopefunc) return (engine, session)
def _simulate_mixture(self, op: ops.Operation, data: _StateAndBuffer, indices: List[int]) -> None: """Simulate an op that is a mixtures of unitaries.""" probs, unitaries = zip(*protocols.mixture(op)) # We work around numpy barfing on choosing from a list of # numpy arrays (which is not `one-dimensional`) by selecting # the index of the unitary. index = np.random.choice(range(len(unitaries)), p=probs) shape = (2,) * (2 * len(indices)) unitary = unitaries[index].astype(self._dtype).reshape(shape) result = linalg.targeted_left_multiply(unitary, data.state, indices, out=data.buffer) data.buffer = data.state data.state = result
def function[_simulate_mixture, parameter[self, op, data, indices]]: constant[Simulate an op that is a mixtures of unitaries.] <ast.Tuple object at 0x7da20c7cb880> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da20c7c8dc0>]] variable[index] assign[=] call[name[np].random.choice, parameter[call[name[range], parameter[call[name[len], parameter[name[unitaries]]]]]]] variable[shape] assign[=] binary_operation[tuple[[<ast.Constant object at 0x7da20c7cbd90>]] * binary_operation[constant[2] * call[name[len], parameter[name[indices]]]]] variable[unitary] assign[=] call[call[call[name[unitaries]][name[index]].astype, parameter[name[self]._dtype]].reshape, parameter[name[shape]]] variable[result] assign[=] call[name[linalg].targeted_left_multiply, parameter[name[unitary], name[data].state, name[indices]]] name[data].buffer assign[=] name[data].state name[data].state assign[=] name[result]
keyword[def] identifier[_simulate_mixture] ( identifier[self] , identifier[op] : identifier[ops] . identifier[Operation] , identifier[data] : identifier[_StateAndBuffer] , identifier[indices] : identifier[List] [ identifier[int] ])-> keyword[None] : literal[string] identifier[probs] , identifier[unitaries] = identifier[zip] (* identifier[protocols] . identifier[mixture] ( identifier[op] )) identifier[index] = identifier[np] . identifier[random] . identifier[choice] ( identifier[range] ( identifier[len] ( identifier[unitaries] )), identifier[p] = identifier[probs] ) identifier[shape] =( literal[int] ,)*( literal[int] * identifier[len] ( identifier[indices] )) identifier[unitary] = identifier[unitaries] [ identifier[index] ]. identifier[astype] ( identifier[self] . identifier[_dtype] ). identifier[reshape] ( identifier[shape] ) identifier[result] = identifier[linalg] . identifier[targeted_left_multiply] ( identifier[unitary] , identifier[data] . identifier[state] , identifier[indices] , identifier[out] = identifier[data] . identifier[buffer] ) identifier[data] . identifier[buffer] = identifier[data] . identifier[state] identifier[data] . identifier[state] = identifier[result]
def _simulate_mixture(self, op: ops.Operation, data: _StateAndBuffer, indices: List[int]) -> None: """Simulate an op that is a mixtures of unitaries.""" (probs, unitaries) = zip(*protocols.mixture(op)) # We work around numpy barfing on choosing from a list of # numpy arrays (which is not `one-dimensional`) by selecting # the index of the unitary. index = np.random.choice(range(len(unitaries)), p=probs) shape = (2,) * (2 * len(indices)) unitary = unitaries[index].astype(self._dtype).reshape(shape) result = linalg.targeted_left_multiply(unitary, data.state, indices, out=data.buffer) data.buffer = data.state data.state = result
def attributes(self): """tuple: Attributes defined in the schema and the data package. """ # Deprecate warnings.warn( 'Property "package.attributes" is deprecated.', UserWarning) # Get attributes attributes = set(self.to_dict().keys()) try: attributes.update(self.profile.properties.keys()) except AttributeError: pass return tuple(attributes)
def function[attributes, parameter[self]]: constant[tuple: Attributes defined in the schema and the data package. ] call[name[warnings].warn, parameter[constant[Property "package.attributes" is deprecated.], name[UserWarning]]] variable[attributes] assign[=] call[name[set], parameter[call[call[name[self].to_dict, parameter[]].keys, parameter[]]]] <ast.Try object at 0x7da1b012e920> return[call[name[tuple], parameter[name[attributes]]]]
keyword[def] identifier[attributes] ( identifier[self] ): literal[string] identifier[warnings] . identifier[warn] ( literal[string] , identifier[UserWarning] ) identifier[attributes] = identifier[set] ( identifier[self] . identifier[to_dict] (). identifier[keys] ()) keyword[try] : identifier[attributes] . identifier[update] ( identifier[self] . identifier[profile] . identifier[properties] . identifier[keys] ()) keyword[except] identifier[AttributeError] : keyword[pass] keyword[return] identifier[tuple] ( identifier[attributes] )
def attributes(self): """tuple: Attributes defined in the schema and the data package. """ # Deprecate warnings.warn('Property "package.attributes" is deprecated.', UserWarning) # Get attributes attributes = set(self.to_dict().keys()) try: attributes.update(self.profile.properties.keys()) # depends on [control=['try'], data=[]] except AttributeError: pass # depends on [control=['except'], data=[]] return tuple(attributes)
def _build_tree_string(root, curr_index, index=False, delimiter='-'): """Recursively walk down the binary tree and build a pretty-print string. In each recursive call, a "box" of characters visually representing the current (sub)tree is constructed line by line. Each line is padded with whitespaces to ensure all lines in the box have the same length. Then the box, its width, and start-end positions of its root node value repr string (required for drawing branches) are sent up to the parent call. The parent call then combines its left and right sub-boxes to build a larger box etc. :param root: Root node of the binary tree. :type root: binarytree.Node | None :param curr_index: Level-order_ index of the current node (root node is 0). :type curr_index: int :param index: If set to True, include the level-order_ node indexes using the following format: ``{index}{delimiter}{value}`` (default: False). :type index: bool :param delimiter: Delimiter character between the node index and the node value (default: '-'). :type delimiter: :return: Box of characters visually representing the current subtree, width of the box, and start-end positions of the repr string of the new root node value. :rtype: ([str], int, int, int) .. 
_Level-order: https://en.wikipedia.org/wiki/Tree_traversal#Breadth-first_search """ if root is None: return [], 0, 0, 0 line1 = [] line2 = [] if index: node_repr = '{}{}{}'.format(curr_index, delimiter, root.value) else: node_repr = str(root.value) new_root_width = gap_size = len(node_repr) # Get the left and right sub-boxes, their widths, and root repr positions l_box, l_box_width, l_root_start, l_root_end = \ _build_tree_string(root.left, 2 * curr_index + 1, index, delimiter) r_box, r_box_width, r_root_start, r_root_end = \ _build_tree_string(root.right, 2 * curr_index + 2, index, delimiter) # Draw the branch connecting the current root node to the left sub-box # Pad the line with whitespaces where necessary if l_box_width > 0: l_root = (l_root_start + l_root_end) // 2 + 1 line1.append(' ' * (l_root + 1)) line1.append('_' * (l_box_width - l_root)) line2.append(' ' * l_root + '/') line2.append(' ' * (l_box_width - l_root)) new_root_start = l_box_width + 1 gap_size += 1 else: new_root_start = 0 # Draw the representation of the current root node line1.append(node_repr) line2.append(' ' * new_root_width) # Draw the branch connecting the current root node to the right sub-box # Pad the line with whitespaces where necessary if r_box_width > 0: r_root = (r_root_start + r_root_end) // 2 line1.append('_' * r_root) line1.append(' ' * (r_box_width - r_root + 1)) line2.append(' ' * r_root + '\\') line2.append(' ' * (r_box_width - r_root)) gap_size += 1 new_root_end = new_root_start + new_root_width - 1 # Combine the left and right sub-boxes with the branches drawn above gap = ' ' * gap_size new_box = [''.join(line1), ''.join(line2)] for i in range(max(len(l_box), len(r_box))): l_line = l_box[i] if i < len(l_box) else ' ' * l_box_width r_line = r_box[i] if i < len(r_box) else ' ' * r_box_width new_box.append(l_line + gap + r_line) # Return the new box, its width and its root repr positions return new_box, len(new_box[0]), new_root_start, new_root_end
def function[_build_tree_string, parameter[root, curr_index, index, delimiter]]: constant[Recursively walk down the binary tree and build a pretty-print string. In each recursive call, a "box" of characters visually representing the current (sub)tree is constructed line by line. Each line is padded with whitespaces to ensure all lines in the box have the same length. Then the box, its width, and start-end positions of its root node value repr string (required for drawing branches) are sent up to the parent call. The parent call then combines its left and right sub-boxes to build a larger box etc. :param root: Root node of the binary tree. :type root: binarytree.Node | None :param curr_index: Level-order_ index of the current node (root node is 0). :type curr_index: int :param index: If set to True, include the level-order_ node indexes using the following format: ``{index}{delimiter}{value}`` (default: False). :type index: bool :param delimiter: Delimiter character between the node index and the node value (default: '-'). :type delimiter: :return: Box of characters visually representing the current subtree, width of the box, and start-end positions of the repr string of the new root node value. :rtype: ([str], int, int, int) .. 
_Level-order: https://en.wikipedia.org/wiki/Tree_traversal#Breadth-first_search ] if compare[name[root] is constant[None]] begin[:] return[tuple[[<ast.List object at 0x7da20c6c64a0>, <ast.Constant object at 0x7da20c6c6da0>, <ast.Constant object at 0x7da20c6c6f50>, <ast.Constant object at 0x7da20c6c7610>]]] variable[line1] assign[=] list[[]] variable[line2] assign[=] list[[]] if name[index] begin[:] variable[node_repr] assign[=] call[constant[{}{}{}].format, parameter[name[curr_index], name[delimiter], name[root].value]] variable[new_root_width] assign[=] call[name[len], parameter[name[node_repr]]] <ast.Tuple object at 0x7da20c6c4e80> assign[=] call[name[_build_tree_string], parameter[name[root].left, binary_operation[binary_operation[constant[2] * name[curr_index]] + constant[1]], name[index], name[delimiter]]] <ast.Tuple object at 0x7da20c76ffd0> assign[=] call[name[_build_tree_string], parameter[name[root].right, binary_operation[binary_operation[constant[2] * name[curr_index]] + constant[2]], name[index], name[delimiter]]] if compare[name[l_box_width] greater[>] constant[0]] begin[:] variable[l_root] assign[=] binary_operation[binary_operation[binary_operation[name[l_root_start] + name[l_root_end]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]] + constant[1]] call[name[line1].append, parameter[binary_operation[constant[ ] * binary_operation[name[l_root] + constant[1]]]]] call[name[line1].append, parameter[binary_operation[constant[_] * binary_operation[name[l_box_width] - name[l_root]]]]] call[name[line2].append, parameter[binary_operation[binary_operation[constant[ ] * name[l_root]] + constant[/]]]] call[name[line2].append, parameter[binary_operation[constant[ ] * binary_operation[name[l_box_width] - name[l_root]]]]] variable[new_root_start] assign[=] binary_operation[name[l_box_width] + constant[1]] <ast.AugAssign object at 0x7da20c6e65c0> call[name[line1].append, parameter[name[node_repr]]] call[name[line2].append, parameter[binary_operation[constant[ ] 
* name[new_root_width]]]] if compare[name[r_box_width] greater[>] constant[0]] begin[:] variable[r_root] assign[=] binary_operation[binary_operation[name[r_root_start] + name[r_root_end]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]] call[name[line1].append, parameter[binary_operation[constant[_] * name[r_root]]]] call[name[line1].append, parameter[binary_operation[constant[ ] * binary_operation[binary_operation[name[r_box_width] - name[r_root]] + constant[1]]]]] call[name[line2].append, parameter[binary_operation[binary_operation[constant[ ] * name[r_root]] + constant[\]]]] call[name[line2].append, parameter[binary_operation[constant[ ] * binary_operation[name[r_box_width] - name[r_root]]]]] <ast.AugAssign object at 0x7da20c6e7730> variable[new_root_end] assign[=] binary_operation[binary_operation[name[new_root_start] + name[new_root_width]] - constant[1]] variable[gap] assign[=] binary_operation[constant[ ] * name[gap_size]] variable[new_box] assign[=] list[[<ast.Call object at 0x7da20c6c48e0>, <ast.Call object at 0x7da20c6c55d0>]] for taget[name[i]] in starred[call[name[range], parameter[call[name[max], parameter[call[name[len], parameter[name[l_box]]], call[name[len], parameter[name[r_box]]]]]]]] begin[:] variable[l_line] assign[=] <ast.IfExp object at 0x7da20c6c6500> variable[r_line] assign[=] <ast.IfExp object at 0x7da20c6c76d0> call[name[new_box].append, parameter[binary_operation[binary_operation[name[l_line] + name[gap]] + name[r_line]]]] return[tuple[[<ast.Name object at 0x7da20c6c7f10>, <ast.Call object at 0x7da20c6c7940>, <ast.Name object at 0x7da20c6c5c00>, <ast.Name object at 0x7da20c6c6050>]]]
keyword[def] identifier[_build_tree_string] ( identifier[root] , identifier[curr_index] , identifier[index] = keyword[False] , identifier[delimiter] = literal[string] ): literal[string] keyword[if] identifier[root] keyword[is] keyword[None] : keyword[return] [], literal[int] , literal[int] , literal[int] identifier[line1] =[] identifier[line2] =[] keyword[if] identifier[index] : identifier[node_repr] = literal[string] . identifier[format] ( identifier[curr_index] , identifier[delimiter] , identifier[root] . identifier[value] ) keyword[else] : identifier[node_repr] = identifier[str] ( identifier[root] . identifier[value] ) identifier[new_root_width] = identifier[gap_size] = identifier[len] ( identifier[node_repr] ) identifier[l_box] , identifier[l_box_width] , identifier[l_root_start] , identifier[l_root_end] = identifier[_build_tree_string] ( identifier[root] . identifier[left] , literal[int] * identifier[curr_index] + literal[int] , identifier[index] , identifier[delimiter] ) identifier[r_box] , identifier[r_box_width] , identifier[r_root_start] , identifier[r_root_end] = identifier[_build_tree_string] ( identifier[root] . identifier[right] , literal[int] * identifier[curr_index] + literal[int] , identifier[index] , identifier[delimiter] ) keyword[if] identifier[l_box_width] > literal[int] : identifier[l_root] =( identifier[l_root_start] + identifier[l_root_end] )// literal[int] + literal[int] identifier[line1] . identifier[append] ( literal[string] *( identifier[l_root] + literal[int] )) identifier[line1] . identifier[append] ( literal[string] *( identifier[l_box_width] - identifier[l_root] )) identifier[line2] . identifier[append] ( literal[string] * identifier[l_root] + literal[string] ) identifier[line2] . 
identifier[append] ( literal[string] *( identifier[l_box_width] - identifier[l_root] )) identifier[new_root_start] = identifier[l_box_width] + literal[int] identifier[gap_size] += literal[int] keyword[else] : identifier[new_root_start] = literal[int] identifier[line1] . identifier[append] ( identifier[node_repr] ) identifier[line2] . identifier[append] ( literal[string] * identifier[new_root_width] ) keyword[if] identifier[r_box_width] > literal[int] : identifier[r_root] =( identifier[r_root_start] + identifier[r_root_end] )// literal[int] identifier[line1] . identifier[append] ( literal[string] * identifier[r_root] ) identifier[line1] . identifier[append] ( literal[string] *( identifier[r_box_width] - identifier[r_root] + literal[int] )) identifier[line2] . identifier[append] ( literal[string] * identifier[r_root] + literal[string] ) identifier[line2] . identifier[append] ( literal[string] *( identifier[r_box_width] - identifier[r_root] )) identifier[gap_size] += literal[int] identifier[new_root_end] = identifier[new_root_start] + identifier[new_root_width] - literal[int] identifier[gap] = literal[string] * identifier[gap_size] identifier[new_box] =[ literal[string] . identifier[join] ( identifier[line1] ), literal[string] . identifier[join] ( identifier[line2] )] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[max] ( identifier[len] ( identifier[l_box] ), identifier[len] ( identifier[r_box] ))): identifier[l_line] = identifier[l_box] [ identifier[i] ] keyword[if] identifier[i] < identifier[len] ( identifier[l_box] ) keyword[else] literal[string] * identifier[l_box_width] identifier[r_line] = identifier[r_box] [ identifier[i] ] keyword[if] identifier[i] < identifier[len] ( identifier[r_box] ) keyword[else] literal[string] * identifier[r_box_width] identifier[new_box] . 
identifier[append] ( identifier[l_line] + identifier[gap] + identifier[r_line] ) keyword[return] identifier[new_box] , identifier[len] ( identifier[new_box] [ literal[int] ]), identifier[new_root_start] , identifier[new_root_end]
def _build_tree_string(root, curr_index, index=False, delimiter='-'): """Recursively walk down the binary tree and build a pretty-print string. In each recursive call, a "box" of characters visually representing the current (sub)tree is constructed line by line. Each line is padded with whitespaces to ensure all lines in the box have the same length. Then the box, its width, and start-end positions of its root node value repr string (required for drawing branches) are sent up to the parent call. The parent call then combines its left and right sub-boxes to build a larger box etc. :param root: Root node of the binary tree. :type root: binarytree.Node | None :param curr_index: Level-order_ index of the current node (root node is 0). :type curr_index: int :param index: If set to True, include the level-order_ node indexes using the following format: ``{index}{delimiter}{value}`` (default: False). :type index: bool :param delimiter: Delimiter character between the node index and the node value (default: '-'). :type delimiter: :return: Box of characters visually representing the current subtree, width of the box, and start-end positions of the repr string of the new root node value. :rtype: ([str], int, int, int) .. 
_Level-order: https://en.wikipedia.org/wiki/Tree_traversal#Breadth-first_search """ if root is None: return ([], 0, 0, 0) # depends on [control=['if'], data=[]] line1 = [] line2 = [] if index: node_repr = '{}{}{}'.format(curr_index, delimiter, root.value) # depends on [control=['if'], data=[]] else: node_repr = str(root.value) new_root_width = gap_size = len(node_repr) # Get the left and right sub-boxes, their widths, and root repr positions (l_box, l_box_width, l_root_start, l_root_end) = _build_tree_string(root.left, 2 * curr_index + 1, index, delimiter) (r_box, r_box_width, r_root_start, r_root_end) = _build_tree_string(root.right, 2 * curr_index + 2, index, delimiter) # Draw the branch connecting the current root node to the left sub-box # Pad the line with whitespaces where necessary if l_box_width > 0: l_root = (l_root_start + l_root_end) // 2 + 1 line1.append(' ' * (l_root + 1)) line1.append('_' * (l_box_width - l_root)) line2.append(' ' * l_root + '/') line2.append(' ' * (l_box_width - l_root)) new_root_start = l_box_width + 1 gap_size += 1 # depends on [control=['if'], data=['l_box_width']] else: new_root_start = 0 # Draw the representation of the current root node line1.append(node_repr) line2.append(' ' * new_root_width) # Draw the branch connecting the current root node to the right sub-box # Pad the line with whitespaces where necessary if r_box_width > 0: r_root = (r_root_start + r_root_end) // 2 line1.append('_' * r_root) line1.append(' ' * (r_box_width - r_root + 1)) line2.append(' ' * r_root + '\\') line2.append(' ' * (r_box_width - r_root)) gap_size += 1 # depends on [control=['if'], data=['r_box_width']] new_root_end = new_root_start + new_root_width - 1 # Combine the left and right sub-boxes with the branches drawn above gap = ' ' * gap_size new_box = [''.join(line1), ''.join(line2)] for i in range(max(len(l_box), len(r_box))): l_line = l_box[i] if i < len(l_box) else ' ' * l_box_width r_line = r_box[i] if i < len(r_box) else ' ' * r_box_width 
new_box.append(l_line + gap + r_line) # depends on [control=['for'], data=['i']] # Return the new box, its width and its root repr positions return (new_box, len(new_box[0]), new_root_start, new_root_end)
def get_metric_type(measure, aggregation): """Get the corresponding metric type for the given stats type. :type measure: (:class: '~opencensus.stats.measure.BaseMeasure') :param measure: the measure for which to find a metric type :type aggregation: (:class: '~opencensus.stats.aggregation.BaseAggregation') :param aggregation: the aggregation for which to find a metric type """ if aggregation.aggregation_type == aggregation_module.Type.NONE: raise ValueError("aggregation type must not be NONE") assert isinstance(aggregation, AGGREGATION_TYPE_MAP[aggregation.aggregation_type]) if aggregation.aggregation_type == aggregation_module.Type.SUM: if isinstance(measure, measure_module.MeasureInt): return metric_descriptor.MetricDescriptorType.CUMULATIVE_INT64 elif isinstance(measure, measure_module.MeasureFloat): return metric_descriptor.MetricDescriptorType.CUMULATIVE_DOUBLE else: raise ValueError elif aggregation.aggregation_type == aggregation_module.Type.COUNT: return metric_descriptor.MetricDescriptorType.CUMULATIVE_INT64 elif aggregation.aggregation_type == aggregation_module.Type.DISTRIBUTION: return metric_descriptor.MetricDescriptorType.CUMULATIVE_DISTRIBUTION elif aggregation.aggregation_type == aggregation_module.Type.LASTVALUE: if isinstance(measure, measure_module.MeasureInt): return metric_descriptor.MetricDescriptorType.GAUGE_INT64 elif isinstance(measure, measure_module.MeasureFloat): return metric_descriptor.MetricDescriptorType.GAUGE_DOUBLE else: raise ValueError else: raise AssertionError
def function[get_metric_type, parameter[measure, aggregation]]: constant[Get the corresponding metric type for the given stats type. :type measure: (:class: '~opencensus.stats.measure.BaseMeasure') :param measure: the measure for which to find a metric type :type aggregation: (:class: '~opencensus.stats.aggregation.BaseAggregation') :param aggregation: the aggregation for which to find a metric type ] if compare[name[aggregation].aggregation_type equal[==] name[aggregation_module].Type.NONE] begin[:] <ast.Raise object at 0x7da20c9900d0> assert[call[name[isinstance], parameter[name[aggregation], call[name[AGGREGATION_TYPE_MAP]][name[aggregation].aggregation_type]]]] if compare[name[aggregation].aggregation_type equal[==] name[aggregation_module].Type.SUM] begin[:] if call[name[isinstance], parameter[name[measure], name[measure_module].MeasureInt]] begin[:] return[name[metric_descriptor].MetricDescriptorType.CUMULATIVE_INT64]
keyword[def] identifier[get_metric_type] ( identifier[measure] , identifier[aggregation] ): literal[string] keyword[if] identifier[aggregation] . identifier[aggregation_type] == identifier[aggregation_module] . identifier[Type] . identifier[NONE] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[assert] identifier[isinstance] ( identifier[aggregation] , identifier[AGGREGATION_TYPE_MAP] [ identifier[aggregation] . identifier[aggregation_type] ]) keyword[if] identifier[aggregation] . identifier[aggregation_type] == identifier[aggregation_module] . identifier[Type] . identifier[SUM] : keyword[if] identifier[isinstance] ( identifier[measure] , identifier[measure_module] . identifier[MeasureInt] ): keyword[return] identifier[metric_descriptor] . identifier[MetricDescriptorType] . identifier[CUMULATIVE_INT64] keyword[elif] identifier[isinstance] ( identifier[measure] , identifier[measure_module] . identifier[MeasureFloat] ): keyword[return] identifier[metric_descriptor] . identifier[MetricDescriptorType] . identifier[CUMULATIVE_DOUBLE] keyword[else] : keyword[raise] identifier[ValueError] keyword[elif] identifier[aggregation] . identifier[aggregation_type] == identifier[aggregation_module] . identifier[Type] . identifier[COUNT] : keyword[return] identifier[metric_descriptor] . identifier[MetricDescriptorType] . identifier[CUMULATIVE_INT64] keyword[elif] identifier[aggregation] . identifier[aggregation_type] == identifier[aggregation_module] . identifier[Type] . identifier[DISTRIBUTION] : keyword[return] identifier[metric_descriptor] . identifier[MetricDescriptorType] . identifier[CUMULATIVE_DISTRIBUTION] keyword[elif] identifier[aggregation] . identifier[aggregation_type] == identifier[aggregation_module] . identifier[Type] . identifier[LASTVALUE] : keyword[if] identifier[isinstance] ( identifier[measure] , identifier[measure_module] . identifier[MeasureInt] ): keyword[return] identifier[metric_descriptor] . identifier[MetricDescriptorType] . 
identifier[GAUGE_INT64] keyword[elif] identifier[isinstance] ( identifier[measure] , identifier[measure_module] . identifier[MeasureFloat] ): keyword[return] identifier[metric_descriptor] . identifier[MetricDescriptorType] . identifier[GAUGE_DOUBLE] keyword[else] : keyword[raise] identifier[ValueError] keyword[else] : keyword[raise] identifier[AssertionError]
def get_metric_type(measure, aggregation): """Get the corresponding metric type for the given stats type. :type measure: (:class: '~opencensus.stats.measure.BaseMeasure') :param measure: the measure for which to find a metric type :type aggregation: (:class: '~opencensus.stats.aggregation.BaseAggregation') :param aggregation: the aggregation for which to find a metric type """ if aggregation.aggregation_type == aggregation_module.Type.NONE: raise ValueError('aggregation type must not be NONE') # depends on [control=['if'], data=[]] assert isinstance(aggregation, AGGREGATION_TYPE_MAP[aggregation.aggregation_type]) if aggregation.aggregation_type == aggregation_module.Type.SUM: if isinstance(measure, measure_module.MeasureInt): return metric_descriptor.MetricDescriptorType.CUMULATIVE_INT64 # depends on [control=['if'], data=[]] elif isinstance(measure, measure_module.MeasureFloat): return metric_descriptor.MetricDescriptorType.CUMULATIVE_DOUBLE # depends on [control=['if'], data=[]] else: raise ValueError # depends on [control=['if'], data=[]] elif aggregation.aggregation_type == aggregation_module.Type.COUNT: return metric_descriptor.MetricDescriptorType.CUMULATIVE_INT64 # depends on [control=['if'], data=[]] elif aggregation.aggregation_type == aggregation_module.Type.DISTRIBUTION: return metric_descriptor.MetricDescriptorType.CUMULATIVE_DISTRIBUTION # depends on [control=['if'], data=[]] elif aggregation.aggregation_type == aggregation_module.Type.LASTVALUE: if isinstance(measure, measure_module.MeasureInt): return metric_descriptor.MetricDescriptorType.GAUGE_INT64 # depends on [control=['if'], data=[]] elif isinstance(measure, measure_module.MeasureFloat): return metric_descriptor.MetricDescriptorType.GAUGE_DOUBLE # depends on [control=['if'], data=[]] else: raise ValueError # depends on [control=['if'], data=[]] else: raise AssertionError
def getVariables(self):
    """
    Get all the variables declared.
    """
    # Fetch under the interpreter lock, then wrap in an EntityMap view.
    return EntityMap(
        lock_and_call(lambda: self._impl.getVariables(), self._lock),
        Variable,
    )
def function[getVariables, parameter[self]]: constant[ Get all the variables declared. ] variable[variables] assign[=] call[name[lock_and_call], parameter[<ast.Lambda object at 0x7da18bcc8fd0>, name[self]._lock]] return[call[name[EntityMap], parameter[name[variables], name[Variable]]]]
keyword[def] identifier[getVariables] ( identifier[self] ): literal[string] identifier[variables] = identifier[lock_and_call] ( keyword[lambda] : identifier[self] . identifier[_impl] . identifier[getVariables] (), identifier[self] . identifier[_lock] ) keyword[return] identifier[EntityMap] ( identifier[variables] , identifier[Variable] )
def getVariables(self): """ Get all the variables declared. """ variables = lock_and_call(lambda : self._impl.getVariables(), self._lock) return EntityMap(variables, Variable)
def convex_hull_collide(nodes1, nodes2):
    """Determine if the convex hulls of two curves collide.

    .. note::

       This is a helper for :func:`from_linearized`.

    Args:
        nodes1 (numpy.ndarray): Control points of a first curve.
        nodes2 (numpy.ndarray): Control points of a second curve.

    Returns:
        bool: Indicating if the convex hulls collide.
    """
    hull1 = _helpers.simple_convex_hull(nodes1)
    hull2 = _helpers.simple_convex_hull(nodes2)
    num_points1 = hull1.shape[1]
    num_points2 = hull2.shape[1]
    # Two-point "hulls" are degenerate line segments; use the cheaper
    # segment-segment test in that case.
    if num_points1 == 2 and num_points2 == 2:
        return line_line_collide(hull1, hull2)
    return _helpers.polygon_collide(hull1, hull2)
def function[convex_hull_collide, parameter[nodes1, nodes2]]: constant[Determine if the convex hulls of two curves collide. .. note:: This is a helper for :func:`from_linearized`. Args: nodes1 (numpy.ndarray): Control points of a first curve. nodes2 (numpy.ndarray): Control points of a second curve. Returns: bool: Indicating if the convex hulls collide. ] variable[polygon1] assign[=] call[name[_helpers].simple_convex_hull, parameter[name[nodes1]]] <ast.Tuple object at 0x7da207f9bb80> assign[=] name[polygon1].shape variable[polygon2] assign[=] call[name[_helpers].simple_convex_hull, parameter[name[nodes2]]] <ast.Tuple object at 0x7da207f99d20> assign[=] name[polygon2].shape if <ast.BoolOp object at 0x7da207f98370> begin[:] return[call[name[line_line_collide], parameter[name[polygon1], name[polygon2]]]]
keyword[def] identifier[convex_hull_collide] ( identifier[nodes1] , identifier[nodes2] ): literal[string] identifier[polygon1] = identifier[_helpers] . identifier[simple_convex_hull] ( identifier[nodes1] ) identifier[_] , identifier[polygon_size1] = identifier[polygon1] . identifier[shape] identifier[polygon2] = identifier[_helpers] . identifier[simple_convex_hull] ( identifier[nodes2] ) identifier[_] , identifier[polygon_size2] = identifier[polygon2] . identifier[shape] keyword[if] identifier[polygon_size1] == literal[int] keyword[and] identifier[polygon_size2] == literal[int] : keyword[return] identifier[line_line_collide] ( identifier[polygon1] , identifier[polygon2] ) keyword[else] : keyword[return] identifier[_helpers] . identifier[polygon_collide] ( identifier[polygon1] , identifier[polygon2] )
def convex_hull_collide(nodes1, nodes2): """Determine if the convex hulls of two curves collide. .. note:: This is a helper for :func:`from_linearized`. Args: nodes1 (numpy.ndarray): Control points of a first curve. nodes2 (numpy.ndarray): Control points of a second curve. Returns: bool: Indicating if the convex hulls collide. """ polygon1 = _helpers.simple_convex_hull(nodes1) (_, polygon_size1) = polygon1.shape polygon2 = _helpers.simple_convex_hull(nodes2) (_, polygon_size2) = polygon2.shape if polygon_size1 == 2 and polygon_size2 == 2: return line_line_collide(polygon1, polygon2) # depends on [control=['if'], data=[]] else: return _helpers.polygon_collide(polygon1, polygon2)
def _multi_rpush_pipeline(self, pipe, queue, values, bulk_size=0): ''' Pushes multiple elements to a list in a given pipeline If bulk_size is set it will execute the pipeline every bulk_size elements ''' cont = 0 for value in values: pipe.rpush(queue, value) if bulk_size != 0 and cont % bulk_size == 0: pipe.execute()
def function[_multi_rpush_pipeline, parameter[self, pipe, queue, values, bulk_size]]: constant[ Pushes multiple elements to a list in a given pipeline If bulk_size is set it will execute the pipeline every bulk_size elements ] variable[cont] assign[=] constant[0] for taget[name[value]] in starred[name[values]] begin[:] call[name[pipe].rpush, parameter[name[queue], name[value]]] if <ast.BoolOp object at 0x7da207f003a0> begin[:] call[name[pipe].execute, parameter[]]
keyword[def] identifier[_multi_rpush_pipeline] ( identifier[self] , identifier[pipe] , identifier[queue] , identifier[values] , identifier[bulk_size] = literal[int] ): literal[string] identifier[cont] = literal[int] keyword[for] identifier[value] keyword[in] identifier[values] : identifier[pipe] . identifier[rpush] ( identifier[queue] , identifier[value] ) keyword[if] identifier[bulk_size] != literal[int] keyword[and] identifier[cont] % identifier[bulk_size] == literal[int] : identifier[pipe] . identifier[execute] ()
def _multi_rpush_pipeline(self, pipe, queue, values, bulk_size=0): """ Pushes multiple elements to a list in a given pipeline If bulk_size is set it will execute the pipeline every bulk_size elements """ cont = 0 for value in values: pipe.rpush(queue, value) if bulk_size != 0 and cont % bulk_size == 0: pipe.execute() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['value']]
def copy(self, klass=None):
    """Create a new instance of the current chain.
    """
    # Default to our own class when no explicit target class is given.
    target_cls = klass if klass else self.__class__
    duplicate = target_cls(*self._args, **self._kwargs)
    # Give the clone its own token mapping so the two chains diverge.
    duplicate._tokens = self._tokens.copy()
    return duplicate
def function[copy, parameter[self, klass]]: constant[Create a new instance of the current chain. ] variable[chain] assign[=] call[<ast.IfExp object at 0x7da20c6e6860>, parameter[<ast.Starred object at 0x7da20c6e72b0>]] name[chain]._tokens assign[=] call[name[self]._tokens.copy, parameter[]] return[name[chain]]
keyword[def] identifier[copy] ( identifier[self] , identifier[klass] = keyword[None] ): literal[string] identifier[chain] =( identifier[klass] keyword[if] identifier[klass] keyword[else] identifier[self] . identifier[__class__] )(* identifier[self] . identifier[_args] ,** identifier[self] . identifier[_kwargs] ) identifier[chain] . identifier[_tokens] = identifier[self] . identifier[_tokens] . identifier[copy] () keyword[return] identifier[chain]
def copy(self, klass=None): """Create a new instance of the current chain. """ chain = (klass if klass else self.__class__)(*self._args, **self._kwargs) chain._tokens = self._tokens.copy() return chain
def df(self):
    """
    Note: this accessor is read-only.  It should be copied, if accessed in
    an application, more than once.

    Returns
    -------
    Dataframe of the symbol's final data.
    """
    rows = self._final_data()
    index_name = self.index.name
    # No cached data: return an empty frame with the expected index/column.
    if not len(rows):
        empty = pd.DataFrame(columns=[index_name, self.name])
        return empty.set_index(index_name)
    frame = pd.DataFrame(rows)
    # Final data must be (index, value) pairs; anything else means the
    # symbol was never cached.
    if len(frame.columns) != 2:
        msg = "Symbol ({}) needs to be cached prior to building a Dataframe"
        raise Exception(msg.format(self.name))
    frame.columns = [index_name, self.name]
    return self._finish_df(frame, 'FINAL')
def function[df, parameter[self]]: constant[ Note: this accessor is read-only. It should be copied, if accessed in an application, more than once. Returns ------- Dataframe of the symbol's final data. ] variable[data] assign[=] call[name[self]._final_data, parameter[]] if compare[call[name[len], parameter[name[data]]] equal[==] constant[0]] begin[:] variable[adf] assign[=] call[name[pd].DataFrame, parameter[]] return[call[name[adf].set_index, parameter[name[self].index.name]]] variable[adf] assign[=] call[name[pd].DataFrame, parameter[name[data]]] if compare[call[name[len], parameter[name[adf].columns]] not_equal[!=] constant[2]] begin[:] variable[msg] assign[=] constant[Symbol ({}) needs to be cached prior to building a Dataframe] variable[msg] assign[=] call[name[msg].format, parameter[name[self].name]] <ast.Raise object at 0x7da20c992b00> name[adf].columns assign[=] list[[<ast.Attribute object at 0x7da20c9922f0>, <ast.Attribute object at 0x7da20c9911b0>]] return[call[name[self]._finish_df, parameter[name[adf], constant[FINAL]]]]
keyword[def] identifier[df] ( identifier[self] ): literal[string] identifier[data] = identifier[self] . identifier[_final_data] () keyword[if] identifier[len] ( identifier[data] )== literal[int] : identifier[adf] = identifier[pd] . identifier[DataFrame] ( identifier[columns] =[ identifier[self] . identifier[index] . identifier[name] , identifier[self] . identifier[name] ]) keyword[return] identifier[adf] . identifier[set_index] ( identifier[self] . identifier[index] . identifier[name] ) identifier[adf] = identifier[pd] . identifier[DataFrame] ( identifier[data] ) keyword[if] identifier[len] ( identifier[adf] . identifier[columns] )!= literal[int] : identifier[msg] = literal[string] identifier[msg] = identifier[msg] . identifier[format] ( identifier[self] . identifier[name] ) keyword[raise] identifier[Exception] ( identifier[msg] ) identifier[adf] . identifier[columns] =[ identifier[self] . identifier[index] . identifier[name] , identifier[self] . identifier[name] ] keyword[return] identifier[self] . identifier[_finish_df] ( identifier[adf] , literal[string] )
def df(self): """ Note: this accessor is read-only. It should be copied, if accessed in an application, more than once. Returns ------- Dataframe of the symbol's final data. """ data = self._final_data() if len(data) == 0: adf = pd.DataFrame(columns=[self.index.name, self.name]) return adf.set_index(self.index.name) # depends on [control=['if'], data=[]] adf = pd.DataFrame(data) if len(adf.columns) != 2: msg = 'Symbol ({}) needs to be cached prior to building a Dataframe' msg = msg.format(self.name) raise Exception(msg) # depends on [control=['if'], data=[]] adf.columns = [self.index.name, self.name] return self._finish_df(adf, 'FINAL')
def _min(self): """Getter for the minimum series value""" return ( self.range[0] if (self.range and self.range[0] is not None) else (min(self.yvals) if self.yvals else None) )
def function[_min, parameter[self]]: constant[Getter for the minimum series value] return[<ast.IfExp object at 0x7da20c7cb580>]
keyword[def] identifier[_min] ( identifier[self] ): literal[string] keyword[return] ( identifier[self] . identifier[range] [ literal[int] ] keyword[if] ( identifier[self] . identifier[range] keyword[and] identifier[self] . identifier[range] [ literal[int] ] keyword[is] keyword[not] keyword[None] ) keyword[else] ( identifier[min] ( identifier[self] . identifier[yvals] ) keyword[if] identifier[self] . identifier[yvals] keyword[else] keyword[None] ) )
def _min(self): """Getter for the minimum series value""" return self.range[0] if self.range and self.range[0] is not None else min(self.yvals) if self.yvals else None
def _read_protocol_line(self):
    """Reads the next line of instrumentation output relevant to snippets.

    This method will skip over lines that don't start with 'SNIPPET' or
    'INSTRUMENTATION_RESULT'.

    Returns:
        (str) Next line of snippet-related instrumentation output, stripped.

    Raises:
        jsonrpc_client_base.AppStartError: If EOF is reached without any
            protocol lines being read.
    """
    while True:
        raw = self._proc.stdout.readline().decode('utf-8')
        # readline() returns an empty string only at EOF; regular blank
        # output lines still carry their newline, so the EOF check must
        # happen before stripping.
        if not raw:
            raise jsonrpc_client_base.AppStartError(
                self._ad, 'Unexpected EOF waiting for app to start')
        line = raw.strip()
        if line.startswith(('INSTRUMENTATION_RESULT:', 'SNIPPET ')):
            self.log.debug(
                'Accepted line from instrumentation output: "%s"', line)
            return line
        self.log.debug('Discarded line from instrumentation output: "%s"',
                       line)
def function[_read_protocol_line, parameter[self]]: constant[Reads the next line of instrumentation output relevant to snippets. This method will skip over lines that don't start with 'SNIPPET' or 'INSTRUMENTATION_RESULT'. Returns: (str) Next line of snippet-related instrumentation output, stripped. Raises: jsonrpc_client_base.AppStartError: If EOF is reached without any protocol lines being read. ] while constant[True] begin[:] variable[line] assign[=] call[call[name[self]._proc.stdout.readline, parameter[]].decode, parameter[constant[utf-8]]] if <ast.UnaryOp object at 0x7da1b0630e80> begin[:] <ast.Raise object at 0x7da1b0630e50> variable[line] assign[=] call[name[line].strip, parameter[]] if <ast.BoolOp object at 0x7da1b0630b20> begin[:] call[name[self].log.debug, parameter[constant[Accepted line from instrumentation output: "%s"], name[line]]] return[name[line]] call[name[self].log.debug, parameter[constant[Discarded line from instrumentation output: "%s"], name[line]]]
keyword[def] identifier[_read_protocol_line] ( identifier[self] ): literal[string] keyword[while] keyword[True] : identifier[line] = identifier[self] . identifier[_proc] . identifier[stdout] . identifier[readline] (). identifier[decode] ( literal[string] ) keyword[if] keyword[not] identifier[line] : keyword[raise] identifier[jsonrpc_client_base] . identifier[AppStartError] ( identifier[self] . identifier[_ad] , literal[string] ) identifier[line] = identifier[line] . identifier[strip] () keyword[if] ( identifier[line] . identifier[startswith] ( literal[string] ) keyword[or] identifier[line] . identifier[startswith] ( literal[string] )): identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[line] ) keyword[return] identifier[line] identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[line] )
def _read_protocol_line(self): """Reads the next line of instrumentation output relevant to snippets. This method will skip over lines that don't start with 'SNIPPET' or 'INSTRUMENTATION_RESULT'. Returns: (str) Next line of snippet-related instrumentation output, stripped. Raises: jsonrpc_client_base.AppStartError: If EOF is reached without any protocol lines being read. """ while True: line = self._proc.stdout.readline().decode('utf-8') if not line: raise jsonrpc_client_base.AppStartError(self._ad, 'Unexpected EOF waiting for app to start') # depends on [control=['if'], data=[]] # readline() uses an empty string to mark EOF, and a single newline # to mark regular empty lines in the output. Don't move the strip() # call above the truthiness check, or this method will start # considering any blank output line to be EOF. line = line.strip() if line.startswith('INSTRUMENTATION_RESULT:') or line.startswith('SNIPPET '): self.log.debug('Accepted line from instrumentation output: "%s"', line) return line # depends on [control=['if'], data=[]] self.log.debug('Discarded line from instrumentation output: "%s"', line) # depends on [control=['while'], data=[]]
def intersects_id(self,
                  ray_origins,
                  ray_directions,
                  return_locations=False,
                  multiple_hits=True,
                  **kwargs):
    """
    Find the intersections between the current mesh and a list of rays.

    Parameters
    ------------
    ray_origins: (m,3) float, ray origin points
    ray_directions: (m,3) float, ray direction vectors
    multiple_hits: bool, consider multiple hits of each ray or not
    return_locations: bool, return hit locations or not

    Returns
    -----------
    index_triangle: (h,) int, index of triangles hit
    index_ray: (h,) int, index of ray that hit triangle
    locations: (h,3) float, (optional) position of intersection in space
    """
    mesh = self.mesh
    index_tri, index_ray, locations = ray_triangle_id(
        triangles=mesh.triangles,
        ray_origins=ray_origins,
        ray_directions=ray_directions,
        tree=mesh.triangles_tree,
        multiple_hits=multiple_hits,
        triangles_normal=mesh.face_normals)
    if not return_locations:
        return index_tri, index_ray
    if len(index_tri) == 0:
        return index_tri, index_ray, locations
    # Collapse duplicate (location, ray) pairs so each hit is reported once.
    keep = grouping.unique_rows(np.column_stack((locations, index_ray)))[0]
    return index_tri[keep], index_ray[keep], locations[keep]
def function[intersects_id, parameter[self, ray_origins, ray_directions, return_locations, multiple_hits]]: constant[ Find the intersections between the current mesh and a list of rays. Parameters ------------ ray_origins: (m,3) float, ray origin points ray_directions: (m,3) float, ray direction vectors multiple_hits: bool, consider multiple hits of each ray or not return_locations: bool, return hit locations or not Returns ----------- index_triangle: (h,) int, index of triangles hit index_ray: (h,) int, index of ray that hit triangle locations: (h,3) float, (optional) position of intersection in space ] <ast.Tuple object at 0x7da20c9921d0> assign[=] call[name[ray_triangle_id], parameter[]] if name[return_locations] begin[:] if compare[call[name[len], parameter[name[index_tri]]] equal[==] constant[0]] begin[:] return[tuple[[<ast.Name object at 0x7da20c990610>, <ast.Name object at 0x7da20c992830>, <ast.Name object at 0x7da20c9936a0>]]] variable[unique] assign[=] call[call[name[grouping].unique_rows, parameter[call[name[np].column_stack, parameter[tuple[[<ast.Name object at 0x7da20c992ad0>, <ast.Name object at 0x7da20c9910f0>]]]]]]][constant[0]] return[tuple[[<ast.Subscript object at 0x7da20c990400>, <ast.Subscript object at 0x7da20c991de0>, <ast.Subscript object at 0x7da20c9920b0>]]] return[tuple[[<ast.Name object at 0x7da20c9930a0>, <ast.Name object at 0x7da20c993be0>]]]
keyword[def] identifier[intersects_id] ( identifier[self] , identifier[ray_origins] , identifier[ray_directions] , identifier[return_locations] = keyword[False] , identifier[multiple_hits] = keyword[True] , ** identifier[kwargs] ): literal[string] ( identifier[index_tri] , identifier[index_ray] , identifier[locations] )= identifier[ray_triangle_id] ( identifier[triangles] = identifier[self] . identifier[mesh] . identifier[triangles] , identifier[ray_origins] = identifier[ray_origins] , identifier[ray_directions] = identifier[ray_directions] , identifier[tree] = identifier[self] . identifier[mesh] . identifier[triangles_tree] , identifier[multiple_hits] = identifier[multiple_hits] , identifier[triangles_normal] = identifier[self] . identifier[mesh] . identifier[face_normals] ) keyword[if] identifier[return_locations] : keyword[if] identifier[len] ( identifier[index_tri] )== literal[int] : keyword[return] identifier[index_tri] , identifier[index_ray] , identifier[locations] identifier[unique] = identifier[grouping] . identifier[unique_rows] ( identifier[np] . identifier[column_stack] (( identifier[locations] , identifier[index_ray] )))[ literal[int] ] keyword[return] identifier[index_tri] [ identifier[unique] ], identifier[index_ray] [ identifier[unique] ], identifier[locations] [ identifier[unique] ] keyword[return] identifier[index_tri] , identifier[index_ray]
def intersects_id(self, ray_origins, ray_directions, return_locations=False, multiple_hits=True, **kwargs): """ Find the intersections between the current mesh and a list of rays. Parameters ------------ ray_origins: (m,3) float, ray origin points ray_directions: (m,3) float, ray direction vectors multiple_hits: bool, consider multiple hits of each ray or not return_locations: bool, return hit locations or not Returns ----------- index_triangle: (h,) int, index of triangles hit index_ray: (h,) int, index of ray that hit triangle locations: (h,3) float, (optional) position of intersection in space """ (index_tri, index_ray, locations) = ray_triangle_id(triangles=self.mesh.triangles, ray_origins=ray_origins, ray_directions=ray_directions, tree=self.mesh.triangles_tree, multiple_hits=multiple_hits, triangles_normal=self.mesh.face_normals) if return_locations: if len(index_tri) == 0: return (index_tri, index_ray, locations) # depends on [control=['if'], data=[]] unique = grouping.unique_rows(np.column_stack((locations, index_ray)))[0] return (index_tri[unique], index_ray[unique], locations[unique]) # depends on [control=['if'], data=[]] return (index_tri, index_ray)
def list_arc(archive, compression, cmd, verbosity, interactive):
    """List a ARC archive."""
    # 'v' (verbose listing) above verbosity 1, plain 'l' otherwise.
    mode = 'v' if verbosity > 1 else 'l'
    return [cmd, mode, archive]
def function[list_arc, parameter[archive, compression, cmd, verbosity, interactive]]: constant[List a ARC archive.] variable[cmdlist] assign[=] list[[<ast.Name object at 0x7da1b06046d0>]] if compare[name[verbosity] greater[>] constant[1]] begin[:] call[name[cmdlist].append, parameter[constant[v]]] call[name[cmdlist].append, parameter[name[archive]]] return[name[cmdlist]]
keyword[def] identifier[list_arc] ( identifier[archive] , identifier[compression] , identifier[cmd] , identifier[verbosity] , identifier[interactive] ): literal[string] identifier[cmdlist] =[ identifier[cmd] ] keyword[if] identifier[verbosity] > literal[int] : identifier[cmdlist] . identifier[append] ( literal[string] ) keyword[else] : identifier[cmdlist] . identifier[append] ( literal[string] ) identifier[cmdlist] . identifier[append] ( identifier[archive] ) keyword[return] identifier[cmdlist]
def list_arc(archive, compression, cmd, verbosity, interactive): """List a ARC archive.""" cmdlist = [cmd] if verbosity > 1: cmdlist.append('v') # depends on [control=['if'], data=[]] else: cmdlist.append('l') cmdlist.append(archive) return cmdlist
def flux_production(F):
    r"""Returns the net flux production for all states

    Parameters
    ----------
    F : (n, n) ndarray
        Matrix of flux values between pairs of states.

    Returns
    -------
    prod : (n) ndarray
        array with flux production (positive) or consumption (negative)
        at each state
    """
    total_out = np.array(np.sum(F, axis=1)).flatten()  # flux leaving each state
    total_in = np.array(np.sum(F, axis=0)).flatten()   # flux entering each state
    # Net production is outgoing minus incoming flux per state.
    return total_out - total_in
def function[flux_production, parameter[F]]: constant[Returns the net flux production for all states Parameters ---------- F : (n, n) ndarray Matrix of flux values between pairs of states. Returns ------- prod : (n) ndarray array with flux production (positive) or consumption (negative) at each state ] variable[influxes] assign[=] call[call[name[np].array, parameter[call[name[np].sum, parameter[name[F]]]]].flatten, parameter[]] variable[outfluxes] assign[=] call[call[name[np].array, parameter[call[name[np].sum, parameter[name[F]]]]].flatten, parameter[]] variable[prod] assign[=] binary_operation[name[outfluxes] - name[influxes]] return[name[prod]]
keyword[def] identifier[flux_production] ( identifier[F] ): literal[string] identifier[influxes] = identifier[np] . identifier[array] ( identifier[np] . identifier[sum] ( identifier[F] , identifier[axis] = literal[int] )). identifier[flatten] () identifier[outfluxes] = identifier[np] . identifier[array] ( identifier[np] . identifier[sum] ( identifier[F] , identifier[axis] = literal[int] )). identifier[flatten] () identifier[prod] = identifier[outfluxes] - identifier[influxes] keyword[return] identifier[prod]
def flux_production(F): """Returns the net flux production for all states Parameters ---------- F : (n, n) ndarray Matrix of flux values between pairs of states. Returns ------- prod : (n) ndarray array with flux production (positive) or consumption (negative) at each state """ influxes = np.array(np.sum(F, axis=0)).flatten() # all that flows in outfluxes = np.array(np.sum(F, axis=1)).flatten() # all that flows out prod = outfluxes - influxes # net flux into nodes return prod
def check_file_paths(self, *args):
    """
    Ensure all arguments provided correspond to a file.

    :param args: candidate file paths; ``None`` entries are skipped
    :return: the arguments, unchanged
    :raises OSError: if ``check_file_path`` rejects any non-None path
    """
    # Idiom fix: the original iterated ``enumerate(args)`` and manually
    # unpacked ``path[1]``; iterating args directly is equivalent.
    for path in args:
        if path is None:
            continue
        try:
            self.check_file_path(path)
        except OSError as ex:
            # Surface the failure in the log before propagating it.
            logger.warn(ex)
            raise
    return args
def function[check_file_paths, parameter[self]]: constant[ Ensure all arguments provided correspond to a file ] for taget[name[path]] in starred[call[name[enumerate], parameter[name[args]]]] begin[:] variable[path] assign[=] call[name[path]][constant[1]] if compare[name[path] is_not constant[None]] begin[:] <ast.Try object at 0x7da1b26ae560> return[name[args]]
keyword[def] identifier[check_file_paths] ( identifier[self] ,* identifier[args] ): literal[string] keyword[for] identifier[path] keyword[in] identifier[enumerate] ( identifier[args] ): identifier[path] = identifier[path] [ literal[int] ] keyword[if] identifier[path] keyword[is] keyword[not] keyword[None] : keyword[try] : identifier[self] . identifier[check_file_path] ( identifier[path] ) keyword[except] identifier[OSError] keyword[as] identifier[ex] : identifier[logger] . identifier[warn] ( identifier[ex] ) keyword[raise] keyword[return] identifier[args]
def check_file_paths(self, *args): """ Ensure all arguments provided correspond to a file """ for path in enumerate(args): path = path[1] if path is not None: try: self.check_file_path(path) # depends on [control=['try'], data=[]] except OSError as ex: logger.warn(ex) raise # depends on [control=['except'], data=['ex']] # depends on [control=['if'], data=['path']] # depends on [control=['for'], data=['path']] return args
def compute_group_count(self, pattern):
    """Compute the number of regexp match groups when the pattern
    is provided to the :func:`Cardinality.make_pattern()` method.

    :param pattern: Item regexp pattern (as string).
    :return: Number of regexp match groups in the cardinality pattern.
    """
    # A "many" cardinality embeds the item pattern twice, doubling the
    # groups contributed by the pattern itself.
    repetitions = 2 if self.is_many() else 1
    return self.group_count + repetitions * pattern_group_count(pattern)
def function[compute_group_count, parameter[self, pattern]]: constant[Compute the number of regexp match groups when the pattern is provided to the :func:`Cardinality.make_pattern()` method. :param pattern: Item regexp pattern (as string). :return: Number of regexp match groups in the cardinality pattern. ] variable[group_count] assign[=] name[self].group_count variable[pattern_repeated] assign[=] constant[1] if call[name[self].is_many, parameter[]] begin[:] variable[pattern_repeated] assign[=] constant[2] return[binary_operation[name[group_count] + binary_operation[name[pattern_repeated] * call[name[pattern_group_count], parameter[name[pattern]]]]]]
keyword[def] identifier[compute_group_count] ( identifier[self] , identifier[pattern] ): literal[string] identifier[group_count] = identifier[self] . identifier[group_count] identifier[pattern_repeated] = literal[int] keyword[if] identifier[self] . identifier[is_many] (): identifier[pattern_repeated] = literal[int] keyword[return] identifier[group_count] + identifier[pattern_repeated] * identifier[pattern_group_count] ( identifier[pattern] )
def compute_group_count(self, pattern): """Compute the number of regexp match groups when the pattern is provided to the :func:`Cardinality.make_pattern()` method. :param pattern: Item regexp pattern (as string). :return: Number of regexp match groups in the cardinality pattern. """ group_count = self.group_count pattern_repeated = 1 if self.is_many(): pattern_repeated = 2 # depends on [control=['if'], data=[]] return group_count + pattern_repeated * pattern_group_count(pattern)
def serialize_smarttag(ctx, document, el, root):
    "Serializes smarttag."

    # Optionally wrap the smarttag contents in a <span> that records the
    # original smarttag element name; otherwise children are attached
    # directly to ``root``.
    if ctx.options['smarttag_span']:
        _span = etree.SubElement(root, 'span', {'class': 'smarttag', 'data-smarttag-element': el.element})
    else:
        _span = root

    for elem in el.elements:
        # Delegate to a registered serializer when one exists for this
        # child element type.
        _ser = ctx.get_serializer(elem)

        if _ser:
            _td = _ser(ctx, document, elem, _span)
        else:
            # Plain text has no serializer; append it following the
            # ElementTree text/tail model: text before the first child
            # goes on the element's ``.text``, text after the last child
            # goes on that child's ``.tail``.
            if isinstance(elem, doc.Text):
                children = list(_span)

                if len(children) == 0:
                    _text = _span.text or u''

                    _span.text = u'{}{}'.format(_text, elem.text)
                else:
                    _text = children[-1].tail or u''

                    children[-1].tail = u'{}{}'.format(_text, elem.text)

    # Let registered 'smarttag' hooks post-process the generated span.
    fire_hooks(ctx, document, el, _span, ctx.get_hook('smarttag'))

    return root
def function[serialize_smarttag, parameter[ctx, document, el, root]]: constant[Serializes smarttag.] if call[name[ctx].options][constant[smarttag_span]] begin[:] variable[_span] assign[=] call[name[etree].SubElement, parameter[name[root], constant[span], dictionary[[<ast.Constant object at 0x7da207f01660>, <ast.Constant object at 0x7da207f03f40>], [<ast.Constant object at 0x7da207f02350>, <ast.Attribute object at 0x7da207f012d0>]]]] for taget[name[elem]] in starred[name[el].elements] begin[:] variable[_ser] assign[=] call[name[ctx].get_serializer, parameter[name[elem]]] if name[_ser] begin[:] variable[_td] assign[=] call[name[_ser], parameter[name[ctx], name[document], name[elem], name[_span]]] call[name[fire_hooks], parameter[name[ctx], name[document], name[el], name[_span], call[name[ctx].get_hook, parameter[constant[smarttag]]]]] return[name[root]]
keyword[def] identifier[serialize_smarttag] ( identifier[ctx] , identifier[document] , identifier[el] , identifier[root] ): literal[string] keyword[if] identifier[ctx] . identifier[options] [ literal[string] ]: identifier[_span] = identifier[etree] . identifier[SubElement] ( identifier[root] , literal[string] ,{ literal[string] : literal[string] , literal[string] : identifier[el] . identifier[element] }) keyword[else] : identifier[_span] = identifier[root] keyword[for] identifier[elem] keyword[in] identifier[el] . identifier[elements] : identifier[_ser] = identifier[ctx] . identifier[get_serializer] ( identifier[elem] ) keyword[if] identifier[_ser] : identifier[_td] = identifier[_ser] ( identifier[ctx] , identifier[document] , identifier[elem] , identifier[_span] ) keyword[else] : keyword[if] identifier[isinstance] ( identifier[elem] , identifier[doc] . identifier[Text] ): identifier[children] = identifier[list] ( identifier[_span] ) keyword[if] identifier[len] ( identifier[children] )== literal[int] : identifier[_text] = identifier[_span] . identifier[text] keyword[or] literal[string] identifier[_span] . identifier[text] = literal[string] . identifier[format] ( identifier[_text] , identifier[elem] . identifier[text] ) keyword[else] : identifier[_text] = identifier[children] [- literal[int] ]. identifier[tail] keyword[or] literal[string] identifier[children] [- literal[int] ]. identifier[tail] = literal[string] . identifier[format] ( identifier[_text] , identifier[elem] . identifier[text] ) identifier[fire_hooks] ( identifier[ctx] , identifier[document] , identifier[el] , identifier[_span] , identifier[ctx] . identifier[get_hook] ( literal[string] )) keyword[return] identifier[root]
def serialize_smarttag(ctx, document, el, root): """Serializes smarttag.""" if ctx.options['smarttag_span']: _span = etree.SubElement(root, 'span', {'class': 'smarttag', 'data-smarttag-element': el.element}) # depends on [control=['if'], data=[]] else: _span = root for elem in el.elements: _ser = ctx.get_serializer(elem) if _ser: _td = _ser(ctx, document, elem, _span) # depends on [control=['if'], data=[]] elif isinstance(elem, doc.Text): children = list(_span) if len(children) == 0: _text = _span.text or u'' _span.text = u'{}{}'.format(_text, elem.text) # depends on [control=['if'], data=[]] else: _text = children[-1].tail or u'' children[-1].tail = u'{}{}'.format(_text, elem.text) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['elem']] fire_hooks(ctx, document, el, _span, ctx.get_hook('smarttag')) return root
def GetHashCode(self):
    """uint32 identifier"""
    # Python slicing never over-runs, so taking the first four bytes
    # directly also covers data shorter than four bytes.
    return int.from_bytes(self.Data[:4], 'little')
def function[GetHashCode, parameter[self]]: constant[uint32 identifier] variable[slice_length] assign[=] <ast.IfExp object at 0x7da204344790> return[call[name[int].from_bytes, parameter[call[name[self].Data][<ast.Slice object at 0x7da204344cd0>], constant[little]]]]
keyword[def] identifier[GetHashCode] ( identifier[self] ): literal[string] identifier[slice_length] = literal[int] keyword[if] identifier[len] ( identifier[self] . identifier[Data] )>= literal[int] keyword[else] identifier[len] ( identifier[self] . identifier[Data] ) keyword[return] identifier[int] . identifier[from_bytes] ( identifier[self] . identifier[Data] [: identifier[slice_length] ], literal[string] )
def GetHashCode(self): """uint32 identifier""" slice_length = 4 if len(self.Data) >= 4 else len(self.Data) return int.from_bytes(self.Data[:slice_length], 'little')
def rotate(self, angle, axis=(0, 0, 1)): """Returns the rotated vector. Assumes angle is in radians""" if not all(isinstance(a, int) for a in axis): raise ValueError x, y, z = self.x, self.y, self.z # Z axis rotation if(axis[2]): x = (self.x * math.cos(angle) - self.y * math.sin(angle)) y = (self.x * math.sin(angle) + self.y * math.cos(angle)) # Y axis rotation if(axis[1]): x = self.x * math.cos(angle) + self.z * math.sin(angle) z = -self.x * math.sin(angle) + self.z * math.cos(angle) # X axis rotation if(axis[0]): y = self.y * math.cos(angle) - self.z * math.sin(angle) z = self.y * math.sin(angle) + self.z * math.cos(angle) return Vector(x, y, z)
def function[rotate, parameter[self, angle, axis]]: constant[Returns the rotated vector. Assumes angle is in radians] if <ast.UnaryOp object at 0x7da1b0e62e00> begin[:] <ast.Raise object at 0x7da1b0e63610> <ast.Tuple object at 0x7da1b0e61e10> assign[=] tuple[[<ast.Attribute object at 0x7da1b0e62c50>, <ast.Attribute object at 0x7da1b0e618a0>, <ast.Attribute object at 0x7da1b0e618d0>]] if call[name[axis]][constant[2]] begin[:] variable[x] assign[=] binary_operation[binary_operation[name[self].x * call[name[math].cos, parameter[name[angle]]]] - binary_operation[name[self].y * call[name[math].sin, parameter[name[angle]]]]] variable[y] assign[=] binary_operation[binary_operation[name[self].x * call[name[math].sin, parameter[name[angle]]]] + binary_operation[name[self].y * call[name[math].cos, parameter[name[angle]]]]] if call[name[axis]][constant[1]] begin[:] variable[x] assign[=] binary_operation[binary_operation[name[self].x * call[name[math].cos, parameter[name[angle]]]] + binary_operation[name[self].z * call[name[math].sin, parameter[name[angle]]]]] variable[z] assign[=] binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b0e60fa0> * call[name[math].sin, parameter[name[angle]]]] + binary_operation[name[self].z * call[name[math].cos, parameter[name[angle]]]]] if call[name[axis]][constant[0]] begin[:] variable[y] assign[=] binary_operation[binary_operation[name[self].y * call[name[math].cos, parameter[name[angle]]]] - binary_operation[name[self].z * call[name[math].sin, parameter[name[angle]]]]] variable[z] assign[=] binary_operation[binary_operation[name[self].y * call[name[math].sin, parameter[name[angle]]]] + binary_operation[name[self].z * call[name[math].cos, parameter[name[angle]]]]] return[call[name[Vector], parameter[name[x], name[y], name[z]]]]
keyword[def] identifier[rotate] ( identifier[self] , identifier[angle] , identifier[axis] =( literal[int] , literal[int] , literal[int] )): literal[string] keyword[if] keyword[not] identifier[all] ( identifier[isinstance] ( identifier[a] , identifier[int] ) keyword[for] identifier[a] keyword[in] identifier[axis] ): keyword[raise] identifier[ValueError] identifier[x] , identifier[y] , identifier[z] = identifier[self] . identifier[x] , identifier[self] . identifier[y] , identifier[self] . identifier[z] keyword[if] ( identifier[axis] [ literal[int] ]): identifier[x] =( identifier[self] . identifier[x] * identifier[math] . identifier[cos] ( identifier[angle] )- identifier[self] . identifier[y] * identifier[math] . identifier[sin] ( identifier[angle] )) identifier[y] =( identifier[self] . identifier[x] * identifier[math] . identifier[sin] ( identifier[angle] )+ identifier[self] . identifier[y] * identifier[math] . identifier[cos] ( identifier[angle] )) keyword[if] ( identifier[axis] [ literal[int] ]): identifier[x] = identifier[self] . identifier[x] * identifier[math] . identifier[cos] ( identifier[angle] )+ identifier[self] . identifier[z] * identifier[math] . identifier[sin] ( identifier[angle] ) identifier[z] =- identifier[self] . identifier[x] * identifier[math] . identifier[sin] ( identifier[angle] )+ identifier[self] . identifier[z] * identifier[math] . identifier[cos] ( identifier[angle] ) keyword[if] ( identifier[axis] [ literal[int] ]): identifier[y] = identifier[self] . identifier[y] * identifier[math] . identifier[cos] ( identifier[angle] )- identifier[self] . identifier[z] * identifier[math] . identifier[sin] ( identifier[angle] ) identifier[z] = identifier[self] . identifier[y] * identifier[math] . identifier[sin] ( identifier[angle] )+ identifier[self] . identifier[z] * identifier[math] . identifier[cos] ( identifier[angle] ) keyword[return] identifier[Vector] ( identifier[x] , identifier[y] , identifier[z] )
def rotate(self, angle, axis=(0, 0, 1)): """Returns the rotated vector. Assumes angle is in radians""" if not all((isinstance(a, int) for a in axis)): raise ValueError # depends on [control=['if'], data=[]] (x, y, z) = (self.x, self.y, self.z) # Z axis rotation if axis[2]: x = self.x * math.cos(angle) - self.y * math.sin(angle) y = self.x * math.sin(angle) + self.y * math.cos(angle) # depends on [control=['if'], data=[]] # Y axis rotation if axis[1]: x = self.x * math.cos(angle) + self.z * math.sin(angle) z = -self.x * math.sin(angle) + self.z * math.cos(angle) # depends on [control=['if'], data=[]] # X axis rotation if axis[0]: y = self.y * math.cos(angle) - self.z * math.sin(angle) z = self.y * math.sin(angle) + self.z * math.cos(angle) # depends on [control=['if'], data=[]] return Vector(x, y, z)
def _find_position_of_minute(self, minute_dt): """ Internal method that returns the position of the given minute in the list of every trading minute since market open of the first trading day. Adjusts non market minutes to the last close. ex. this method would return 1 for 2002-01-02 9:32 AM Eastern, if 2002-01-02 is the first trading day of the dataset. Parameters ---------- minute_dt: pd.Timestamp The minute whose position should be calculated. Returns ------- int: The position of the given minute in the list of all trading minutes since market open on the first trading day. """ return find_position_of_minute( self._market_open_values, self._market_close_values, minute_dt.value / NANOS_IN_MINUTE, self._minutes_per_day, False, )
def function[_find_position_of_minute, parameter[self, minute_dt]]: constant[ Internal method that returns the position of the given minute in the list of every trading minute since market open of the first trading day. Adjusts non market minutes to the last close. ex. this method would return 1 for 2002-01-02 9:32 AM Eastern, if 2002-01-02 is the first trading day of the dataset. Parameters ---------- minute_dt: pd.Timestamp The minute whose position should be calculated. Returns ------- int: The position of the given minute in the list of all trading minutes since market open on the first trading day. ] return[call[name[find_position_of_minute], parameter[name[self]._market_open_values, name[self]._market_close_values, binary_operation[name[minute_dt].value / name[NANOS_IN_MINUTE]], name[self]._minutes_per_day, constant[False]]]]
keyword[def] identifier[_find_position_of_minute] ( identifier[self] , identifier[minute_dt] ): literal[string] keyword[return] identifier[find_position_of_minute] ( identifier[self] . identifier[_market_open_values] , identifier[self] . identifier[_market_close_values] , identifier[minute_dt] . identifier[value] / identifier[NANOS_IN_MINUTE] , identifier[self] . identifier[_minutes_per_day] , keyword[False] , )
def _find_position_of_minute(self, minute_dt): """ Internal method that returns the position of the given minute in the list of every trading minute since market open of the first trading day. Adjusts non market minutes to the last close. ex. this method would return 1 for 2002-01-02 9:32 AM Eastern, if 2002-01-02 is the first trading day of the dataset. Parameters ---------- minute_dt: pd.Timestamp The minute whose position should be calculated. Returns ------- int: The position of the given minute in the list of all trading minutes since market open on the first trading day. """ return find_position_of_minute(self._market_open_values, self._market_close_values, minute_dt.value / NANOS_IN_MINUTE, self._minutes_per_day, False)
def generateRandomSDRVaryingSparsity(numSDR, numDims, minSparsity, maxSparsity, seed=42): """ Generate a set of random SDRs with varying sparsity :param numSDR: number of SDRs :param numDims: length of SDRs :param minSparsity: minimum sparsity :param maxSparsity: maximum sparsity :param seed: """ randomSDRs = np.zeros((numSDR, numDims), dtype=uintType) indices = np.array(range(numDims)) np.random.seed(seed) for i in range(numSDR): sparsity = np.random.random() * (maxSparsity - minSparsity) + minSparsity numActiveInputBits = int(sparsity * numDims) randomIndices = np.random.permutation(indices) activeBits = randomIndices[:numActiveInputBits] randomSDRs[i, activeBits] = 1 return randomSDRs
def function[generateRandomSDRVaryingSparsity, parameter[numSDR, numDims, minSparsity, maxSparsity, seed]]: constant[ Generate a set of random SDRs with varying sparsity :param numSDR: number of SDRs :param numDims: length of SDRs :param minSparsity: minimum sparsity :param maxSparsity: maximum sparsity :param seed: ] variable[randomSDRs] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b088a140>, <ast.Name object at 0x7da1b088a830>]]]] variable[indices] assign[=] call[name[np].array, parameter[call[name[range], parameter[name[numDims]]]]] call[name[np].random.seed, parameter[name[seed]]] for taget[name[i]] in starred[call[name[range], parameter[name[numSDR]]]] begin[:] variable[sparsity] assign[=] binary_operation[binary_operation[call[name[np].random.random, parameter[]] * binary_operation[name[maxSparsity] - name[minSparsity]]] + name[minSparsity]] variable[numActiveInputBits] assign[=] call[name[int], parameter[binary_operation[name[sparsity] * name[numDims]]]] variable[randomIndices] assign[=] call[name[np].random.permutation, parameter[name[indices]]] variable[activeBits] assign[=] call[name[randomIndices]][<ast.Slice object at 0x7da1b0889720>] call[name[randomSDRs]][tuple[[<ast.Name object at 0x7da1b088b490>, <ast.Name object at 0x7da1b088a8f0>]]] assign[=] constant[1] return[name[randomSDRs]]
keyword[def] identifier[generateRandomSDRVaryingSparsity] ( identifier[numSDR] , identifier[numDims] , identifier[minSparsity] , identifier[maxSparsity] , identifier[seed] = literal[int] ): literal[string] identifier[randomSDRs] = identifier[np] . identifier[zeros] (( identifier[numSDR] , identifier[numDims] ), identifier[dtype] = identifier[uintType] ) identifier[indices] = identifier[np] . identifier[array] ( identifier[range] ( identifier[numDims] )) identifier[np] . identifier[random] . identifier[seed] ( identifier[seed] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[numSDR] ): identifier[sparsity] = identifier[np] . identifier[random] . identifier[random] ()*( identifier[maxSparsity] - identifier[minSparsity] )+ identifier[minSparsity] identifier[numActiveInputBits] = identifier[int] ( identifier[sparsity] * identifier[numDims] ) identifier[randomIndices] = identifier[np] . identifier[random] . identifier[permutation] ( identifier[indices] ) identifier[activeBits] = identifier[randomIndices] [: identifier[numActiveInputBits] ] identifier[randomSDRs] [ identifier[i] , identifier[activeBits] ]= literal[int] keyword[return] identifier[randomSDRs]
def generateRandomSDRVaryingSparsity(numSDR, numDims, minSparsity, maxSparsity, seed=42): """ Generate a set of random SDRs with varying sparsity :param numSDR: number of SDRs :param numDims: length of SDRs :param minSparsity: minimum sparsity :param maxSparsity: maximum sparsity :param seed: """ randomSDRs = np.zeros((numSDR, numDims), dtype=uintType) indices = np.array(range(numDims)) np.random.seed(seed) for i in range(numSDR): sparsity = np.random.random() * (maxSparsity - minSparsity) + minSparsity numActiveInputBits = int(sparsity * numDims) randomIndices = np.random.permutation(indices) activeBits = randomIndices[:numActiveInputBits] randomSDRs[i, activeBits] = 1 # depends on [control=['for'], data=['i']] return randomSDRs
def set_idlepc(self, idlepc): """ Sets the idle Pointer Counter (PC) :param idlepc: idlepc value (string) """ if not idlepc: idlepc = "0x0" is_running = yield from self.is_running() if not is_running: # router is not running yield from self._hypervisor.send('vm set_idle_pc "{name}" {idlepc}'.format(name=self._name, idlepc=idlepc)) else: yield from self._hypervisor.send('vm set_idle_pc_online "{name}" 0 {idlepc}'.format(name=self._name, idlepc=idlepc)) log.info('Router "{name}" [{id}]: idle-PC set to {idlepc}'.format(name=self._name, id=self._id, idlepc=idlepc)) self._idlepc = idlepc
def function[set_idlepc, parameter[self, idlepc]]: constant[ Sets the idle Pointer Counter (PC) :param idlepc: idlepc value (string) ] if <ast.UnaryOp object at 0x7da204566890> begin[:] variable[idlepc] assign[=] constant[0x0] variable[is_running] assign[=] <ast.YieldFrom object at 0x7da2044c23b0> if <ast.UnaryOp object at 0x7da2044c3f70> begin[:] <ast.YieldFrom object at 0x7da2044c2e30> call[name[log].info, parameter[call[constant[Router "{name}" [{id}]: idle-PC set to {idlepc}].format, parameter[]]]] name[self]._idlepc assign[=] name[idlepc]
keyword[def] identifier[set_idlepc] ( identifier[self] , identifier[idlepc] ): literal[string] keyword[if] keyword[not] identifier[idlepc] : identifier[idlepc] = literal[string] identifier[is_running] = keyword[yield] keyword[from] identifier[self] . identifier[is_running] () keyword[if] keyword[not] identifier[is_running] : keyword[yield] keyword[from] identifier[self] . identifier[_hypervisor] . identifier[send] ( literal[string] . identifier[format] ( identifier[name] = identifier[self] . identifier[_name] , identifier[idlepc] = identifier[idlepc] )) keyword[else] : keyword[yield] keyword[from] identifier[self] . identifier[_hypervisor] . identifier[send] ( literal[string] . identifier[format] ( identifier[name] = identifier[self] . identifier[_name] , identifier[idlepc] = identifier[idlepc] )) identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[name] = identifier[self] . identifier[_name] , identifier[id] = identifier[self] . identifier[_id] , identifier[idlepc] = identifier[idlepc] )) identifier[self] . identifier[_idlepc] = identifier[idlepc]
def set_idlepc(self, idlepc): """ Sets the idle Pointer Counter (PC) :param idlepc: idlepc value (string) """ if not idlepc: idlepc = '0x0' # depends on [control=['if'], data=[]] is_running = (yield from self.is_running()) if not is_running: # router is not running yield from self._hypervisor.send('vm set_idle_pc "{name}" {idlepc}'.format(name=self._name, idlepc=idlepc)) # depends on [control=['if'], data=[]] else: yield from self._hypervisor.send('vm set_idle_pc_online "{name}" 0 {idlepc}'.format(name=self._name, idlepc=idlepc)) log.info('Router "{name}" [{id}]: idle-PC set to {idlepc}'.format(name=self._name, id=self._id, idlepc=idlepc)) self._idlepc = idlepc
def sys_hist(self, name=None, where=None): """ Return the effective total low and high histogram for a given systematic over samples in this channel. If a sample does not contain the named systematic then its nominal histogram is used for both low and high variations. Parameters ---------- name : string, optional (default=None) The systematic name otherwise nominal if None where : callable, optional (default=None) A callable taking one argument: the sample, and returns True if this sample should be included in the total. Returns ------- total_low, total_high : histograms The total low and high histograms for this systematic """ total_low, total_high = None, None for sample in self.samples: if where is not None and not where(sample): continue low, high = sample.sys_hist(name) if total_low is None: total_low = low.Clone(shallow=True) else: total_low += low if total_high is None: total_high = high.Clone(shallow=True) else: total_high += high return total_low, total_high
def function[sys_hist, parameter[self, name, where]]: constant[ Return the effective total low and high histogram for a given systematic over samples in this channel. If a sample does not contain the named systematic then its nominal histogram is used for both low and high variations. Parameters ---------- name : string, optional (default=None) The systematic name otherwise nominal if None where : callable, optional (default=None) A callable taking one argument: the sample, and returns True if this sample should be included in the total. Returns ------- total_low, total_high : histograms The total low and high histograms for this systematic ] <ast.Tuple object at 0x7da1b11d4910> assign[=] tuple[[<ast.Constant object at 0x7da1b11d4c70>, <ast.Constant object at 0x7da1b11d4340>]] for taget[name[sample]] in starred[name[self].samples] begin[:] if <ast.BoolOp object at 0x7da1b11d6140> begin[:] continue <ast.Tuple object at 0x7da1b11d5c60> assign[=] call[name[sample].sys_hist, parameter[name[name]]] if compare[name[total_low] is constant[None]] begin[:] variable[total_low] assign[=] call[name[low].Clone, parameter[]] if compare[name[total_high] is constant[None]] begin[:] variable[total_high] assign[=] call[name[high].Clone, parameter[]] return[tuple[[<ast.Name object at 0x7da1b11d7220>, <ast.Name object at 0x7da1b11d7310>]]]
keyword[def] identifier[sys_hist] ( identifier[self] , identifier[name] = keyword[None] , identifier[where] = keyword[None] ): literal[string] identifier[total_low] , identifier[total_high] = keyword[None] , keyword[None] keyword[for] identifier[sample] keyword[in] identifier[self] . identifier[samples] : keyword[if] identifier[where] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[where] ( identifier[sample] ): keyword[continue] identifier[low] , identifier[high] = identifier[sample] . identifier[sys_hist] ( identifier[name] ) keyword[if] identifier[total_low] keyword[is] keyword[None] : identifier[total_low] = identifier[low] . identifier[Clone] ( identifier[shallow] = keyword[True] ) keyword[else] : identifier[total_low] += identifier[low] keyword[if] identifier[total_high] keyword[is] keyword[None] : identifier[total_high] = identifier[high] . identifier[Clone] ( identifier[shallow] = keyword[True] ) keyword[else] : identifier[total_high] += identifier[high] keyword[return] identifier[total_low] , identifier[total_high]
def sys_hist(self, name=None, where=None): """ Return the effective total low and high histogram for a given systematic over samples in this channel. If a sample does not contain the named systematic then its nominal histogram is used for both low and high variations. Parameters ---------- name : string, optional (default=None) The systematic name otherwise nominal if None where : callable, optional (default=None) A callable taking one argument: the sample, and returns True if this sample should be included in the total. Returns ------- total_low, total_high : histograms The total low and high histograms for this systematic """ (total_low, total_high) = (None, None) for sample in self.samples: if where is not None and (not where(sample)): continue # depends on [control=['if'], data=[]] (low, high) = sample.sys_hist(name) if total_low is None: total_low = low.Clone(shallow=True) # depends on [control=['if'], data=['total_low']] else: total_low += low if total_high is None: total_high = high.Clone(shallow=True) # depends on [control=['if'], data=['total_high']] else: total_high += high # depends on [control=['for'], data=['sample']] return (total_low, total_high)
def extract_spellout_values(text): """Convert spelled out numbers in a given text to digits.""" values = [] for item in r.REG_TXT.finditer(text): surface, span = clean_surface(item.group(0), item.span()) if not surface or surface.lower() in r.SCALES: continue curr = result = 0.0 for word in surface.split(): try: scale, increment = 1, float(word.lower()) except ValueError: scale, increment = r.NUMWORDS[word.lower()] curr = curr * scale + increment if scale > 100: result += curr curr = 0.0 values.append({'old_surface': surface, 'old_span': span, 'new_surface': unicode(result + curr)}) for item in re.finditer(r'\d+(,\d{3})+', text): values.append({'old_surface': item.group(0), 'old_span': item.span(), 'new_surface': unicode(item.group(0).replace(',', ''))}) return sorted(values, key=lambda x: x['old_span'][0])
def function[extract_spellout_values, parameter[text]]: constant[Convert spelled out numbers in a given text to digits.] variable[values] assign[=] list[[]] for taget[name[item]] in starred[call[name[r].REG_TXT.finditer, parameter[name[text]]]] begin[:] <ast.Tuple object at 0x7da18dc070d0> assign[=] call[name[clean_surface], parameter[call[name[item].group, parameter[constant[0]]], call[name[item].span, parameter[]]]] if <ast.BoolOp object at 0x7da18dc07550> begin[:] continue variable[curr] assign[=] constant[0.0] for taget[name[word]] in starred[call[name[surface].split, parameter[]]] begin[:] <ast.Try object at 0x7da18dc07f70> variable[curr] assign[=] binary_operation[binary_operation[name[curr] * name[scale]] + name[increment]] if compare[name[scale] greater[>] constant[100]] begin[:] <ast.AugAssign object at 0x7da2054a7490> variable[curr] assign[=] constant[0.0] call[name[values].append, parameter[dictionary[[<ast.Constant object at 0x7da2054a76d0>, <ast.Constant object at 0x7da2054a6290>, <ast.Constant object at 0x7da20eb29fc0>], [<ast.Name object at 0x7da20eb2b070>, <ast.Name object at 0x7da20eb2a140>, <ast.Call object at 0x7da20eb29630>]]]] for taget[name[item]] in starred[call[name[re].finditer, parameter[constant[\d+(,\d{3})+], name[text]]]] begin[:] call[name[values].append, parameter[dictionary[[<ast.Constant object at 0x7da20eb2ada0>, <ast.Constant object at 0x7da20c993a90>, <ast.Constant object at 0x7da20c992f80>], [<ast.Call object at 0x7da20c990a00>, <ast.Call object at 0x7da20c9936d0>, <ast.Call object at 0x7da20c992a70>]]]] return[call[name[sorted], parameter[name[values]]]]
keyword[def] identifier[extract_spellout_values] ( identifier[text] ): literal[string] identifier[values] =[] keyword[for] identifier[item] keyword[in] identifier[r] . identifier[REG_TXT] . identifier[finditer] ( identifier[text] ): identifier[surface] , identifier[span] = identifier[clean_surface] ( identifier[item] . identifier[group] ( literal[int] ), identifier[item] . identifier[span] ()) keyword[if] keyword[not] identifier[surface] keyword[or] identifier[surface] . identifier[lower] () keyword[in] identifier[r] . identifier[SCALES] : keyword[continue] identifier[curr] = identifier[result] = literal[int] keyword[for] identifier[word] keyword[in] identifier[surface] . identifier[split] (): keyword[try] : identifier[scale] , identifier[increment] = literal[int] , identifier[float] ( identifier[word] . identifier[lower] ()) keyword[except] identifier[ValueError] : identifier[scale] , identifier[increment] = identifier[r] . identifier[NUMWORDS] [ identifier[word] . identifier[lower] ()] identifier[curr] = identifier[curr] * identifier[scale] + identifier[increment] keyword[if] identifier[scale] > literal[int] : identifier[result] += identifier[curr] identifier[curr] = literal[int] identifier[values] . identifier[append] ({ literal[string] : identifier[surface] , literal[string] : identifier[span] , literal[string] : identifier[unicode] ( identifier[result] + identifier[curr] )}) keyword[for] identifier[item] keyword[in] identifier[re] . identifier[finditer] ( literal[string] , identifier[text] ): identifier[values] . identifier[append] ({ literal[string] : identifier[item] . identifier[group] ( literal[int] ), literal[string] : identifier[item] . identifier[span] (), literal[string] : identifier[unicode] ( identifier[item] . identifier[group] ( literal[int] ). 
identifier[replace] ( literal[string] , literal[string] ))}) keyword[return] identifier[sorted] ( identifier[values] , identifier[key] = keyword[lambda] identifier[x] : identifier[x] [ literal[string] ][ literal[int] ])
def extract_spellout_values(text): """Convert spelled out numbers in a given text to digits.""" values = [] for item in r.REG_TXT.finditer(text): (surface, span) = clean_surface(item.group(0), item.span()) if not surface or surface.lower() in r.SCALES: continue # depends on [control=['if'], data=[]] curr = result = 0.0 for word in surface.split(): try: (scale, increment) = (1, float(word.lower())) # depends on [control=['try'], data=[]] except ValueError: (scale, increment) = r.NUMWORDS[word.lower()] # depends on [control=['except'], data=[]] curr = curr * scale + increment if scale > 100: result += curr curr = 0.0 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['word']] values.append({'old_surface': surface, 'old_span': span, 'new_surface': unicode(result + curr)}) # depends on [control=['for'], data=['item']] for item in re.finditer('\\d+(,\\d{3})+', text): values.append({'old_surface': item.group(0), 'old_span': item.span(), 'new_surface': unicode(item.group(0).replace(',', ''))}) # depends on [control=['for'], data=['item']] return sorted(values, key=lambda x: x['old_span'][0])
def dragMoveEvent( self, event ): """ Handles the drag move event. :param event | <QDragEvent> """ tags = nativestring(event.mimeData().text()) if ( event.source() == self ): event.acceptProposedAction() elif ( tags ): event.acceptProposedAction() else: super(XMultiTagEdit, self).dragMoveEvent(event)
def function[dragMoveEvent, parameter[self, event]]: constant[ Handles the drag move event. :param event | <QDragEvent> ] variable[tags] assign[=] call[name[nativestring], parameter[call[call[name[event].mimeData, parameter[]].text, parameter[]]]] if compare[call[name[event].source, parameter[]] equal[==] name[self]] begin[:] call[name[event].acceptProposedAction, parameter[]]
keyword[def] identifier[dragMoveEvent] ( identifier[self] , identifier[event] ): literal[string] identifier[tags] = identifier[nativestring] ( identifier[event] . identifier[mimeData] (). identifier[text] ()) keyword[if] ( identifier[event] . identifier[source] ()== identifier[self] ): identifier[event] . identifier[acceptProposedAction] () keyword[elif] ( identifier[tags] ): identifier[event] . identifier[acceptProposedAction] () keyword[else] : identifier[super] ( identifier[XMultiTagEdit] , identifier[self] ). identifier[dragMoveEvent] ( identifier[event] )
def dragMoveEvent(self, event): """ Handles the drag move event. :param event | <QDragEvent> """ tags = nativestring(event.mimeData().text()) if event.source() == self: event.acceptProposedAction() # depends on [control=['if'], data=[]] elif tags: event.acceptProposedAction() # depends on [control=['if'], data=[]] else: super(XMultiTagEdit, self).dragMoveEvent(event)
def latex2png(snippet, outfile): """Compiles a LaTeX snippet to png""" pngimage = os.path.join(IMAGEDIR, outfile + '.png') texdocument = os.path.join(IMAGEDIR, 'tmp.tex') with open(texdocument, 'w') as doc: doc.write(LATEX_DOC % (snippet)) environment = os.environ environment['shell_escape_commands'] = \ "bibtex,bibtex8,kpsewhich,makeindex,mpost,repstopdf," + \ ','.join( os.path.basename(n) for n in chain.from_iterable( iglob(os.path.join(chemin, 'gregorio*')) for chemin in os.environ["PATH"].split(os.pathsep) ) ) proc = Popen( ["lualatex", '-output-directory=' + IMAGEDIR, texdocument], stdin=PIPE, stdout=STDERR, env=environment ) proc.communicate() proc.stdin.close() call(["pdfcrop", os.path.join(IMAGEDIR, "tmp.pdf")], stdout=STDERR) call( [ "gs", "-sDEVICE=pngalpha", "-r144", "-sOutputFile=" + pngimage, os.path.join(IMAGEDIR, "tmp-crop.pdf"), ], stdout=STDERR, )
def function[latex2png, parameter[snippet, outfile]]: constant[Compiles a LaTeX snippet to png] variable[pngimage] assign[=] call[name[os].path.join, parameter[name[IMAGEDIR], binary_operation[name[outfile] + constant[.png]]]] variable[texdocument] assign[=] call[name[os].path.join, parameter[name[IMAGEDIR], constant[tmp.tex]]] with call[name[open], parameter[name[texdocument], constant[w]]] begin[:] call[name[doc].write, parameter[binary_operation[name[LATEX_DOC] <ast.Mod object at 0x7da2590d6920> name[snippet]]]] variable[environment] assign[=] name[os].environ call[name[environment]][constant[shell_escape_commands]] assign[=] binary_operation[constant[bibtex,bibtex8,kpsewhich,makeindex,mpost,repstopdf,] + call[constant[,].join, parameter[<ast.GeneratorExp object at 0x7da18bcc87f0>]]] variable[proc] assign[=] call[name[Popen], parameter[list[[<ast.Constant object at 0x7da18bcca2c0>, <ast.BinOp object at 0x7da18bcca500>, <ast.Name object at 0x7da18bcca710>]]]] call[name[proc].communicate, parameter[]] call[name[proc].stdin.close, parameter[]] call[name[call], parameter[list[[<ast.Constant object at 0x7da18bcc8610>, <ast.Call object at 0x7da20c6e6c80>]]]] call[name[call], parameter[list[[<ast.Constant object at 0x7da18bcc8df0>, <ast.Constant object at 0x7da18bcca8f0>, <ast.Constant object at 0x7da18bcc9f00>, <ast.BinOp object at 0x7da18bccafb0>, <ast.Call object at 0x7da18bcc8670>]]]]
keyword[def] identifier[latex2png] ( identifier[snippet] , identifier[outfile] ): literal[string] identifier[pngimage] = identifier[os] . identifier[path] . identifier[join] ( identifier[IMAGEDIR] , identifier[outfile] + literal[string] ) identifier[texdocument] = identifier[os] . identifier[path] . identifier[join] ( identifier[IMAGEDIR] , literal[string] ) keyword[with] identifier[open] ( identifier[texdocument] , literal[string] ) keyword[as] identifier[doc] : identifier[doc] . identifier[write] ( identifier[LATEX_DOC] %( identifier[snippet] )) identifier[environment] = identifier[os] . identifier[environ] identifier[environment] [ literal[string] ]= literal[string] + literal[string] . identifier[join] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[n] ) keyword[for] identifier[n] keyword[in] identifier[chain] . identifier[from_iterable] ( identifier[iglob] ( identifier[os] . identifier[path] . identifier[join] ( identifier[chemin] , literal[string] )) keyword[for] identifier[chemin] keyword[in] identifier[os] . identifier[environ] [ literal[string] ]. identifier[split] ( identifier[os] . identifier[pathsep] ) ) ) identifier[proc] = identifier[Popen] ( [ literal[string] , literal[string] + identifier[IMAGEDIR] , identifier[texdocument] ], identifier[stdin] = identifier[PIPE] , identifier[stdout] = identifier[STDERR] , identifier[env] = identifier[environment] ) identifier[proc] . identifier[communicate] () identifier[proc] . identifier[stdin] . identifier[close] () identifier[call] ([ literal[string] , identifier[os] . identifier[path] . identifier[join] ( identifier[IMAGEDIR] , literal[string] )], identifier[stdout] = identifier[STDERR] ) identifier[call] ( [ literal[string] , literal[string] , literal[string] , literal[string] + identifier[pngimage] , identifier[os] . identifier[path] . identifier[join] ( identifier[IMAGEDIR] , literal[string] ), ], identifier[stdout] = identifier[STDERR] , )
def latex2png(snippet, outfile): """Compiles a LaTeX snippet to png""" pngimage = os.path.join(IMAGEDIR, outfile + '.png') texdocument = os.path.join(IMAGEDIR, 'tmp.tex') with open(texdocument, 'w') as doc: doc.write(LATEX_DOC % snippet) # depends on [control=['with'], data=['doc']] environment = os.environ environment['shell_escape_commands'] = 'bibtex,bibtex8,kpsewhich,makeindex,mpost,repstopdf,' + ','.join((os.path.basename(n) for n in chain.from_iterable((iglob(os.path.join(chemin, 'gregorio*')) for chemin in os.environ['PATH'].split(os.pathsep))))) proc = Popen(['lualatex', '-output-directory=' + IMAGEDIR, texdocument], stdin=PIPE, stdout=STDERR, env=environment) proc.communicate() proc.stdin.close() call(['pdfcrop', os.path.join(IMAGEDIR, 'tmp.pdf')], stdout=STDERR) call(['gs', '-sDEVICE=pngalpha', '-r144', '-sOutputFile=' + pngimage, os.path.join(IMAGEDIR, 'tmp-crop.pdf')], stdout=STDERR)
def get_triggers(self): """ Retrieves all of the user's triggers that are set on the Weather Alert API. :returns: list of `pyowm.alertapi30.trigger.Trigger` objects """ status, data = self.http_client.get_json( TRIGGERS_URI, params={'appid': self.API_key}, headers={'Content-Type': 'application/json'}) return [self.trigger_parser.parse_dict(item) for item in data]
def function[get_triggers, parameter[self]]: constant[ Retrieves all of the user's triggers that are set on the Weather Alert API. :returns: list of `pyowm.alertapi30.trigger.Trigger` objects ] <ast.Tuple object at 0x7da18f58e710> assign[=] call[name[self].http_client.get_json, parameter[name[TRIGGERS_URI]]] return[<ast.ListComp object at 0x7da18f58c0d0>]
keyword[def] identifier[get_triggers] ( identifier[self] ): literal[string] identifier[status] , identifier[data] = identifier[self] . identifier[http_client] . identifier[get_json] ( identifier[TRIGGERS_URI] , identifier[params] ={ literal[string] : identifier[self] . identifier[API_key] }, identifier[headers] ={ literal[string] : literal[string] }) keyword[return] [ identifier[self] . identifier[trigger_parser] . identifier[parse_dict] ( identifier[item] ) keyword[for] identifier[item] keyword[in] identifier[data] ]
def get_triggers(self): """ Retrieves all of the user's triggers that are set on the Weather Alert API. :returns: list of `pyowm.alertapi30.trigger.Trigger` objects """ (status, data) = self.http_client.get_json(TRIGGERS_URI, params={'appid': self.API_key}, headers={'Content-Type': 'application/json'}) return [self.trigger_parser.parse_dict(item) for item in data]
def _raise_server_index(self): """Round robin magic: Raises the current redis server index and returns it""" self._current_server_index = (self._current_server_index + 1) % len(self._servers) return self._current_server_index
def function[_raise_server_index, parameter[self]]: constant[Round robin magic: Raises the current redis server index and returns it] name[self]._current_server_index assign[=] binary_operation[binary_operation[name[self]._current_server_index + constant[1]] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[self]._servers]]] return[name[self]._current_server_index]
keyword[def] identifier[_raise_server_index] ( identifier[self] ): literal[string] identifier[self] . identifier[_current_server_index] =( identifier[self] . identifier[_current_server_index] + literal[int] )% identifier[len] ( identifier[self] . identifier[_servers] ) keyword[return] identifier[self] . identifier[_current_server_index]
def _raise_server_index(self): """Round robin magic: Raises the current redis server index and returns it""" self._current_server_index = (self._current_server_index + 1) % len(self._servers) return self._current_server_index
def try_storage(self, identifier, req, resp, resource, uri_kwargs): """Try to find user in configured user storage object. Args: identifier: User identifier. Returns: user object. """ if identifier is None: user = None # note: if user_storage is defined, always use it in order to # authenticate user. elif self.user_storage is not None: user = self.user_storage.get_user( self, identifier, req, resp, resource, uri_kwargs ) # note: some authentication middleware classes may not require # to be initialized with their own user_storage. In such # case this will always authenticate with "syntetic user" # if there is a valid indentity. elif self.user_storage is None and not self.only_with_storage: user = { 'identified_with': self, 'identifier': identifier } else: # pragma: nocover # note: this should not happen if the base class is properly # initialized. Still, user can skip super().__init__() call. user = None return user
def function[try_storage, parameter[self, identifier, req, resp, resource, uri_kwargs]]: constant[Try to find user in configured user storage object. Args: identifier: User identifier. Returns: user object. ] if compare[name[identifier] is constant[None]] begin[:] variable[user] assign[=] constant[None] return[name[user]]
keyword[def] identifier[try_storage] ( identifier[self] , identifier[identifier] , identifier[req] , identifier[resp] , identifier[resource] , identifier[uri_kwargs] ): literal[string] keyword[if] identifier[identifier] keyword[is] keyword[None] : identifier[user] = keyword[None] keyword[elif] identifier[self] . identifier[user_storage] keyword[is] keyword[not] keyword[None] : identifier[user] = identifier[self] . identifier[user_storage] . identifier[get_user] ( identifier[self] , identifier[identifier] , identifier[req] , identifier[resp] , identifier[resource] , identifier[uri_kwargs] ) keyword[elif] identifier[self] . identifier[user_storage] keyword[is] keyword[None] keyword[and] keyword[not] identifier[self] . identifier[only_with_storage] : identifier[user] ={ literal[string] : identifier[self] , literal[string] : identifier[identifier] } keyword[else] : identifier[user] = keyword[None] keyword[return] identifier[user]
def try_storage(self, identifier, req, resp, resource, uri_kwargs): """Try to find user in configured user storage object. Args: identifier: User identifier. Returns: user object. """ if identifier is None: user = None # depends on [control=['if'], data=[]] # note: if user_storage is defined, always use it in order to # authenticate user. elif self.user_storage is not None: user = self.user_storage.get_user(self, identifier, req, resp, resource, uri_kwargs) # depends on [control=['if'], data=[]] # note: some authentication middleware classes may not require # to be initialized with their own user_storage. In such # case this will always authenticate with "syntetic user" # if there is a valid indentity. elif self.user_storage is None and (not self.only_with_storage): user = {'identified_with': self, 'identifier': identifier} # depends on [control=['if'], data=[]] else: # pragma: nocover # note: this should not happen if the base class is properly # initialized. Still, user can skip super().__init__() call. user = None return user
def read(self, config_dir=None, clear=False, config_file=None): """ The munge Config's read function only allows to read from a config directory, but we also want to be able to read straight from a config file as well """ if config_file: data_file = os.path.basename(config_file) data_path = os.path.dirname(config_file) if clear: self.clear() config = munge.load_datafile(data_file, data_path, default=None) if not config: raise IOError("Config file not found: %s" % config_file) munge.util.recursive_update(self.data, config) self._meta_config_dir = data_path return else: return super(Config, self).read(config_dir=config_dir, clear=clear)
def function[read, parameter[self, config_dir, clear, config_file]]: constant[ The munge Config's read function only allows to read from a config directory, but we also want to be able to read straight from a config file as well ] if name[config_file] begin[:] variable[data_file] assign[=] call[name[os].path.basename, parameter[name[config_file]]] variable[data_path] assign[=] call[name[os].path.dirname, parameter[name[config_file]]] if name[clear] begin[:] call[name[self].clear, parameter[]] variable[config] assign[=] call[name[munge].load_datafile, parameter[name[data_file], name[data_path]]] if <ast.UnaryOp object at 0x7da1b0a2eb60> begin[:] <ast.Raise object at 0x7da1b0a2f550> call[name[munge].util.recursive_update, parameter[name[self].data, name[config]]] name[self]._meta_config_dir assign[=] name[data_path] return[None]
keyword[def] identifier[read] ( identifier[self] , identifier[config_dir] = keyword[None] , identifier[clear] = keyword[False] , identifier[config_file] = keyword[None] ): literal[string] keyword[if] identifier[config_file] : identifier[data_file] = identifier[os] . identifier[path] . identifier[basename] ( identifier[config_file] ) identifier[data_path] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[config_file] ) keyword[if] identifier[clear] : identifier[self] . identifier[clear] () identifier[config] = identifier[munge] . identifier[load_datafile] ( identifier[data_file] , identifier[data_path] , identifier[default] = keyword[None] ) keyword[if] keyword[not] identifier[config] : keyword[raise] identifier[IOError] ( literal[string] % identifier[config_file] ) identifier[munge] . identifier[util] . identifier[recursive_update] ( identifier[self] . identifier[data] , identifier[config] ) identifier[self] . identifier[_meta_config_dir] = identifier[data_path] keyword[return] keyword[else] : keyword[return] identifier[super] ( identifier[Config] , identifier[self] ). identifier[read] ( identifier[config_dir] = identifier[config_dir] , identifier[clear] = identifier[clear] )
def read(self, config_dir=None, clear=False, config_file=None): """ The munge Config's read function only allows to read from a config directory, but we also want to be able to read straight from a config file as well """ if config_file: data_file = os.path.basename(config_file) data_path = os.path.dirname(config_file) if clear: self.clear() # depends on [control=['if'], data=[]] config = munge.load_datafile(data_file, data_path, default=None) if not config: raise IOError('Config file not found: %s' % config_file) # depends on [control=['if'], data=[]] munge.util.recursive_update(self.data, config) self._meta_config_dir = data_path return # depends on [control=['if'], data=[]] else: return super(Config, self).read(config_dir=config_dir, clear=clear)
def tree_to_stream(entries, write): """Write the give list of entries into a stream using its write method :param entries: **sorted** list of tuples with (binsha, mode, name) :param write: write method which takes a data string""" ord_zero = ord('0') bit_mask = 7 # 3 bits set for binsha, mode, name in entries: mode_str = b'' for i in xrange(6): mode_str = bchr(((mode >> (i * 3)) & bit_mask) + ord_zero) + mode_str # END for each 8 octal value # git slices away the first octal if its zero if byte_ord(mode_str[0]) == ord_zero: mode_str = mode_str[1:] # END save a byte # here it comes: if the name is actually unicode, the replacement below # will not work as the binsha is not part of the ascii unicode encoding - # hence we must convert to an utf8 string for it to work properly. # According to my tests, this is exactly what git does, that is it just # takes the input literally, which appears to be utf8 on linux. if isinstance(name, text_type): name = name.encode(defenc) write(b''.join((mode_str, b' ', name, b'\0', binsha)))
def function[tree_to_stream, parameter[entries, write]]: constant[Write the give list of entries into a stream using its write method :param entries: **sorted** list of tuples with (binsha, mode, name) :param write: write method which takes a data string] variable[ord_zero] assign[=] call[name[ord], parameter[constant[0]]] variable[bit_mask] assign[=] constant[7] for taget[tuple[[<ast.Name object at 0x7da1b1d5d8a0>, <ast.Name object at 0x7da1b1d5e800>, <ast.Name object at 0x7da1b1d5e0b0>]]] in starred[name[entries]] begin[:] variable[mode_str] assign[=] constant[b''] for taget[name[i]] in starred[call[name[xrange], parameter[constant[6]]]] begin[:] variable[mode_str] assign[=] binary_operation[call[name[bchr], parameter[binary_operation[binary_operation[binary_operation[name[mode] <ast.RShift object at 0x7da2590d6a40> binary_operation[name[i] * constant[3]]] <ast.BitAnd object at 0x7da2590d6b60> name[bit_mask]] + name[ord_zero]]]] + name[mode_str]] if compare[call[name[byte_ord], parameter[call[name[mode_str]][constant[0]]]] equal[==] name[ord_zero]] begin[:] variable[mode_str] assign[=] call[name[mode_str]][<ast.Slice object at 0x7da1b1dbf100>] if call[name[isinstance], parameter[name[name], name[text_type]]] begin[:] variable[name] assign[=] call[name[name].encode, parameter[name[defenc]]] call[name[write], parameter[call[constant[b''].join, parameter[tuple[[<ast.Name object at 0x7da1b1d5fa60>, <ast.Constant object at 0x7da1b1d5dd80>, <ast.Name object at 0x7da1b1d5e3e0>, <ast.Constant object at 0x7da1b1d5e440>, <ast.Name object at 0x7da1b1d5eb30>]]]]]]
keyword[def] identifier[tree_to_stream] ( identifier[entries] , identifier[write] ): literal[string] identifier[ord_zero] = identifier[ord] ( literal[string] ) identifier[bit_mask] = literal[int] keyword[for] identifier[binsha] , identifier[mode] , identifier[name] keyword[in] identifier[entries] : identifier[mode_str] = literal[string] keyword[for] identifier[i] keyword[in] identifier[xrange] ( literal[int] ): identifier[mode_str] = identifier[bchr] ((( identifier[mode] >>( identifier[i] * literal[int] ))& identifier[bit_mask] )+ identifier[ord_zero] )+ identifier[mode_str] keyword[if] identifier[byte_ord] ( identifier[mode_str] [ literal[int] ])== identifier[ord_zero] : identifier[mode_str] = identifier[mode_str] [ literal[int] :] keyword[if] identifier[isinstance] ( identifier[name] , identifier[text_type] ): identifier[name] = identifier[name] . identifier[encode] ( identifier[defenc] ) identifier[write] ( literal[string] . identifier[join] (( identifier[mode_str] , literal[string] , identifier[name] , literal[string] , identifier[binsha] )))
def tree_to_stream(entries, write): """Write the give list of entries into a stream using its write method :param entries: **sorted** list of tuples with (binsha, mode, name) :param write: write method which takes a data string""" ord_zero = ord('0') bit_mask = 7 # 3 bits set for (binsha, mode, name) in entries: mode_str = b'' for i in xrange(6): mode_str = bchr((mode >> i * 3 & bit_mask) + ord_zero) + mode_str # depends on [control=['for'], data=['i']] # END for each 8 octal value # git slices away the first octal if its zero if byte_ord(mode_str[0]) == ord_zero: mode_str = mode_str[1:] # depends on [control=['if'], data=[]] # END save a byte # here it comes: if the name is actually unicode, the replacement below # will not work as the binsha is not part of the ascii unicode encoding - # hence we must convert to an utf8 string for it to work properly. # According to my tests, this is exactly what git does, that is it just # takes the input literally, which appears to be utf8 on linux. if isinstance(name, text_type): name = name.encode(defenc) # depends on [control=['if'], data=[]] write(b''.join((mode_str, b' ', name, b'\x00', binsha))) # depends on [control=['for'], data=[]]
def build_job(self, jenkins_server): """ This function makes an API call to Jenkins to trigger a build for 'job_name' It returned a dict with 2 keys : body and headers. headers contains also a dict-like object which can be queried to get the location to poll in the queue. :param jenkins_server: The jenkins server where the job should be triggered :return: Dict containing the response body (key body) and the headers coming along (headers) """ # Warning if the parameter is too long, the URL can be longer than # the maximum allowed size if self.parameters and isinstance(self.parameters, six.string_types): import ast self.parameters = ast.literal_eval(self.parameters) if not self.parameters: # We need a None to call the non parametrized jenkins api end point self.parameters = None request = Request(jenkins_server.build_job_url(self.job_name, self.parameters, None)) return jenkins_request_with_headers(jenkins_server, request)
def function[build_job, parameter[self, jenkins_server]]: constant[ This function makes an API call to Jenkins to trigger a build for 'job_name' It returned a dict with 2 keys : body and headers. headers contains also a dict-like object which can be queried to get the location to poll in the queue. :param jenkins_server: The jenkins server where the job should be triggered :return: Dict containing the response body (key body) and the headers coming along (headers) ] if <ast.BoolOp object at 0x7da20e9600a0> begin[:] import module[ast] name[self].parameters assign[=] call[name[ast].literal_eval, parameter[name[self].parameters]] if <ast.UnaryOp object at 0x7da2054a5c30> begin[:] name[self].parameters assign[=] constant[None] variable[request] assign[=] call[name[Request], parameter[call[name[jenkins_server].build_job_url, parameter[name[self].job_name, name[self].parameters, constant[None]]]]] return[call[name[jenkins_request_with_headers], parameter[name[jenkins_server], name[request]]]]
keyword[def] identifier[build_job] ( identifier[self] , identifier[jenkins_server] ): literal[string] keyword[if] identifier[self] . identifier[parameters] keyword[and] identifier[isinstance] ( identifier[self] . identifier[parameters] , identifier[six] . identifier[string_types] ): keyword[import] identifier[ast] identifier[self] . identifier[parameters] = identifier[ast] . identifier[literal_eval] ( identifier[self] . identifier[parameters] ) keyword[if] keyword[not] identifier[self] . identifier[parameters] : identifier[self] . identifier[parameters] = keyword[None] identifier[request] = identifier[Request] ( identifier[jenkins_server] . identifier[build_job_url] ( identifier[self] . identifier[job_name] , identifier[self] . identifier[parameters] , keyword[None] )) keyword[return] identifier[jenkins_request_with_headers] ( identifier[jenkins_server] , identifier[request] )
def build_job(self, jenkins_server): """ This function makes an API call to Jenkins to trigger a build for 'job_name' It returned a dict with 2 keys : body and headers. headers contains also a dict-like object which can be queried to get the location to poll in the queue. :param jenkins_server: The jenkins server where the job should be triggered :return: Dict containing the response body (key body) and the headers coming along (headers) """ # Warning if the parameter is too long, the URL can be longer than # the maximum allowed size if self.parameters and isinstance(self.parameters, six.string_types): import ast self.parameters = ast.literal_eval(self.parameters) # depends on [control=['if'], data=[]] if not self.parameters: # We need a None to call the non parametrized jenkins api end point self.parameters = None # depends on [control=['if'], data=[]] request = Request(jenkins_server.build_job_url(self.job_name, self.parameters, None)) return jenkins_request_with_headers(jenkins_server, request)
def list_packet_names(self): """ Returns the existing packet names. :rtype: ~collections.Iterable[str] """ # Server does not do pagination on listings of this resource. # Return an iterator anyway for similarity with other API methods path = '/archive/{}/packet-names'.format(self._instance) response = self._client.get_proto(path=path) message = archive_pb2.GetPacketNamesResponse() message.ParseFromString(response.content) names = getattr(message, 'name') return iter(names)
def function[list_packet_names, parameter[self]]: constant[ Returns the existing packet names. :rtype: ~collections.Iterable[str] ] variable[path] assign[=] call[constant[/archive/{}/packet-names].format, parameter[name[self]._instance]] variable[response] assign[=] call[name[self]._client.get_proto, parameter[]] variable[message] assign[=] call[name[archive_pb2].GetPacketNamesResponse, parameter[]] call[name[message].ParseFromString, parameter[name[response].content]] variable[names] assign[=] call[name[getattr], parameter[name[message], constant[name]]] return[call[name[iter], parameter[name[names]]]]
keyword[def] identifier[list_packet_names] ( identifier[self] ): literal[string] identifier[path] = literal[string] . identifier[format] ( identifier[self] . identifier[_instance] ) identifier[response] = identifier[self] . identifier[_client] . identifier[get_proto] ( identifier[path] = identifier[path] ) identifier[message] = identifier[archive_pb2] . identifier[GetPacketNamesResponse] () identifier[message] . identifier[ParseFromString] ( identifier[response] . identifier[content] ) identifier[names] = identifier[getattr] ( identifier[message] , literal[string] ) keyword[return] identifier[iter] ( identifier[names] )
def list_packet_names(self): """ Returns the existing packet names. :rtype: ~collections.Iterable[str] """ # Server does not do pagination on listings of this resource. # Return an iterator anyway for similarity with other API methods path = '/archive/{}/packet-names'.format(self._instance) response = self._client.get_proto(path=path) message = archive_pb2.GetPacketNamesResponse() message.ParseFromString(response.content) names = getattr(message, 'name') return iter(names)
def find_sources_in_image(self, filename, hdu_index=0, outfile=None, rms=None, bkg=None, max_summits=None, innerclip=5, outerclip=4, cores=None, rmsin=None, bkgin=None, beam=None, doislandflux=False, nopositive=False, nonegative=False, mask=None, lat=None, imgpsf=None, blank=False, docov=True, cube_index=None): """ Run the Aegean source finder. Parameters ---------- filename : str or HDUList Image filename or HDUList. hdu_index : int The index of the FITS HDU (extension). outfile : str file for printing catalog (NOT a table, just a text file of my own design) rms : float Use this rms for the entire image (will also assume that background is 0) max_summits : int Fit up to this many components to each island (extras are included but not fit) innerclip, outerclip : float The seed (inner) and flood (outer) clipping level (sigmas). cores : int Number of CPU cores to use. None means all cores. rmsin, bkgin : str or HDUList Filename or HDUList for the noise and background images. If either are None, then it will be calculated internally. beam : (major, minor, pa) Floats representing the synthesised beam (degrees). Replaces whatever is given in the FITS header. If the FITS header has no BMAJ/BMIN then this is required. doislandflux : bool If True then each island will also be characterized. nopositive, nonegative : bool Whether to return positive or negative sources. Default nopositive=False, nonegative=True. mask : str The filename of a region file created by MIMAS. Islands outside of this region will be ignored. lat : float The latitude of the telescope (declination of zenith). imgpsf : str or HDUList Filename or HDUList for a psf image. blank : bool Cause the output image to be blanked where islands are found. docov : bool If True then include covariance matrix in the fitting process. (default=True) cube_index : int For image cubes, cube_index determines which slice is used. Returns ------- sources : list List of sources found. 
""" # Tell numpy to be quiet np.seterr(invalid='ignore') if cores is not None: if not (cores >= 1): raise AssertionError("cores must be one or more") self.load_globals(filename, hdu_index=hdu_index, bkgin=bkgin, rmsin=rmsin, beam=beam, rms=rms, bkg=bkg, cores=cores, verb=True, mask=mask, lat=lat, psf=imgpsf, blank=blank, docov=docov, cube_index=cube_index) global_data = self.global_data rmsimg = global_data.rmsimg data = global_data.data_pix self.log.info("beam = {0:5.2f}'' x {1:5.2f}'' at {2:5.2f}deg".format( global_data.beam.a * 3600, global_data.beam.b * 3600, global_data.beam.pa)) # stop people from doing silly things. if outerclip > innerclip: outerclip = innerclip self.log.info("seedclip={0}".format(innerclip)) self.log.info("floodclip={0}".format(outerclip)) isle_num = 0 if cores == 1: # single-threaded, no parallel processing queue = [] else: queue = pprocess.Queue(limit=cores, reuse=1) fit_parallel = queue.manage(pprocess.MakeReusable(self._fit_islands)) island_group = [] group_size = 20 for i, xmin, xmax, ymin, ymax in self._gen_flood_wrap(data, rmsimg, innerclip, outerclip, domask=True): # ignore empty islands # This should now be impossible to trigger if np.size(i) < 1: self.log.warn("Empty island detected, this should be imposisble.") continue isle_num += 1 scalars = (innerclip, outerclip, max_summits) offsets = (xmin, xmax, ymin, ymax) island_data = IslandFittingData(isle_num, i, scalars, offsets, doislandflux) # If cores==1 run fitting in main process. Otherwise build up groups of islands # and submit to queue for subprocesses. Passing a group of islands is more # efficient than passing single islands to the subprocesses. 
if cores == 1: res = self._fit_island(island_data) queue.append(res) else: island_group.append(island_data) # If the island group is full queue it for the subprocesses to fit if len(island_group) >= group_size: fit_parallel(island_group) island_group = [] # The last partially-filled island group also needs to be queued for fitting if len(island_group) > 0: fit_parallel(island_group) # Write the output to the output file if outfile: print(header.format("{0}-({1})".format(__version__, __date__), filename), file=outfile) print(OutputSource.header, file=outfile) sources = [] for srcs in queue: if srcs: # ignore empty lists for src in srcs: # ignore sources that we have been told to ignore if (src.peak_flux > 0 and nopositive) or (src.peak_flux < 0 and nonegative): continue sources.append(src) if outfile: print(str(src), file=outfile) self.sources.extend(sources) return sources
def function[find_sources_in_image, parameter[self, filename, hdu_index, outfile, rms, bkg, max_summits, innerclip, outerclip, cores, rmsin, bkgin, beam, doislandflux, nopositive, nonegative, mask, lat, imgpsf, blank, docov, cube_index]]: constant[ Run the Aegean source finder. Parameters ---------- filename : str or HDUList Image filename or HDUList. hdu_index : int The index of the FITS HDU (extension). outfile : str file for printing catalog (NOT a table, just a text file of my own design) rms : float Use this rms for the entire image (will also assume that background is 0) max_summits : int Fit up to this many components to each island (extras are included but not fit) innerclip, outerclip : float The seed (inner) and flood (outer) clipping level (sigmas). cores : int Number of CPU cores to use. None means all cores. rmsin, bkgin : str or HDUList Filename or HDUList for the noise and background images. If either are None, then it will be calculated internally. beam : (major, minor, pa) Floats representing the synthesised beam (degrees). Replaces whatever is given in the FITS header. If the FITS header has no BMAJ/BMIN then this is required. doislandflux : bool If True then each island will also be characterized. nopositive, nonegative : bool Whether to return positive or negative sources. Default nopositive=False, nonegative=True. mask : str The filename of a region file created by MIMAS. Islands outside of this region will be ignored. lat : float The latitude of the telescope (declination of zenith). imgpsf : str or HDUList Filename or HDUList for a psf image. blank : bool Cause the output image to be blanked where islands are found. docov : bool If True then include covariance matrix in the fitting process. (default=True) cube_index : int For image cubes, cube_index determines which slice is used. Returns ------- sources : list List of sources found. 
] call[name[np].seterr, parameter[]] if compare[name[cores] is_not constant[None]] begin[:] if <ast.UnaryOp object at 0x7da20e955090> begin[:] <ast.Raise object at 0x7da20e9545e0> call[name[self].load_globals, parameter[name[filename]]] variable[global_data] assign[=] name[self].global_data variable[rmsimg] assign[=] name[global_data].rmsimg variable[data] assign[=] name[global_data].data_pix call[name[self].log.info, parameter[call[constant[beam = {0:5.2f}'' x {1:5.2f}'' at {2:5.2f}deg].format, parameter[binary_operation[name[global_data].beam.a * constant[3600]], binary_operation[name[global_data].beam.b * constant[3600]], name[global_data].beam.pa]]]] if compare[name[outerclip] greater[>] name[innerclip]] begin[:] variable[outerclip] assign[=] name[innerclip] call[name[self].log.info, parameter[call[constant[seedclip={0}].format, parameter[name[innerclip]]]]] call[name[self].log.info, parameter[call[constant[floodclip={0}].format, parameter[name[outerclip]]]]] variable[isle_num] assign[=] constant[0] if compare[name[cores] equal[==] constant[1]] begin[:] variable[queue] assign[=] list[[]] variable[island_group] assign[=] list[[]] variable[group_size] assign[=] constant[20] for taget[tuple[[<ast.Name object at 0x7da20c993130>, <ast.Name object at 0x7da20c992710>, <ast.Name object at 0x7da20c992050>, <ast.Name object at 0x7da20c9915a0>, <ast.Name object at 0x7da20c990640>]]] in starred[call[name[self]._gen_flood_wrap, parameter[name[data], name[rmsimg], name[innerclip], name[outerclip]]]] begin[:] if compare[call[name[np].size, parameter[name[i]]] less[<] constant[1]] begin[:] call[name[self].log.warn, parameter[constant[Empty island detected, this should be imposisble.]]] continue <ast.AugAssign object at 0x7da20c9926e0> variable[scalars] assign[=] tuple[[<ast.Name object at 0x7da20c991750>, <ast.Name object at 0x7da20c993850>, <ast.Name object at 0x7da20c9939d0>]] variable[offsets] assign[=] tuple[[<ast.Name object at 0x7da20c993880>, <ast.Name object at 
0x7da20c991480>, <ast.Name object at 0x7da20c9908e0>, <ast.Name object at 0x7da20c993640>]] variable[island_data] assign[=] call[name[IslandFittingData], parameter[name[isle_num], name[i], name[scalars], name[offsets], name[doislandflux]]] if compare[name[cores] equal[==] constant[1]] begin[:] variable[res] assign[=] call[name[self]._fit_island, parameter[name[island_data]]] call[name[queue].append, parameter[name[res]]] if compare[call[name[len], parameter[name[island_group]]] greater[>] constant[0]] begin[:] call[name[fit_parallel], parameter[name[island_group]]] if name[outfile] begin[:] call[name[print], parameter[call[name[header].format, parameter[call[constant[{0}-({1})].format, parameter[name[__version__], name[__date__]]], name[filename]]]]] call[name[print], parameter[name[OutputSource].header]] variable[sources] assign[=] list[[]] for taget[name[srcs]] in starred[name[queue]] begin[:] if name[srcs] begin[:] for taget[name[src]] in starred[name[srcs]] begin[:] if <ast.BoolOp object at 0x7da20c991f90> begin[:] continue call[name[sources].append, parameter[name[src]]] if name[outfile] begin[:] call[name[print], parameter[call[name[str], parameter[name[src]]]]] call[name[self].sources.extend, parameter[name[sources]]] return[name[sources]]
keyword[def] identifier[find_sources_in_image] ( identifier[self] , identifier[filename] , identifier[hdu_index] = literal[int] , identifier[outfile] = keyword[None] , identifier[rms] = keyword[None] , identifier[bkg] = keyword[None] , identifier[max_summits] = keyword[None] , identifier[innerclip] = literal[int] , identifier[outerclip] = literal[int] , identifier[cores] = keyword[None] , identifier[rmsin] = keyword[None] , identifier[bkgin] = keyword[None] , identifier[beam] = keyword[None] , identifier[doislandflux] = keyword[False] , identifier[nopositive] = keyword[False] , identifier[nonegative] = keyword[False] , identifier[mask] = keyword[None] , identifier[lat] = keyword[None] , identifier[imgpsf] = keyword[None] , identifier[blank] = keyword[False] , identifier[docov] = keyword[True] , identifier[cube_index] = keyword[None] ): literal[string] identifier[np] . identifier[seterr] ( identifier[invalid] = literal[string] ) keyword[if] identifier[cores] keyword[is] keyword[not] keyword[None] : keyword[if] keyword[not] ( identifier[cores] >= literal[int] ): keyword[raise] identifier[AssertionError] ( literal[string] ) identifier[self] . identifier[load_globals] ( identifier[filename] , identifier[hdu_index] = identifier[hdu_index] , identifier[bkgin] = identifier[bkgin] , identifier[rmsin] = identifier[rmsin] , identifier[beam] = identifier[beam] , identifier[rms] = identifier[rms] , identifier[bkg] = identifier[bkg] , identifier[cores] = identifier[cores] , identifier[verb] = keyword[True] , identifier[mask] = identifier[mask] , identifier[lat] = identifier[lat] , identifier[psf] = identifier[imgpsf] , identifier[blank] = identifier[blank] , identifier[docov] = identifier[docov] , identifier[cube_index] = identifier[cube_index] ) identifier[global_data] = identifier[self] . identifier[global_data] identifier[rmsimg] = identifier[global_data] . identifier[rmsimg] identifier[data] = identifier[global_data] . identifier[data_pix] identifier[self] . 
identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[global_data] . identifier[beam] . identifier[a] * literal[int] , identifier[global_data] . identifier[beam] . identifier[b] * literal[int] , identifier[global_data] . identifier[beam] . identifier[pa] )) keyword[if] identifier[outerclip] > identifier[innerclip] : identifier[outerclip] = identifier[innerclip] identifier[self] . identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[innerclip] )) identifier[self] . identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[outerclip] )) identifier[isle_num] = literal[int] keyword[if] identifier[cores] == literal[int] : identifier[queue] =[] keyword[else] : identifier[queue] = identifier[pprocess] . identifier[Queue] ( identifier[limit] = identifier[cores] , identifier[reuse] = literal[int] ) identifier[fit_parallel] = identifier[queue] . identifier[manage] ( identifier[pprocess] . identifier[MakeReusable] ( identifier[self] . identifier[_fit_islands] )) identifier[island_group] =[] identifier[group_size] = literal[int] keyword[for] identifier[i] , identifier[xmin] , identifier[xmax] , identifier[ymin] , identifier[ymax] keyword[in] identifier[self] . identifier[_gen_flood_wrap] ( identifier[data] , identifier[rmsimg] , identifier[innerclip] , identifier[outerclip] , identifier[domask] = keyword[True] ): keyword[if] identifier[np] . identifier[size] ( identifier[i] )< literal[int] : identifier[self] . identifier[log] . 
identifier[warn] ( literal[string] ) keyword[continue] identifier[isle_num] += literal[int] identifier[scalars] =( identifier[innerclip] , identifier[outerclip] , identifier[max_summits] ) identifier[offsets] =( identifier[xmin] , identifier[xmax] , identifier[ymin] , identifier[ymax] ) identifier[island_data] = identifier[IslandFittingData] ( identifier[isle_num] , identifier[i] , identifier[scalars] , identifier[offsets] , identifier[doislandflux] ) keyword[if] identifier[cores] == literal[int] : identifier[res] = identifier[self] . identifier[_fit_island] ( identifier[island_data] ) identifier[queue] . identifier[append] ( identifier[res] ) keyword[else] : identifier[island_group] . identifier[append] ( identifier[island_data] ) keyword[if] identifier[len] ( identifier[island_group] )>= identifier[group_size] : identifier[fit_parallel] ( identifier[island_group] ) identifier[island_group] =[] keyword[if] identifier[len] ( identifier[island_group] )> literal[int] : identifier[fit_parallel] ( identifier[island_group] ) keyword[if] identifier[outfile] : identifier[print] ( identifier[header] . identifier[format] ( literal[string] . identifier[format] ( identifier[__version__] , identifier[__date__] ), identifier[filename] ), identifier[file] = identifier[outfile] ) identifier[print] ( identifier[OutputSource] . identifier[header] , identifier[file] = identifier[outfile] ) identifier[sources] =[] keyword[for] identifier[srcs] keyword[in] identifier[queue] : keyword[if] identifier[srcs] : keyword[for] identifier[src] keyword[in] identifier[srcs] : keyword[if] ( identifier[src] . identifier[peak_flux] > literal[int] keyword[and] identifier[nopositive] ) keyword[or] ( identifier[src] . identifier[peak_flux] < literal[int] keyword[and] identifier[nonegative] ): keyword[continue] identifier[sources] . 
identifier[append] ( identifier[src] ) keyword[if] identifier[outfile] : identifier[print] ( identifier[str] ( identifier[src] ), identifier[file] = identifier[outfile] ) identifier[self] . identifier[sources] . identifier[extend] ( identifier[sources] ) keyword[return] identifier[sources]
def find_sources_in_image(self, filename, hdu_index=0, outfile=None, rms=None, bkg=None, max_summits=None, innerclip=5, outerclip=4, cores=None, rmsin=None, bkgin=None, beam=None, doislandflux=False, nopositive=False, nonegative=False, mask=None, lat=None, imgpsf=None, blank=False, docov=True, cube_index=None): """ Run the Aegean source finder. Parameters ---------- filename : str or HDUList Image filename or HDUList. hdu_index : int The index of the FITS HDU (extension). outfile : str file for printing catalog (NOT a table, just a text file of my own design) rms : float Use this rms for the entire image (will also assume that background is 0) max_summits : int Fit up to this many components to each island (extras are included but not fit) innerclip, outerclip : float The seed (inner) and flood (outer) clipping level (sigmas). cores : int Number of CPU cores to use. None means all cores. rmsin, bkgin : str or HDUList Filename or HDUList for the noise and background images. If either are None, then it will be calculated internally. beam : (major, minor, pa) Floats representing the synthesised beam (degrees). Replaces whatever is given in the FITS header. If the FITS header has no BMAJ/BMIN then this is required. doislandflux : bool If True then each island will also be characterized. nopositive, nonegative : bool Whether to return positive or negative sources. Default nopositive=False, nonegative=True. mask : str The filename of a region file created by MIMAS. Islands outside of this region will be ignored. lat : float The latitude of the telescope (declination of zenith). imgpsf : str or HDUList Filename or HDUList for a psf image. blank : bool Cause the output image to be blanked where islands are found. docov : bool If True then include covariance matrix in the fitting process. (default=True) cube_index : int For image cubes, cube_index determines which slice is used. Returns ------- sources : list List of sources found. 
""" # Tell numpy to be quiet np.seterr(invalid='ignore') if cores is not None: if not cores >= 1: raise AssertionError('cores must be one or more') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['cores']] self.load_globals(filename, hdu_index=hdu_index, bkgin=bkgin, rmsin=rmsin, beam=beam, rms=rms, bkg=bkg, cores=cores, verb=True, mask=mask, lat=lat, psf=imgpsf, blank=blank, docov=docov, cube_index=cube_index) global_data = self.global_data rmsimg = global_data.rmsimg data = global_data.data_pix self.log.info("beam = {0:5.2f}'' x {1:5.2f}'' at {2:5.2f}deg".format(global_data.beam.a * 3600, global_data.beam.b * 3600, global_data.beam.pa)) # stop people from doing silly things. if outerclip > innerclip: outerclip = innerclip # depends on [control=['if'], data=['outerclip', 'innerclip']] self.log.info('seedclip={0}'.format(innerclip)) self.log.info('floodclip={0}'.format(outerclip)) isle_num = 0 if cores == 1: # single-threaded, no parallel processing queue = [] # depends on [control=['if'], data=[]] else: queue = pprocess.Queue(limit=cores, reuse=1) fit_parallel = queue.manage(pprocess.MakeReusable(self._fit_islands)) island_group = [] group_size = 20 for (i, xmin, xmax, ymin, ymax) in self._gen_flood_wrap(data, rmsimg, innerclip, outerclip, domask=True): # ignore empty islands # This should now be impossible to trigger if np.size(i) < 1: self.log.warn('Empty island detected, this should be imposisble.') continue # depends on [control=['if'], data=[]] isle_num += 1 scalars = (innerclip, outerclip, max_summits) offsets = (xmin, xmax, ymin, ymax) island_data = IslandFittingData(isle_num, i, scalars, offsets, doislandflux) # If cores==1 run fitting in main process. Otherwise build up groups of islands # and submit to queue for subprocesses. Passing a group of islands is more # efficient than passing single islands to the subprocesses. 
if cores == 1: res = self._fit_island(island_data) queue.append(res) # depends on [control=['if'], data=[]] else: island_group.append(island_data) # If the island group is full queue it for the subprocesses to fit if len(island_group) >= group_size: fit_parallel(island_group) island_group = [] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # The last partially-filled island group also needs to be queued for fitting if len(island_group) > 0: fit_parallel(island_group) # depends on [control=['if'], data=[]] # Write the output to the output file if outfile: print(header.format('{0}-({1})'.format(__version__, __date__), filename), file=outfile) print(OutputSource.header, file=outfile) # depends on [control=['if'], data=[]] sources = [] for srcs in queue: if srcs: # ignore empty lists for src in srcs: # ignore sources that we have been told to ignore if src.peak_flux > 0 and nopositive or (src.peak_flux < 0 and nonegative): continue # depends on [control=['if'], data=[]] sources.append(src) if outfile: print(str(src), file=outfile) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['src']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['srcs']] self.sources.extend(sources) return sources
def _load_folder(folder_entry, corpus): """ Load the given subfolder into the corpus (e.g. bed, one, ...) """ for wav_path in glob.glob(os.path.join(folder_entry.path, '*.wav')): wav_name = os.path.basename(wav_path) basename, __ = os.path.splitext(wav_name) command = folder_entry.name file_idx = '{}_{}'.format(basename, command) issuer_idx = str(basename).split('_', maxsplit=1)[0] corpus.new_file(wav_path, file_idx) if issuer_idx not in corpus.issuers.keys(): corpus.import_issuers(issuers.Speaker( issuer_idx )) utt = corpus.new_utterance(file_idx, file_idx, issuer_idx) labels = annotations.LabelList.create_single(command, idx=audiomate.corpus.LL_WORD_TRANSCRIPT) utt.set_label_list(labels)
def function[_load_folder, parameter[folder_entry, corpus]]: constant[ Load the given subfolder into the corpus (e.g. bed, one, ...) ] for taget[name[wav_path]] in starred[call[name[glob].glob, parameter[call[name[os].path.join, parameter[name[folder_entry].path, constant[*.wav]]]]]] begin[:] variable[wav_name] assign[=] call[name[os].path.basename, parameter[name[wav_path]]] <ast.Tuple object at 0x7da1b0b1e9e0> assign[=] call[name[os].path.splitext, parameter[name[wav_name]]] variable[command] assign[=] name[folder_entry].name variable[file_idx] assign[=] call[constant[{}_{}].format, parameter[name[basename], name[command]]] variable[issuer_idx] assign[=] call[call[call[name[str], parameter[name[basename]]].split, parameter[constant[_]]]][constant[0]] call[name[corpus].new_file, parameter[name[wav_path], name[file_idx]]] if compare[name[issuer_idx] <ast.NotIn object at 0x7da2590d7190> call[name[corpus].issuers.keys, parameter[]]] begin[:] call[name[corpus].import_issuers, parameter[call[name[issuers].Speaker, parameter[name[issuer_idx]]]]] variable[utt] assign[=] call[name[corpus].new_utterance, parameter[name[file_idx], name[file_idx], name[issuer_idx]]] variable[labels] assign[=] call[name[annotations].LabelList.create_single, parameter[name[command]]] call[name[utt].set_label_list, parameter[name[labels]]]
keyword[def] identifier[_load_folder] ( identifier[folder_entry] , identifier[corpus] ): literal[string] keyword[for] identifier[wav_path] keyword[in] identifier[glob] . identifier[glob] ( identifier[os] . identifier[path] . identifier[join] ( identifier[folder_entry] . identifier[path] , literal[string] )): identifier[wav_name] = identifier[os] . identifier[path] . identifier[basename] ( identifier[wav_path] ) identifier[basename] , identifier[__] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[wav_name] ) identifier[command] = identifier[folder_entry] . identifier[name] identifier[file_idx] = literal[string] . identifier[format] ( identifier[basename] , identifier[command] ) identifier[issuer_idx] = identifier[str] ( identifier[basename] ). identifier[split] ( literal[string] , identifier[maxsplit] = literal[int] )[ literal[int] ] identifier[corpus] . identifier[new_file] ( identifier[wav_path] , identifier[file_idx] ) keyword[if] identifier[issuer_idx] keyword[not] keyword[in] identifier[corpus] . identifier[issuers] . identifier[keys] (): identifier[corpus] . identifier[import_issuers] ( identifier[issuers] . identifier[Speaker] ( identifier[issuer_idx] )) identifier[utt] = identifier[corpus] . identifier[new_utterance] ( identifier[file_idx] , identifier[file_idx] , identifier[issuer_idx] ) identifier[labels] = identifier[annotations] . identifier[LabelList] . identifier[create_single] ( identifier[command] , identifier[idx] = identifier[audiomate] . identifier[corpus] . identifier[LL_WORD_TRANSCRIPT] ) identifier[utt] . identifier[set_label_list] ( identifier[labels] )
def _load_folder(folder_entry, corpus): """ Load the given subfolder into the corpus (e.g. bed, one, ...) """ for wav_path in glob.glob(os.path.join(folder_entry.path, '*.wav')): wav_name = os.path.basename(wav_path) (basename, __) = os.path.splitext(wav_name) command = folder_entry.name file_idx = '{}_{}'.format(basename, command) issuer_idx = str(basename).split('_', maxsplit=1)[0] corpus.new_file(wav_path, file_idx) if issuer_idx not in corpus.issuers.keys(): corpus.import_issuers(issuers.Speaker(issuer_idx)) # depends on [control=['if'], data=['issuer_idx']] utt = corpus.new_utterance(file_idx, file_idx, issuer_idx) labels = annotations.LabelList.create_single(command, idx=audiomate.corpus.LL_WORD_TRANSCRIPT) utt.set_label_list(labels) # depends on [control=['for'], data=['wav_path']]
def plot_cumulative_returns_by_quantile(quantile_returns, period, freq, ax=None): """ Plots the cumulative returns of various factor quantiles. Parameters ---------- quantile_returns : pd.DataFrame Returns by factor quantile period: pandas.Timedelta or string Length of period for which the returns are computed (e.g. 1 day) if 'period' is a string it must follow pandas.Timedelta constructor format (e.g. '1 days', '1D', '30m', '3h', '1D1h', etc) freq : pandas DateOffset Used to specify a particular trading calendar e.g. BusinessDay or Day Usually this is inferred from utils.infer_trading_calendar, which is called by either get_clean_factor_and_forward_returns or compute_forward_returns ax : matplotlib.Axes, optional Axes upon which to plot. Returns ------- ax : matplotlib.Axes """ if ax is None: f, ax = plt.subplots(1, 1, figsize=(18, 6)) ret_wide = quantile_returns.unstack('factor_quantile') cum_ret = ret_wide.apply(perf.cumulative_returns, period=period, freq=freq) cum_ret = cum_ret.loc[:, ::-1] # we want negative quantiles as 'red' cum_ret.plot(lw=2, ax=ax, cmap=cm.coolwarm) ax.legend() ymin, ymax = cum_ret.min().min(), cum_ret.max().max() ax.set(ylabel='Log Cumulative Returns', title='''Cumulative Return by Quantile ({} Period Forward Return)'''.format(period), xlabel='', yscale='symlog', yticks=np.linspace(ymin, ymax, 5), ylim=(ymin, ymax)) ax.yaxis.set_major_formatter(ScalarFormatter()) ax.axhline(1.0, linestyle='-', color='black', lw=1) return ax
def function[plot_cumulative_returns_by_quantile, parameter[quantile_returns, period, freq, ax]]: constant[ Plots the cumulative returns of various factor quantiles. Parameters ---------- quantile_returns : pd.DataFrame Returns by factor quantile period: pandas.Timedelta or string Length of period for which the returns are computed (e.g. 1 day) if 'period' is a string it must follow pandas.Timedelta constructor format (e.g. '1 days', '1D', '30m', '3h', '1D1h', etc) freq : pandas DateOffset Used to specify a particular trading calendar e.g. BusinessDay or Day Usually this is inferred from utils.infer_trading_calendar, which is called by either get_clean_factor_and_forward_returns or compute_forward_returns ax : matplotlib.Axes, optional Axes upon which to plot. Returns ------- ax : matplotlib.Axes ] if compare[name[ax] is constant[None]] begin[:] <ast.Tuple object at 0x7da20cabe080> assign[=] call[name[plt].subplots, parameter[constant[1], constant[1]]] variable[ret_wide] assign[=] call[name[quantile_returns].unstack, parameter[constant[factor_quantile]]] variable[cum_ret] assign[=] call[name[ret_wide].apply, parameter[name[perf].cumulative_returns]] variable[cum_ret] assign[=] call[name[cum_ret].loc][tuple[[<ast.Slice object at 0x7da20cabdf00>, <ast.Slice object at 0x7da20cabe7d0>]]] call[name[cum_ret].plot, parameter[]] call[name[ax].legend, parameter[]] <ast.Tuple object at 0x7da20cabc4f0> assign[=] tuple[[<ast.Call object at 0x7da20cabc700>, <ast.Call object at 0x7da20cabcbe0>]] call[name[ax].set, parameter[]] call[name[ax].yaxis.set_major_formatter, parameter[call[name[ScalarFormatter], parameter[]]]] call[name[ax].axhline, parameter[constant[1.0]]] return[name[ax]]
keyword[def] identifier[plot_cumulative_returns_by_quantile] ( identifier[quantile_returns] , identifier[period] , identifier[freq] , identifier[ax] = keyword[None] ): literal[string] keyword[if] identifier[ax] keyword[is] keyword[None] : identifier[f] , identifier[ax] = identifier[plt] . identifier[subplots] ( literal[int] , literal[int] , identifier[figsize] =( literal[int] , literal[int] )) identifier[ret_wide] = identifier[quantile_returns] . identifier[unstack] ( literal[string] ) identifier[cum_ret] = identifier[ret_wide] . identifier[apply] ( identifier[perf] . identifier[cumulative_returns] , identifier[period] = identifier[period] , identifier[freq] = identifier[freq] ) identifier[cum_ret] = identifier[cum_ret] . identifier[loc] [:,::- literal[int] ] identifier[cum_ret] . identifier[plot] ( identifier[lw] = literal[int] , identifier[ax] = identifier[ax] , identifier[cmap] = identifier[cm] . identifier[coolwarm] ) identifier[ax] . identifier[legend] () identifier[ymin] , identifier[ymax] = identifier[cum_ret] . identifier[min] (). identifier[min] (), identifier[cum_ret] . identifier[max] (). identifier[max] () identifier[ax] . identifier[set] ( identifier[ylabel] = literal[string] , identifier[title] = literal[string] . identifier[format] ( identifier[period] ), identifier[xlabel] = literal[string] , identifier[yscale] = literal[string] , identifier[yticks] = identifier[np] . identifier[linspace] ( identifier[ymin] , identifier[ymax] , literal[int] ), identifier[ylim] =( identifier[ymin] , identifier[ymax] )) identifier[ax] . identifier[yaxis] . identifier[set_major_formatter] ( identifier[ScalarFormatter] ()) identifier[ax] . identifier[axhline] ( literal[int] , identifier[linestyle] = literal[string] , identifier[color] = literal[string] , identifier[lw] = literal[int] ) keyword[return] identifier[ax]
def plot_cumulative_returns_by_quantile(quantile_returns, period, freq, ax=None): """ Plots the cumulative returns of various factor quantiles. Parameters ---------- quantile_returns : pd.DataFrame Returns by factor quantile period: pandas.Timedelta or string Length of period for which the returns are computed (e.g. 1 day) if 'period' is a string it must follow pandas.Timedelta constructor format (e.g. '1 days', '1D', '30m', '3h', '1D1h', etc) freq : pandas DateOffset Used to specify a particular trading calendar e.g. BusinessDay or Day Usually this is inferred from utils.infer_trading_calendar, which is called by either get_clean_factor_and_forward_returns or compute_forward_returns ax : matplotlib.Axes, optional Axes upon which to plot. Returns ------- ax : matplotlib.Axes """ if ax is None: (f, ax) = plt.subplots(1, 1, figsize=(18, 6)) # depends on [control=['if'], data=['ax']] ret_wide = quantile_returns.unstack('factor_quantile') cum_ret = ret_wide.apply(perf.cumulative_returns, period=period, freq=freq) cum_ret = cum_ret.loc[:, ::-1] # we want negative quantiles as 'red' cum_ret.plot(lw=2, ax=ax, cmap=cm.coolwarm) ax.legend() (ymin, ymax) = (cum_ret.min().min(), cum_ret.max().max()) ax.set(ylabel='Log Cumulative Returns', title='Cumulative Return by Quantile\n ({} Period Forward Return)'.format(period), xlabel='', yscale='symlog', yticks=np.linspace(ymin, ymax, 5), ylim=(ymin, ymax)) ax.yaxis.set_major_formatter(ScalarFormatter()) ax.axhline(1.0, linestyle='-', color='black', lw=1) return ax
def WriteBuildYaml(self, fd, build_timestamp=True): """Write build spec to fd.""" output = { "Client.build_environment": rdf_client.Uname.FromCurrentSystem().signature(), "Template.build_type": config.CONFIG.Get("ClientBuilder.build_type", context=self.context), "Template.version_major": config.CONFIG.Get("Source.version_major", context=self.context), "Template.version_minor": config.CONFIG.Get("Source.version_minor", context=self.context), "Template.version_revision": config.CONFIG.Get("Source.version_revision", context=self.context), "Template.version_release": config.CONFIG.Get("Source.version_release", context=self.context), "Template.arch": config.CONFIG.Get("Client.arch", context=self.context) } if build_timestamp: output["Client.build_time"] = rdfvalue.RDFDatetime.Now() else: self.REQUIRED_BUILD_YAML_KEYS.remove("Client.build_time") for key, value in iteritems(output): output[key] = str(value) output["Template.build_context"] = self.context output_keys = set(iterkeys(output)) if output_keys != self.REQUIRED_BUILD_YAML_KEYS: raise RuntimeError("Bad build.yaml: expected %s, got %s" % (self.REQUIRED_BUILD_YAML_KEYS, output_keys)) fd.write(yaml.Dump(output).encode("utf-8"))
def function[WriteBuildYaml, parameter[self, fd, build_timestamp]]: constant[Write build spec to fd.] variable[output] assign[=] dictionary[[<ast.Constant object at 0x7da1b1b46710>, <ast.Constant object at 0x7da1b1b44610>, <ast.Constant object at 0x7da1b1b473a0>, <ast.Constant object at 0x7da1b1b45c90>, <ast.Constant object at 0x7da1b1b44820>, <ast.Constant object at 0x7da1b1b46320>, <ast.Constant object at 0x7da1b1b45090>], [<ast.Call object at 0x7da1b1b45570>, <ast.Call object at 0x7da1b1b448b0>, <ast.Call object at 0x7da1b1b46cb0>, <ast.Call object at 0x7da1b1b46ad0>, <ast.Call object at 0x7da1b1b47100>, <ast.Call object at 0x7da1b1b45c30>, <ast.Call object at 0x7da1b1b46ef0>]] if name[build_timestamp] begin[:] call[name[output]][constant[Client.build_time]] assign[=] call[name[rdfvalue].RDFDatetime.Now, parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b1b46950>, <ast.Name object at 0x7da1b1b45f90>]]] in starred[call[name[iteritems], parameter[name[output]]]] begin[:] call[name[output]][name[key]] assign[=] call[name[str], parameter[name[value]]] call[name[output]][constant[Template.build_context]] assign[=] name[self].context variable[output_keys] assign[=] call[name[set], parameter[call[name[iterkeys], parameter[name[output]]]]] if compare[name[output_keys] not_equal[!=] name[self].REQUIRED_BUILD_YAML_KEYS] begin[:] <ast.Raise object at 0x7da18f58fa00> call[name[fd].write, parameter[call[call[name[yaml].Dump, parameter[name[output]]].encode, parameter[constant[utf-8]]]]]
keyword[def] identifier[WriteBuildYaml] ( identifier[self] , identifier[fd] , identifier[build_timestamp] = keyword[True] ): literal[string] identifier[output] ={ literal[string] : identifier[rdf_client] . identifier[Uname] . identifier[FromCurrentSystem] (). identifier[signature] (), literal[string] : identifier[config] . identifier[CONFIG] . identifier[Get] ( literal[string] , identifier[context] = identifier[self] . identifier[context] ), literal[string] : identifier[config] . identifier[CONFIG] . identifier[Get] ( literal[string] , identifier[context] = identifier[self] . identifier[context] ), literal[string] : identifier[config] . identifier[CONFIG] . identifier[Get] ( literal[string] , identifier[context] = identifier[self] . identifier[context] ), literal[string] : identifier[config] . identifier[CONFIG] . identifier[Get] ( literal[string] , identifier[context] = identifier[self] . identifier[context] ), literal[string] : identifier[config] . identifier[CONFIG] . identifier[Get] ( literal[string] , identifier[context] = identifier[self] . identifier[context] ), literal[string] : identifier[config] . identifier[CONFIG] . identifier[Get] ( literal[string] , identifier[context] = identifier[self] . identifier[context] ) } keyword[if] identifier[build_timestamp] : identifier[output] [ literal[string] ]= identifier[rdfvalue] . identifier[RDFDatetime] . identifier[Now] () keyword[else] : identifier[self] . identifier[REQUIRED_BUILD_YAML_KEYS] . identifier[remove] ( literal[string] ) keyword[for] identifier[key] , identifier[value] keyword[in] identifier[iteritems] ( identifier[output] ): identifier[output] [ identifier[key] ]= identifier[str] ( identifier[value] ) identifier[output] [ literal[string] ]= identifier[self] . identifier[context] identifier[output_keys] = identifier[set] ( identifier[iterkeys] ( identifier[output] )) keyword[if] identifier[output_keys] != identifier[self] . 
identifier[REQUIRED_BUILD_YAML_KEYS] : keyword[raise] identifier[RuntimeError] ( literal[string] % ( identifier[self] . identifier[REQUIRED_BUILD_YAML_KEYS] , identifier[output_keys] )) identifier[fd] . identifier[write] ( identifier[yaml] . identifier[Dump] ( identifier[output] ). identifier[encode] ( literal[string] ))
def WriteBuildYaml(self, fd, build_timestamp=True): """Write build spec to fd.""" output = {'Client.build_environment': rdf_client.Uname.FromCurrentSystem().signature(), 'Template.build_type': config.CONFIG.Get('ClientBuilder.build_type', context=self.context), 'Template.version_major': config.CONFIG.Get('Source.version_major', context=self.context), 'Template.version_minor': config.CONFIG.Get('Source.version_minor', context=self.context), 'Template.version_revision': config.CONFIG.Get('Source.version_revision', context=self.context), 'Template.version_release': config.CONFIG.Get('Source.version_release', context=self.context), 'Template.arch': config.CONFIG.Get('Client.arch', context=self.context)} if build_timestamp: output['Client.build_time'] = rdfvalue.RDFDatetime.Now() # depends on [control=['if'], data=[]] else: self.REQUIRED_BUILD_YAML_KEYS.remove('Client.build_time') for (key, value) in iteritems(output): output[key] = str(value) # depends on [control=['for'], data=[]] output['Template.build_context'] = self.context output_keys = set(iterkeys(output)) if output_keys != self.REQUIRED_BUILD_YAML_KEYS: raise RuntimeError('Bad build.yaml: expected %s, got %s' % (self.REQUIRED_BUILD_YAML_KEYS, output_keys)) # depends on [control=['if'], data=['output_keys']] fd.write(yaml.Dump(output).encode('utf-8'))
def inputs(form_args): """ Creates list of input elements """ element = [] for name, value in form_args.items(): element.append( '<input type="hidden" name="{}" value="{}"/>'.format(name, value)) return "\n".join(element)
def function[inputs, parameter[form_args]]: constant[ Creates list of input elements ] variable[element] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da18dc99420>, <ast.Name object at 0x7da18dc9a3b0>]]] in starred[call[name[form_args].items, parameter[]]] begin[:] call[name[element].append, parameter[call[constant[<input type="hidden" name="{}" value="{}"/>].format, parameter[name[name], name[value]]]]] return[call[constant[ ].join, parameter[name[element]]]]
keyword[def] identifier[inputs] ( identifier[form_args] ): literal[string] identifier[element] =[] keyword[for] identifier[name] , identifier[value] keyword[in] identifier[form_args] . identifier[items] (): identifier[element] . identifier[append] ( literal[string] . identifier[format] ( identifier[name] , identifier[value] )) keyword[return] literal[string] . identifier[join] ( identifier[element] )
def inputs(form_args): """ Creates list of input elements """ element = [] for (name, value) in form_args.items(): element.append('<input type="hidden" name="{}" value="{}"/>'.format(name, value)) # depends on [control=['for'], data=[]] return '\n'.join(element)
def get_value(self, dictionary): """ Given the *incoming* primitive data, return the value for this field that should be validated and transformed to a native value. """ if html.is_html_input(dictionary): # HTML forms will represent empty fields as '', and cannot # represent None or False values directly. if self.field_name not in dictionary: if getattr(self.root, 'partial', False): return empty return self.default_empty_html ret = dictionary[self.field_name] if ret == '' and self.allow_null: # If the field is blank, and null is a valid value then # determine if we should use null instead. return '' if getattr(self, 'allow_blank', False) else None elif ret == '' and not self.required: # If the field is blank, and emptyness is valid then # determine if we should use emptyness instead. return '' if getattr(self, 'allow_blank', False) else empty return ret return dictionary.get(self.field_name, empty)
def function[get_value, parameter[self, dictionary]]: constant[ Given the *incoming* primitive data, return the value for this field that should be validated and transformed to a native value. ] if call[name[html].is_html_input, parameter[name[dictionary]]] begin[:] if compare[name[self].field_name <ast.NotIn object at 0x7da2590d7190> name[dictionary]] begin[:] if call[name[getattr], parameter[name[self].root, constant[partial], constant[False]]] begin[:] return[name[empty]] return[name[self].default_empty_html] variable[ret] assign[=] call[name[dictionary]][name[self].field_name] if <ast.BoolOp object at 0x7da20c6c5f30> begin[:] return[<ast.IfExp object at 0x7da20c6c5090>] return[name[ret]] return[call[name[dictionary].get, parameter[name[self].field_name, name[empty]]]]
keyword[def] identifier[get_value] ( identifier[self] , identifier[dictionary] ): literal[string] keyword[if] identifier[html] . identifier[is_html_input] ( identifier[dictionary] ): keyword[if] identifier[self] . identifier[field_name] keyword[not] keyword[in] identifier[dictionary] : keyword[if] identifier[getattr] ( identifier[self] . identifier[root] , literal[string] , keyword[False] ): keyword[return] identifier[empty] keyword[return] identifier[self] . identifier[default_empty_html] identifier[ret] = identifier[dictionary] [ identifier[self] . identifier[field_name] ] keyword[if] identifier[ret] == literal[string] keyword[and] identifier[self] . identifier[allow_null] : keyword[return] literal[string] keyword[if] identifier[getattr] ( identifier[self] , literal[string] , keyword[False] ) keyword[else] keyword[None] keyword[elif] identifier[ret] == literal[string] keyword[and] keyword[not] identifier[self] . identifier[required] : keyword[return] literal[string] keyword[if] identifier[getattr] ( identifier[self] , literal[string] , keyword[False] ) keyword[else] identifier[empty] keyword[return] identifier[ret] keyword[return] identifier[dictionary] . identifier[get] ( identifier[self] . identifier[field_name] , identifier[empty] )
def get_value(self, dictionary): """ Given the *incoming* primitive data, return the value for this field that should be validated and transformed to a native value. """ if html.is_html_input(dictionary): # HTML forms will represent empty fields as '', and cannot # represent None or False values directly. if self.field_name not in dictionary: if getattr(self.root, 'partial', False): return empty # depends on [control=['if'], data=[]] return self.default_empty_html # depends on [control=['if'], data=[]] ret = dictionary[self.field_name] if ret == '' and self.allow_null: # If the field is blank, and null is a valid value then # determine if we should use null instead. return '' if getattr(self, 'allow_blank', False) else None # depends on [control=['if'], data=[]] elif ret == '' and (not self.required): # If the field is blank, and emptyness is valid then # determine if we should use emptyness instead. return '' if getattr(self, 'allow_blank', False) else empty # depends on [control=['if'], data=[]] return ret # depends on [control=['if'], data=[]] return dictionary.get(self.field_name, empty)
def timetree_likelihood(self): ''' Return the likelihood of the data given the current branch length in the tree ''' LH = 0 for node in self.tree.find_clades(order='preorder'): # sum the likelihood contributions of all branches if node.up is None: # root node continue LH -= node.branch_length_interpolator(node.branch_length) # add the root sequence LH and return if self.aln: LH += self.gtr.sequence_logLH(self.tree.root.cseq, pattern_multiplicity=self.multiplicity) return LH
def function[timetree_likelihood, parameter[self]]: constant[ Return the likelihood of the data given the current branch length in the tree ] variable[LH] assign[=] constant[0] for taget[name[node]] in starred[call[name[self].tree.find_clades, parameter[]]] begin[:] if compare[name[node].up is constant[None]] begin[:] continue <ast.AugAssign object at 0x7da20c76d5d0> if name[self].aln begin[:] <ast.AugAssign object at 0x7da20c76f790> return[name[LH]]
keyword[def] identifier[timetree_likelihood] ( identifier[self] ): literal[string] identifier[LH] = literal[int] keyword[for] identifier[node] keyword[in] identifier[self] . identifier[tree] . identifier[find_clades] ( identifier[order] = literal[string] ): keyword[if] identifier[node] . identifier[up] keyword[is] keyword[None] : keyword[continue] identifier[LH] -= identifier[node] . identifier[branch_length_interpolator] ( identifier[node] . identifier[branch_length] ) keyword[if] identifier[self] . identifier[aln] : identifier[LH] += identifier[self] . identifier[gtr] . identifier[sequence_logLH] ( identifier[self] . identifier[tree] . identifier[root] . identifier[cseq] , identifier[pattern_multiplicity] = identifier[self] . identifier[multiplicity] ) keyword[return] identifier[LH]
def timetree_likelihood(self): """ Return the likelihood of the data given the current branch length in the tree """ LH = 0 for node in self.tree.find_clades(order='preorder'): # sum the likelihood contributions of all branches if node.up is None: # root node continue # depends on [control=['if'], data=[]] LH -= node.branch_length_interpolator(node.branch_length) # depends on [control=['for'], data=['node']] # add the root sequence LH and return if self.aln: LH += self.gtr.sequence_logLH(self.tree.root.cseq, pattern_multiplicity=self.multiplicity) # depends on [control=['if'], data=[]] return LH
def FindClassIdInMethodMetaIgnoreCase(classId): """ Methods whether classId is valid or not . Given class is case insensitive. """ if classId in _MethodFactoryMeta: return classId lClassId = classId.lower() for key in _MethodFactoryMeta.keys(): if (key.lower() == classId.lower()): return key return None
def function[FindClassIdInMethodMetaIgnoreCase, parameter[classId]]: constant[ Methods whether classId is valid or not . Given class is case insensitive. ] if compare[name[classId] in name[_MethodFactoryMeta]] begin[:] return[name[classId]] variable[lClassId] assign[=] call[name[classId].lower, parameter[]] for taget[name[key]] in starred[call[name[_MethodFactoryMeta].keys, parameter[]]] begin[:] if compare[call[name[key].lower, parameter[]] equal[==] call[name[classId].lower, parameter[]]] begin[:] return[name[key]] return[constant[None]]
keyword[def] identifier[FindClassIdInMethodMetaIgnoreCase] ( identifier[classId] ): literal[string] keyword[if] identifier[classId] keyword[in] identifier[_MethodFactoryMeta] : keyword[return] identifier[classId] identifier[lClassId] = identifier[classId] . identifier[lower] () keyword[for] identifier[key] keyword[in] identifier[_MethodFactoryMeta] . identifier[keys] (): keyword[if] ( identifier[key] . identifier[lower] ()== identifier[classId] . identifier[lower] ()): keyword[return] identifier[key] keyword[return] keyword[None]
def FindClassIdInMethodMetaIgnoreCase(classId): """ Methods whether classId is valid or not . Given class is case insensitive. """ if classId in _MethodFactoryMeta: return classId # depends on [control=['if'], data=['classId']] lClassId = classId.lower() for key in _MethodFactoryMeta.keys(): if key.lower() == classId.lower(): return key # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']] return None
def clean(): """Deletes dev files""" rm("$testfn*") rm("*.bak") rm("*.core") rm("*.egg-info") rm("*.orig") rm("*.pyc") rm("*.pyd") rm("*.pyo") rm("*.rej") rm("*.so") rm("*.~") rm("*__pycache__") rm(".coverage") rm(".tox") rm(".coverage") rm("build") rm("dist") rm("docs/_build") rm("htmlcov") rm("tmp") rm("venv")
def function[clean, parameter[]]: constant[Deletes dev files] call[name[rm], parameter[constant[$testfn*]]] call[name[rm], parameter[constant[*.bak]]] call[name[rm], parameter[constant[*.core]]] call[name[rm], parameter[constant[*.egg-info]]] call[name[rm], parameter[constant[*.orig]]] call[name[rm], parameter[constant[*.pyc]]] call[name[rm], parameter[constant[*.pyd]]] call[name[rm], parameter[constant[*.pyo]]] call[name[rm], parameter[constant[*.rej]]] call[name[rm], parameter[constant[*.so]]] call[name[rm], parameter[constant[*.~]]] call[name[rm], parameter[constant[*__pycache__]]] call[name[rm], parameter[constant[.coverage]]] call[name[rm], parameter[constant[.tox]]] call[name[rm], parameter[constant[.coverage]]] call[name[rm], parameter[constant[build]]] call[name[rm], parameter[constant[dist]]] call[name[rm], parameter[constant[docs/_build]]] call[name[rm], parameter[constant[htmlcov]]] call[name[rm], parameter[constant[tmp]]] call[name[rm], parameter[constant[venv]]]
keyword[def] identifier[clean] (): literal[string] identifier[rm] ( literal[string] ) identifier[rm] ( literal[string] ) identifier[rm] ( literal[string] ) identifier[rm] ( literal[string] ) identifier[rm] ( literal[string] ) identifier[rm] ( literal[string] ) identifier[rm] ( literal[string] ) identifier[rm] ( literal[string] ) identifier[rm] ( literal[string] ) identifier[rm] ( literal[string] ) identifier[rm] ( literal[string] ) identifier[rm] ( literal[string] ) identifier[rm] ( literal[string] ) identifier[rm] ( literal[string] ) identifier[rm] ( literal[string] ) identifier[rm] ( literal[string] ) identifier[rm] ( literal[string] ) identifier[rm] ( literal[string] ) identifier[rm] ( literal[string] ) identifier[rm] ( literal[string] ) identifier[rm] ( literal[string] )
def clean(): """Deletes dev files""" rm('$testfn*') rm('*.bak') rm('*.core') rm('*.egg-info') rm('*.orig') rm('*.pyc') rm('*.pyd') rm('*.pyo') rm('*.rej') rm('*.so') rm('*.~') rm('*__pycache__') rm('.coverage') rm('.tox') rm('.coverage') rm('build') rm('dist') rm('docs/_build') rm('htmlcov') rm('tmp') rm('venv')
def ensure_mapping_format(variables): """ ensure variables are in mapping format. Args: variables (list/dict): original variables Returns: dict: ensured variables in dict format Examples: >>> variables = [ {"a": 1}, {"b": 2} ] >>> print(ensure_mapping_format(variables)) { "a": 1, "b": 2 } """ if isinstance(variables, list): variables_dict = {} for map_dict in variables: variables_dict.update(map_dict) return variables_dict elif isinstance(variables, dict): return variables else: raise exceptions.ParamsError("variables format error!")
def function[ensure_mapping_format, parameter[variables]]: constant[ ensure variables are in mapping format. Args: variables (list/dict): original variables Returns: dict: ensured variables in dict format Examples: >>> variables = [ {"a": 1}, {"b": 2} ] >>> print(ensure_mapping_format(variables)) { "a": 1, "b": 2 } ] if call[name[isinstance], parameter[name[variables], name[list]]] begin[:] variable[variables_dict] assign[=] dictionary[[], []] for taget[name[map_dict]] in starred[name[variables]] begin[:] call[name[variables_dict].update, parameter[name[map_dict]]] return[name[variables_dict]]
keyword[def] identifier[ensure_mapping_format] ( identifier[variables] ): literal[string] keyword[if] identifier[isinstance] ( identifier[variables] , identifier[list] ): identifier[variables_dict] ={} keyword[for] identifier[map_dict] keyword[in] identifier[variables] : identifier[variables_dict] . identifier[update] ( identifier[map_dict] ) keyword[return] identifier[variables_dict] keyword[elif] identifier[isinstance] ( identifier[variables] , identifier[dict] ): keyword[return] identifier[variables] keyword[else] : keyword[raise] identifier[exceptions] . identifier[ParamsError] ( literal[string] )
def ensure_mapping_format(variables): """ ensure variables are in mapping format. Args: variables (list/dict): original variables Returns: dict: ensured variables in dict format Examples: >>> variables = [ {"a": 1}, {"b": 2} ] >>> print(ensure_mapping_format(variables)) { "a": 1, "b": 2 } """ if isinstance(variables, list): variables_dict = {} for map_dict in variables: variables_dict.update(map_dict) # depends on [control=['for'], data=['map_dict']] return variables_dict # depends on [control=['if'], data=[]] elif isinstance(variables, dict): return variables # depends on [control=['if'], data=[]] else: raise exceptions.ParamsError('variables format error!')
def asrgb(self, *args, **kwargs): """Read image data from file and return RGB image as numpy array.""" if self._keyframe is None: raise RuntimeError('keyframe not set') kwargs['validate'] = False return TiffPage.asrgb(self, *args, **kwargs)
def function[asrgb, parameter[self]]: constant[Read image data from file and return RGB image as numpy array.] if compare[name[self]._keyframe is constant[None]] begin[:] <ast.Raise object at 0x7da1b1836fb0> call[name[kwargs]][constant[validate]] assign[=] constant[False] return[call[name[TiffPage].asrgb, parameter[name[self], <ast.Starred object at 0x7da1b1837a60>]]]
keyword[def] identifier[asrgb] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[self] . identifier[_keyframe] keyword[is] keyword[None] : keyword[raise] identifier[RuntimeError] ( literal[string] ) identifier[kwargs] [ literal[string] ]= keyword[False] keyword[return] identifier[TiffPage] . identifier[asrgb] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] )
def asrgb(self, *args, **kwargs): """Read image data from file and return RGB image as numpy array.""" if self._keyframe is None: raise RuntimeError('keyframe not set') # depends on [control=['if'], data=[]] kwargs['validate'] = False return TiffPage.asrgb(self, *args, **kwargs)
def get_member_types(obj, member_name, prop_getter = False): """Still experimental, incomplete and hardly tested. Works like get_types, but is also applicable to descriptors. """ cls = obj.__class__ member = getattr(cls, member_name) slf = not (isinstance(member, staticmethod) or isinstance(member, classmethod)) clsm = isinstance(member, classmethod) return _get_types(member, clsm, slf, cls, prop_getter)
def function[get_member_types, parameter[obj, member_name, prop_getter]]: constant[Still experimental, incomplete and hardly tested. Works like get_types, but is also applicable to descriptors. ] variable[cls] assign[=] name[obj].__class__ variable[member] assign[=] call[name[getattr], parameter[name[cls], name[member_name]]] variable[slf] assign[=] <ast.UnaryOp object at 0x7da1b0ddbb80> variable[clsm] assign[=] call[name[isinstance], parameter[name[member], name[classmethod]]] return[call[name[_get_types], parameter[name[member], name[clsm], name[slf], name[cls], name[prop_getter]]]]
keyword[def] identifier[get_member_types] ( identifier[obj] , identifier[member_name] , identifier[prop_getter] = keyword[False] ): literal[string] identifier[cls] = identifier[obj] . identifier[__class__] identifier[member] = identifier[getattr] ( identifier[cls] , identifier[member_name] ) identifier[slf] = keyword[not] ( identifier[isinstance] ( identifier[member] , identifier[staticmethod] ) keyword[or] identifier[isinstance] ( identifier[member] , identifier[classmethod] )) identifier[clsm] = identifier[isinstance] ( identifier[member] , identifier[classmethod] ) keyword[return] identifier[_get_types] ( identifier[member] , identifier[clsm] , identifier[slf] , identifier[cls] , identifier[prop_getter] )
def get_member_types(obj, member_name, prop_getter=False): """Still experimental, incomplete and hardly tested. Works like get_types, but is also applicable to descriptors. """ cls = obj.__class__ member = getattr(cls, member_name) slf = not (isinstance(member, staticmethod) or isinstance(member, classmethod)) clsm = isinstance(member, classmethod) return _get_types(member, clsm, slf, cls, prop_getter)
def get_match_info(template, match, state): """ Given a template and a regex match within said template, return a dictionary of information about the match to be used to help parse the template. """ info = match.groupdict() # Put special delimiter cases in terms of normal ones if info['change']: info.update({ 'tag_type' : '=', 'tag_key' : info['delims'], }) elif info['raw']: info.update({ 'tag_type' : '&', 'tag_key' : info['raw_key'], }) # Rename the important match variables for convenience tag_start = match.start() tag_end = match.end() tag_type = info['tag_type'] tag_key = info['tag_key'] lead_wsp = info['lead_wsp'] end_wsp = info['end_wsp'] begins_line = (tag_start == 0) or (template[tag_start-1] in state.eol_chars) ends_line = (tag_end == len(template) or template[tag_end] in state.eol_chars) interpolating = (tag_type in ('', '&')) standalone = (not interpolating) and begins_line and ends_line if end_wsp: tag_end -= len(end_wsp) if standalone: template_length = len(template) # Standalone tags strip exactly one occurence of '\r', '\n', or '\r\n' # from the end of the line. if tag_end < len(template) and template[tag_end] == '\r': tag_end += 1 if tag_end < len(template) and template[tag_end] == '\n': tag_end += 1 elif lead_wsp: tag_start += len(lead_wsp) lead_wsp = '' info.update({ 'tag_start' : tag_start, 'tag_end' : tag_end, 'tag_type' : tag_type, 'tag_key' : tag_key, 'lead_wsp' : lead_wsp, 'end_wsp' : end_wsp, 'begins_line' : begins_line, 'ends_line' : ends_line, 'interpolating' : interpolating, 'standalone' : standalone, }) return info
def function[get_match_info, parameter[template, match, state]]: constant[ Given a template and a regex match within said template, return a dictionary of information about the match to be used to help parse the template. ] variable[info] assign[=] call[name[match].groupdict, parameter[]] if call[name[info]][constant[change]] begin[:] call[name[info].update, parameter[dictionary[[<ast.Constant object at 0x7da18f58c520>, <ast.Constant object at 0x7da18f58cfd0>], [<ast.Constant object at 0x7da18f58e4a0>, <ast.Subscript object at 0x7da18f58f1f0>]]]] variable[tag_start] assign[=] call[name[match].start, parameter[]] variable[tag_end] assign[=] call[name[match].end, parameter[]] variable[tag_type] assign[=] call[name[info]][constant[tag_type]] variable[tag_key] assign[=] call[name[info]][constant[tag_key]] variable[lead_wsp] assign[=] call[name[info]][constant[lead_wsp]] variable[end_wsp] assign[=] call[name[info]][constant[end_wsp]] variable[begins_line] assign[=] <ast.BoolOp object at 0x7da18f58cd90> variable[ends_line] assign[=] <ast.BoolOp object at 0x7da18dc055d0> variable[interpolating] assign[=] compare[name[tag_type] in tuple[[<ast.Constant object at 0x7da18dc05780>, <ast.Constant object at 0x7da18dc05900>]]] variable[standalone] assign[=] <ast.BoolOp object at 0x7da18dc05c90> if name[end_wsp] begin[:] <ast.AugAssign object at 0x7da18dc04340> if name[standalone] begin[:] variable[template_length] assign[=] call[name[len], parameter[name[template]]] if <ast.BoolOp object at 0x7da18dc06a40> begin[:] <ast.AugAssign object at 0x7da18dc07be0> if <ast.BoolOp object at 0x7da18dc04a90> begin[:] <ast.AugAssign object at 0x7da20e74b370> call[name[info].update, parameter[dictionary[[<ast.Constant object at 0x7da20e74ada0>, <ast.Constant object at 0x7da20e7486d0>, <ast.Constant object at 0x7da20e74b7f0>, <ast.Constant object at 0x7da20e748dc0>, <ast.Constant object at 0x7da20e74bd90>, <ast.Constant object at 0x7da20e74be20>, <ast.Constant object at 0x7da20e74b220>, 
<ast.Constant object at 0x7da20e74b040>, <ast.Constant object at 0x7da20e7485e0>, <ast.Constant object at 0x7da20e748580>], [<ast.Name object at 0x7da20e74ae60>, <ast.Name object at 0x7da20e749b40>, <ast.Name object at 0x7da20e74b1c0>, <ast.Name object at 0x7da20e7497b0>, <ast.Name object at 0x7da20e748a90>, <ast.Name object at 0x7da20e749720>, <ast.Name object at 0x7da20e74a8f0>, <ast.Name object at 0x7da20e74bb80>, <ast.Name object at 0x7da20e7489a0>, <ast.Name object at 0x7da20e7483a0>]]]] return[name[info]]
keyword[def] identifier[get_match_info] ( identifier[template] , identifier[match] , identifier[state] ): literal[string] identifier[info] = identifier[match] . identifier[groupdict] () keyword[if] identifier[info] [ literal[string] ]: identifier[info] . identifier[update] ({ literal[string] : literal[string] , literal[string] : identifier[info] [ literal[string] ], }) keyword[elif] identifier[info] [ literal[string] ]: identifier[info] . identifier[update] ({ literal[string] : literal[string] , literal[string] : identifier[info] [ literal[string] ], }) identifier[tag_start] = identifier[match] . identifier[start] () identifier[tag_end] = identifier[match] . identifier[end] () identifier[tag_type] = identifier[info] [ literal[string] ] identifier[tag_key] = identifier[info] [ literal[string] ] identifier[lead_wsp] = identifier[info] [ literal[string] ] identifier[end_wsp] = identifier[info] [ literal[string] ] identifier[begins_line] =( identifier[tag_start] == literal[int] ) keyword[or] ( identifier[template] [ identifier[tag_start] - literal[int] ] keyword[in] identifier[state] . identifier[eol_chars] ) identifier[ends_line] =( identifier[tag_end] == identifier[len] ( identifier[template] ) keyword[or] identifier[template] [ identifier[tag_end] ] keyword[in] identifier[state] . 
identifier[eol_chars] ) identifier[interpolating] =( identifier[tag_type] keyword[in] ( literal[string] , literal[string] )) identifier[standalone] =( keyword[not] identifier[interpolating] ) keyword[and] identifier[begins_line] keyword[and] identifier[ends_line] keyword[if] identifier[end_wsp] : identifier[tag_end] -= identifier[len] ( identifier[end_wsp] ) keyword[if] identifier[standalone] : identifier[template_length] = identifier[len] ( identifier[template] ) keyword[if] identifier[tag_end] < identifier[len] ( identifier[template] ) keyword[and] identifier[template] [ identifier[tag_end] ]== literal[string] : identifier[tag_end] += literal[int] keyword[if] identifier[tag_end] < identifier[len] ( identifier[template] ) keyword[and] identifier[template] [ identifier[tag_end] ]== literal[string] : identifier[tag_end] += literal[int] keyword[elif] identifier[lead_wsp] : identifier[tag_start] += identifier[len] ( identifier[lead_wsp] ) identifier[lead_wsp] = literal[string] identifier[info] . identifier[update] ({ literal[string] : identifier[tag_start] , literal[string] : identifier[tag_end] , literal[string] : identifier[tag_type] , literal[string] : identifier[tag_key] , literal[string] : identifier[lead_wsp] , literal[string] : identifier[end_wsp] , literal[string] : identifier[begins_line] , literal[string] : identifier[ends_line] , literal[string] : identifier[interpolating] , literal[string] : identifier[standalone] , }) keyword[return] identifier[info]
def get_match_info(template, match, state): """ Given a template and a regex match within said template, return a dictionary of information about the match to be used to help parse the template. """ info = match.groupdict() # Put special delimiter cases in terms of normal ones if info['change']: info.update({'tag_type': '=', 'tag_key': info['delims']}) # depends on [control=['if'], data=[]] elif info['raw']: info.update({'tag_type': '&', 'tag_key': info['raw_key']}) # depends on [control=['if'], data=[]] # Rename the important match variables for convenience tag_start = match.start() tag_end = match.end() tag_type = info['tag_type'] tag_key = info['tag_key'] lead_wsp = info['lead_wsp'] end_wsp = info['end_wsp'] begins_line = tag_start == 0 or template[tag_start - 1] in state.eol_chars ends_line = tag_end == len(template) or template[tag_end] in state.eol_chars interpolating = tag_type in ('', '&') standalone = not interpolating and begins_line and ends_line if end_wsp: tag_end -= len(end_wsp) # depends on [control=['if'], data=[]] if standalone: template_length = len(template) # Standalone tags strip exactly one occurence of '\r', '\n', or '\r\n' # from the end of the line. if tag_end < len(template) and template[tag_end] == '\r': tag_end += 1 # depends on [control=['if'], data=[]] if tag_end < len(template) and template[tag_end] == '\n': tag_end += 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif lead_wsp: tag_start += len(lead_wsp) lead_wsp = '' # depends on [control=['if'], data=[]] info.update({'tag_start': tag_start, 'tag_end': tag_end, 'tag_type': tag_type, 'tag_key': tag_key, 'lead_wsp': lead_wsp, 'end_wsp': end_wsp, 'begins_line': begins_line, 'ends_line': ends_line, 'interpolating': interpolating, 'standalone': standalone}) return info
def get_bounding_box(self): """ Returns the bounding box of the polygons. Returns ------- out : Numpy array[2,2] or ``None`` Bounding box of this polygon in the form [[x_min, y_min], [x_max, y_max]], or ``None`` if the polygon is empty. """ if len(self.polygons) == 0: return None return numpy.array(((min(pts[:, 0].min() for pts in self.polygons), min(pts[:, 1].min() for pts in self.polygons)), (max(pts[:, 0].max() for pts in self.polygons), max(pts[:, 1].max() for pts in self.polygons))))
def function[get_bounding_box, parameter[self]]: constant[ Returns the bounding box of the polygons. Returns ------- out : Numpy array[2,2] or ``None`` Bounding box of this polygon in the form [[x_min, y_min], [x_max, y_max]], or ``None`` if the polygon is empty. ] if compare[call[name[len], parameter[name[self].polygons]] equal[==] constant[0]] begin[:] return[constant[None]] return[call[name[numpy].array, parameter[tuple[[<ast.Tuple object at 0x7da20c6e77f0>, <ast.Tuple object at 0x7da20c6e61d0>]]]]]
keyword[def] identifier[get_bounding_box] ( identifier[self] ): literal[string] keyword[if] identifier[len] ( identifier[self] . identifier[polygons] )== literal[int] : keyword[return] keyword[None] keyword[return] identifier[numpy] . identifier[array] ((( identifier[min] ( identifier[pts] [:, literal[int] ]. identifier[min] () keyword[for] identifier[pts] keyword[in] identifier[self] . identifier[polygons] ), identifier[min] ( identifier[pts] [:, literal[int] ]. identifier[min] () keyword[for] identifier[pts] keyword[in] identifier[self] . identifier[polygons] )), ( identifier[max] ( identifier[pts] [:, literal[int] ]. identifier[max] () keyword[for] identifier[pts] keyword[in] identifier[self] . identifier[polygons] ), identifier[max] ( identifier[pts] [:, literal[int] ]. identifier[max] () keyword[for] identifier[pts] keyword[in] identifier[self] . identifier[polygons] ))))
def get_bounding_box(self): """ Returns the bounding box of the polygons. Returns ------- out : Numpy array[2,2] or ``None`` Bounding box of this polygon in the form [[x_min, y_min], [x_max, y_max]], or ``None`` if the polygon is empty. """ if len(self.polygons) == 0: return None # depends on [control=['if'], data=[]] return numpy.array(((min((pts[:, 0].min() for pts in self.polygons)), min((pts[:, 1].min() for pts in self.polygons))), (max((pts[:, 0].max() for pts in self.polygons)), max((pts[:, 1].max() for pts in self.polygons)))))
def measure_pure_state(prep_program, reference_state, quantum_resource, variance_bound=1.0E-6): """ Measure the coefficients of the pure state :param prep_program: pyQuil program to prepare the state :param reference_state: Integer of the computational basis state to use as a reference :param quantum_resource: An instance of a quantum abstract machine :param variance_bound: Default 1.0E-6. variance of the monte carlo estimator for the non-hermitian operator :return: an estimate of the wavefunction as a numpy.ndarray """ num_qubits = len(prep_program.get_qubits()) amplitudes_to_measure = list(range(2 ** num_qubits)) amplitudes = measure_wf_coefficients(prep_program, amplitudes_to_measure, reference_state, quantum_resource, variance_bound=variance_bound) wavefunction = np.asarray(amplitudes) return wavefunction.reshape((-1, 1))
def function[measure_pure_state, parameter[prep_program, reference_state, quantum_resource, variance_bound]]: constant[ Measure the coefficients of the pure state :param prep_program: pyQuil program to prepare the state :param reference_state: Integer of the computational basis state to use as a reference :param quantum_resource: An instance of a quantum abstract machine :param variance_bound: Default 1.0E-6. variance of the monte carlo estimator for the non-hermitian operator :return: an estimate of the wavefunction as a numpy.ndarray ] variable[num_qubits] assign[=] call[name[len], parameter[call[name[prep_program].get_qubits, parameter[]]]] variable[amplitudes_to_measure] assign[=] call[name[list], parameter[call[name[range], parameter[binary_operation[constant[2] ** name[num_qubits]]]]]] variable[amplitudes] assign[=] call[name[measure_wf_coefficients], parameter[name[prep_program], name[amplitudes_to_measure], name[reference_state], name[quantum_resource]]] variable[wavefunction] assign[=] call[name[np].asarray, parameter[name[amplitudes]]] return[call[name[wavefunction].reshape, parameter[tuple[[<ast.UnaryOp object at 0x7da2047ea140>, <ast.Constant object at 0x7da2047e8af0>]]]]]
keyword[def] identifier[measure_pure_state] ( identifier[prep_program] , identifier[reference_state] , identifier[quantum_resource] , identifier[variance_bound] = literal[int] ): literal[string] identifier[num_qubits] = identifier[len] ( identifier[prep_program] . identifier[get_qubits] ()) identifier[amplitudes_to_measure] = identifier[list] ( identifier[range] ( literal[int] ** identifier[num_qubits] )) identifier[amplitudes] = identifier[measure_wf_coefficients] ( identifier[prep_program] , identifier[amplitudes_to_measure] , identifier[reference_state] , identifier[quantum_resource] , identifier[variance_bound] = identifier[variance_bound] ) identifier[wavefunction] = identifier[np] . identifier[asarray] ( identifier[amplitudes] ) keyword[return] identifier[wavefunction] . identifier[reshape] ((- literal[int] , literal[int] ))
def measure_pure_state(prep_program, reference_state, quantum_resource, variance_bound=1e-06): """ Measure the coefficients of the pure state :param prep_program: pyQuil program to prepare the state :param reference_state: Integer of the computational basis state to use as a reference :param quantum_resource: An instance of a quantum abstract machine :param variance_bound: Default 1.0E-6. variance of the monte carlo estimator for the non-hermitian operator :return: an estimate of the wavefunction as a numpy.ndarray """ num_qubits = len(prep_program.get_qubits()) amplitudes_to_measure = list(range(2 ** num_qubits)) amplitudes = measure_wf_coefficients(prep_program, amplitudes_to_measure, reference_state, quantum_resource, variance_bound=variance_bound) wavefunction = np.asarray(amplitudes) return wavefunction.reshape((-1, 1))
def asDictionary(self): """ returns the object as a python dictionary """ # value = self._dict if value is None: template = { "hasM" : self._hasM, "hasZ" : self._hasZ, "points" : [], "spatialReference" : self.spatialReference } for pt in self._points: template['points'].append(pt.asList) self._dict = template return self._dict
def function[asDictionary, parameter[self]]: constant[ returns the object as a python dictionary ] variable[value] assign[=] name[self]._dict if compare[name[value] is constant[None]] begin[:] variable[template] assign[=] dictionary[[<ast.Constant object at 0x7da207f02500>, <ast.Constant object at 0x7da207f00af0>, <ast.Constant object at 0x7da207f03be0>, <ast.Constant object at 0x7da207f00d90>], [<ast.Attribute object at 0x7da207f038b0>, <ast.Attribute object at 0x7da207f01ea0>, <ast.List object at 0x7da207f01f30>, <ast.Attribute object at 0x7da207f002e0>]] for taget[name[pt]] in starred[name[self]._points] begin[:] call[call[name[template]][constant[points]].append, parameter[name[pt].asList]] name[self]._dict assign[=] name[template] return[name[self]._dict]
keyword[def] identifier[asDictionary] ( identifier[self] ): literal[string] identifier[value] = identifier[self] . identifier[_dict] keyword[if] identifier[value] keyword[is] keyword[None] : identifier[template] ={ literal[string] : identifier[self] . identifier[_hasM] , literal[string] : identifier[self] . identifier[_hasZ] , literal[string] :[], literal[string] : identifier[self] . identifier[spatialReference] } keyword[for] identifier[pt] keyword[in] identifier[self] . identifier[_points] : identifier[template] [ literal[string] ]. identifier[append] ( identifier[pt] . identifier[asList] ) identifier[self] . identifier[_dict] = identifier[template] keyword[return] identifier[self] . identifier[_dict]
def asDictionary(self): """ returns the object as a python dictionary """ # value = self._dict if value is None: template = {'hasM': self._hasM, 'hasZ': self._hasZ, 'points': [], 'spatialReference': self.spatialReference} for pt in self._points: template['points'].append(pt.asList) # depends on [control=['for'], data=['pt']] self._dict = template # depends on [control=['if'], data=[]] return self._dict
def open(self): ''' Open notification or quick settings. Usage: d.open.notification() d.open.quick_settings() ''' @param_to_property(action=["notification", "quick_settings"]) def _open(action): if action == "notification": return self.server.jsonrpc.openNotification() else: return self.server.jsonrpc.openQuickSettings() return _open
def function[open, parameter[self]]: constant[ Open notification or quick settings. Usage: d.open.notification() d.open.quick_settings() ] def function[_open, parameter[action]]: if compare[name[action] equal[==] constant[notification]] begin[:] return[call[name[self].server.jsonrpc.openNotification, parameter[]]] return[name[_open]]
keyword[def] identifier[open] ( identifier[self] ): literal[string] @ identifier[param_to_property] ( identifier[action] =[ literal[string] , literal[string] ]) keyword[def] identifier[_open] ( identifier[action] ): keyword[if] identifier[action] == literal[string] : keyword[return] identifier[self] . identifier[server] . identifier[jsonrpc] . identifier[openNotification] () keyword[else] : keyword[return] identifier[self] . identifier[server] . identifier[jsonrpc] . identifier[openQuickSettings] () keyword[return] identifier[_open]
def open(self): """ Open notification or quick settings. Usage: d.open.notification() d.open.quick_settings() """ @param_to_property(action=['notification', 'quick_settings']) def _open(action): if action == 'notification': return self.server.jsonrpc.openNotification() # depends on [control=['if'], data=[]] else: return self.server.jsonrpc.openQuickSettings() return _open
def require_python(minimum): """Require at least a minimum Python version. The version number is expressed in terms of `sys.hexversion`. E.g. to require a minimum of Python 2.6, use:: >>> require_python(0x206000f0) :param minimum: Minimum Python version supported. :type minimum: integer """ if sys.hexversion < minimum: hversion = hex(minimum)[2:] if len(hversion) % 2 != 0: hversion = '0' + hversion split = list(hversion) parts = [] while split: parts.append(int(''.join((split.pop(0), split.pop(0))), 16)) major, minor, micro, release = parts if release == 0xf0: print('Python {0}.{1}.{2} or better is required'.format( major, minor, micro)) else: print('Python {0}.{1}.{2} ({3}) or better is required'.format( major, minor, micro, hex(release)[2:])) sys.exit(1)
def function[require_python, parameter[minimum]]: constant[Require at least a minimum Python version. The version number is expressed in terms of `sys.hexversion`. E.g. to require a minimum of Python 2.6, use:: >>> require_python(0x206000f0) :param minimum: Minimum Python version supported. :type minimum: integer ] if compare[name[sys].hexversion less[<] name[minimum]] begin[:] variable[hversion] assign[=] call[call[name[hex], parameter[name[minimum]]]][<ast.Slice object at 0x7da20c6aa140>] if compare[binary_operation[call[name[len], parameter[name[hversion]]] <ast.Mod object at 0x7da2590d6920> constant[2]] not_equal[!=] constant[0]] begin[:] variable[hversion] assign[=] binary_operation[constant[0] + name[hversion]] variable[split] assign[=] call[name[list], parameter[name[hversion]]] variable[parts] assign[=] list[[]] while name[split] begin[:] call[name[parts].append, parameter[call[name[int], parameter[call[constant[].join, parameter[tuple[[<ast.Call object at 0x7da20c6aad40>, <ast.Call object at 0x7da20c6a9a80>]]]], constant[16]]]]] <ast.Tuple object at 0x7da1b14d7610> assign[=] name[parts] if compare[name[release] equal[==] constant[240]] begin[:] call[name[print], parameter[call[constant[Python {0}.{1}.{2} or better is required].format, parameter[name[major], name[minor], name[micro]]]]] call[name[sys].exit, parameter[constant[1]]]
keyword[def] identifier[require_python] ( identifier[minimum] ): literal[string] keyword[if] identifier[sys] . identifier[hexversion] < identifier[minimum] : identifier[hversion] = identifier[hex] ( identifier[minimum] )[ literal[int] :] keyword[if] identifier[len] ( identifier[hversion] )% literal[int] != literal[int] : identifier[hversion] = literal[string] + identifier[hversion] identifier[split] = identifier[list] ( identifier[hversion] ) identifier[parts] =[] keyword[while] identifier[split] : identifier[parts] . identifier[append] ( identifier[int] ( literal[string] . identifier[join] (( identifier[split] . identifier[pop] ( literal[int] ), identifier[split] . identifier[pop] ( literal[int] ))), literal[int] )) identifier[major] , identifier[minor] , identifier[micro] , identifier[release] = identifier[parts] keyword[if] identifier[release] == literal[int] : identifier[print] ( literal[string] . identifier[format] ( identifier[major] , identifier[minor] , identifier[micro] )) keyword[else] : identifier[print] ( literal[string] . identifier[format] ( identifier[major] , identifier[minor] , identifier[micro] , identifier[hex] ( identifier[release] )[ literal[int] :])) identifier[sys] . identifier[exit] ( literal[int] )
def require_python(minimum): """Require at least a minimum Python version. The version number is expressed in terms of `sys.hexversion`. E.g. to require a minimum of Python 2.6, use:: >>> require_python(0x206000f0) :param minimum: Minimum Python version supported. :type minimum: integer """ if sys.hexversion < minimum: hversion = hex(minimum)[2:] if len(hversion) % 2 != 0: hversion = '0' + hversion # depends on [control=['if'], data=[]] split = list(hversion) parts = [] while split: parts.append(int(''.join((split.pop(0), split.pop(0))), 16)) # depends on [control=['while'], data=[]] (major, minor, micro, release) = parts if release == 240: print('Python {0}.{1}.{2} or better is required'.format(major, minor, micro)) # depends on [control=['if'], data=[]] else: print('Python {0}.{1}.{2} ({3}) or better is required'.format(major, minor, micro, hex(release)[2:])) sys.exit(1) # depends on [control=['if'], data=['minimum']]
def load(self, query=None, search_in_source_files=False): """Load all vcard files in this address book from disk. If a search string is given only files which contents match that will be loaded. :param query: a regular expression to limit the results :type query: str :param search_in_source_files: apply search regexp directly on the .vcf files to speed up parsing (less accurate) :type search_in_source_files: bool :returns: the number of successfully loaded cards and the number of errors :rtype: int, int :throws: AddressBookParseError """ if self._loaded: return logging.debug('Loading Vdir %s with query %s', self.name, query) errors = 0 for filename in self._find_vcard_files( search=query, search_in_source_files=search_in_source_files): try: card = CarddavObject.from_file(self, filename, self._private_objects, self._localize_dates) except (IOError, vobject.base.ParseError) as err: verb = "open" if isinstance(err, IOError) else "parse" logging.debug("Error: Could not %s file %s\n%s", verb, filename, err) if self._skip: errors += 1 else: # FIXME: This should throw an apropriate exception and the # sys.exit should be called somewhere closer to the command # line parsing. logging.error( "The vcard file %s of address book %s could not be " "parsed\nUse --debug for more information or " "--skip-unparsable to proceed", filename, self.name) sys.exit(2) else: uid = card.get_uid() if not uid: logging.warning("Card %s from address book %s has no UID " "and will not be availbale.", card, self.name) elif uid in self.contacts: logging.warning( "Card %s and %s from address book %s have the same " "UID. The former will not be availbale.", card, self.contacts[uid], self.name) else: self.contacts[uid] = card self._loaded = True if errors: logging.warning( "%d of %d vCard files of address book %s could not be parsed.", errors, len(self.contacts) + errors, self) logging.debug('Loded %s contacts from address book %s.', len(self.contacts), self.name)
def function[load, parameter[self, query, search_in_source_files]]: constant[Load all vcard files in this address book from disk. If a search string is given only files which contents match that will be loaded. :param query: a regular expression to limit the results :type query: str :param search_in_source_files: apply search regexp directly on the .vcf files to speed up parsing (less accurate) :type search_in_source_files: bool :returns: the number of successfully loaded cards and the number of errors :rtype: int, int :throws: AddressBookParseError ] if name[self]._loaded begin[:] return[None] call[name[logging].debug, parameter[constant[Loading Vdir %s with query %s], name[self].name, name[query]]] variable[errors] assign[=] constant[0] for taget[name[filename]] in starred[call[name[self]._find_vcard_files, parameter[]]] begin[:] <ast.Try object at 0x7da1b26ae590> name[self]._loaded assign[=] constant[True] if name[errors] begin[:] call[name[logging].warning, parameter[constant[%d of %d vCard files of address book %s could not be parsed.], name[errors], binary_operation[call[name[len], parameter[name[self].contacts]] + name[errors]], name[self]]] call[name[logging].debug, parameter[constant[Loded %s contacts from address book %s.], call[name[len], parameter[name[self].contacts]], name[self].name]]
keyword[def] identifier[load] ( identifier[self] , identifier[query] = keyword[None] , identifier[search_in_source_files] = keyword[False] ): literal[string] keyword[if] identifier[self] . identifier[_loaded] : keyword[return] identifier[logging] . identifier[debug] ( literal[string] , identifier[self] . identifier[name] , identifier[query] ) identifier[errors] = literal[int] keyword[for] identifier[filename] keyword[in] identifier[self] . identifier[_find_vcard_files] ( identifier[search] = identifier[query] , identifier[search_in_source_files] = identifier[search_in_source_files] ): keyword[try] : identifier[card] = identifier[CarddavObject] . identifier[from_file] ( identifier[self] , identifier[filename] , identifier[self] . identifier[_private_objects] , identifier[self] . identifier[_localize_dates] ) keyword[except] ( identifier[IOError] , identifier[vobject] . identifier[base] . identifier[ParseError] ) keyword[as] identifier[err] : identifier[verb] = literal[string] keyword[if] identifier[isinstance] ( identifier[err] , identifier[IOError] ) keyword[else] literal[string] identifier[logging] . identifier[debug] ( literal[string] , identifier[verb] , identifier[filename] , identifier[err] ) keyword[if] identifier[self] . identifier[_skip] : identifier[errors] += literal[int] keyword[else] : identifier[logging] . identifier[error] ( literal[string] literal[string] literal[string] , identifier[filename] , identifier[self] . identifier[name] ) identifier[sys] . identifier[exit] ( literal[int] ) keyword[else] : identifier[uid] = identifier[card] . identifier[get_uid] () keyword[if] keyword[not] identifier[uid] : identifier[logging] . identifier[warning] ( literal[string] literal[string] , identifier[card] , identifier[self] . identifier[name] ) keyword[elif] identifier[uid] keyword[in] identifier[self] . identifier[contacts] : identifier[logging] . identifier[warning] ( literal[string] literal[string] , identifier[card] , identifier[self] . 
identifier[contacts] [ identifier[uid] ], identifier[self] . identifier[name] ) keyword[else] : identifier[self] . identifier[contacts] [ identifier[uid] ]= identifier[card] identifier[self] . identifier[_loaded] = keyword[True] keyword[if] identifier[errors] : identifier[logging] . identifier[warning] ( literal[string] , identifier[errors] , identifier[len] ( identifier[self] . identifier[contacts] )+ identifier[errors] , identifier[self] ) identifier[logging] . identifier[debug] ( literal[string] , identifier[len] ( identifier[self] . identifier[contacts] ), identifier[self] . identifier[name] )
def load(self, query=None, search_in_source_files=False): """Load all vcard files in this address book from disk. If a search string is given only files which contents match that will be loaded. :param query: a regular expression to limit the results :type query: str :param search_in_source_files: apply search regexp directly on the .vcf files to speed up parsing (less accurate) :type search_in_source_files: bool :returns: the number of successfully loaded cards and the number of errors :rtype: int, int :throws: AddressBookParseError """ if self._loaded: return # depends on [control=['if'], data=[]] logging.debug('Loading Vdir %s with query %s', self.name, query) errors = 0 for filename in self._find_vcard_files(search=query, search_in_source_files=search_in_source_files): try: card = CarddavObject.from_file(self, filename, self._private_objects, self._localize_dates) # depends on [control=['try'], data=[]] except (IOError, vobject.base.ParseError) as err: verb = 'open' if isinstance(err, IOError) else 'parse' logging.debug('Error: Could not %s file %s\n%s', verb, filename, err) if self._skip: errors += 1 # depends on [control=['if'], data=[]] else: # FIXME: This should throw an apropriate exception and the # sys.exit should be called somewhere closer to the command # line parsing. logging.error('The vcard file %s of address book %s could not be parsed\nUse --debug for more information or --skip-unparsable to proceed', filename, self.name) sys.exit(2) # depends on [control=['except'], data=['err']] else: uid = card.get_uid() if not uid: logging.warning('Card %s from address book %s has no UID and will not be availbale.', card, self.name) # depends on [control=['if'], data=[]] elif uid in self.contacts: logging.warning('Card %s and %s from address book %s have the same UID. 
The former will not be availbale.', card, self.contacts[uid], self.name) # depends on [control=['if'], data=['uid']] else: self.contacts[uid] = card # depends on [control=['for'], data=['filename']] self._loaded = True if errors: logging.warning('%d of %d vCard files of address book %s could not be parsed.', errors, len(self.contacts) + errors, self) # depends on [control=['if'], data=[]] logging.debug('Loded %s contacts from address book %s.', len(self.contacts), self.name)
def autoLayout( self, padX = None, padY = None, direction = Qt.Horizontal, layout = 'Layered', animate = 0, centerOn = None, center = None, debug=False ): """ Automatically lays out all the nodes in the scene using the \ autoLayoutNodes method. :param padX | <int> || None | default is 2 * cell width padY | <int> || None | default is 2 * cell height direction | <Qt.Direction> layout | <str> | name of the layout plugin to use animate | <int> | number of seconds to animate over :return {<XNode>: <QRectF>, ..} | new rects per affected node """ return self.autoLayoutNodes(self.nodes(), padX, padY, direction, layout, animate, centerOn, center, debug)
def function[autoLayout, parameter[self, padX, padY, direction, layout, animate, centerOn, center, debug]]: constant[ Automatically lays out all the nodes in the scene using the autoLayoutNodes method. :param padX | <int> || None | default is 2 * cell width padY | <int> || None | default is 2 * cell height direction | <Qt.Direction> layout | <str> | name of the layout plugin to use animate | <int> | number of seconds to animate over :return {<XNode>: <QRectF>, ..} | new rects per affected node ] return[call[name[self].autoLayoutNodes, parameter[call[name[self].nodes, parameter[]], name[padX], name[padY], name[direction], name[layout], name[animate], name[centerOn], name[center], name[debug]]]]
keyword[def] identifier[autoLayout] ( identifier[self] , identifier[padX] = keyword[None] , identifier[padY] = keyword[None] , identifier[direction] = identifier[Qt] . identifier[Horizontal] , identifier[layout] = literal[string] , identifier[animate] = literal[int] , identifier[centerOn] = keyword[None] , identifier[center] = keyword[None] , identifier[debug] = keyword[False] ): literal[string] keyword[return] identifier[self] . identifier[autoLayoutNodes] ( identifier[self] . identifier[nodes] (), identifier[padX] , identifier[padY] , identifier[direction] , identifier[layout] , identifier[animate] , identifier[centerOn] , identifier[center] , identifier[debug] )
def autoLayout(self, padX=None, padY=None, direction=Qt.Horizontal, layout='Layered', animate=0, centerOn=None, center=None, debug=False): """ Automatically lays out all the nodes in the scene using the autoLayoutNodes method. :param padX | <int> || None | default is 2 * cell width padY | <int> || None | default is 2 * cell height direction | <Qt.Direction> layout | <str> | name of the layout plugin to use animate | <int> | number of seconds to animate over :return {<XNode>: <QRectF>, ..} | new rects per affected node """ return self.autoLayoutNodes(self.nodes(), padX, padY, direction, layout, animate, centerOn, center, debug)
def toc(quiet=False): r""" Homemade version of matlab tic and toc function, tic starts or resets the clock, toc reports the time since the last call of tic. Parameters ---------- quiet : Boolean If False (default) then a message is output to the console. If True the message is not displayed and the elapsed time is returned. See Also -------- tic """ if '_startTime_for_tictoc' in globals(): t = _time.time() - _startTime_for_tictoc if quiet is False: print('Elapsed time in seconds: ', t) else: return t else: raise Exception('Start time not set, call tic first')
def function[toc, parameter[quiet]]: constant[ Homemade version of matlab tic and toc function, tic starts or resets the clock, toc reports the time since the last call of tic. Parameters ---------- quiet : Boolean If False (default) then a message is output to the console. If True the message is not displayed and the elapsed time is returned. See Also -------- tic ] if compare[constant[_startTime_for_tictoc] in call[name[globals], parameter[]]] begin[:] variable[t] assign[=] binary_operation[call[name[_time].time, parameter[]] - name[_startTime_for_tictoc]] if compare[name[quiet] is constant[False]] begin[:] call[name[print], parameter[constant[Elapsed time in seconds: ], name[t]]]
keyword[def] identifier[toc] ( identifier[quiet] = keyword[False] ): literal[string] keyword[if] literal[string] keyword[in] identifier[globals] (): identifier[t] = identifier[_time] . identifier[time] ()- identifier[_startTime_for_tictoc] keyword[if] identifier[quiet] keyword[is] keyword[False] : identifier[print] ( literal[string] , identifier[t] ) keyword[else] : keyword[return] identifier[t] keyword[else] : keyword[raise] identifier[Exception] ( literal[string] )
def toc(quiet=False): """ Homemade version of matlab tic and toc function, tic starts or resets the clock, toc reports the time since the last call of tic. Parameters ---------- quiet : Boolean If False (default) then a message is output to the console. If True the message is not displayed and the elapsed time is returned. See Also -------- tic """ if '_startTime_for_tictoc' in globals(): t = _time.time() - _startTime_for_tictoc if quiet is False: print('Elapsed time in seconds: ', t) # depends on [control=['if'], data=[]] else: return t # depends on [control=['if'], data=[]] else: raise Exception('Start time not set, call tic first')
def get_content(self, obj): """All content for a state's page on an election day.""" election_day = ElectionDay.objects.get( date=self.context['election_date']) division = obj # In case of house special election, # use parent division. if obj.level.name == DivisionLevel.DISTRICT: division = obj.parent special = True if self.context.get('special') else False return PageContent.objects.division_content( election_day, division, special )
def function[get_content, parameter[self, obj]]: constant[All content for a state's page on an election day.] variable[election_day] assign[=] call[name[ElectionDay].objects.get, parameter[]] variable[division] assign[=] name[obj] if compare[name[obj].level.name equal[==] name[DivisionLevel].DISTRICT] begin[:] variable[division] assign[=] name[obj].parent variable[special] assign[=] <ast.IfExp object at 0x7da1b1f212a0> return[call[name[PageContent].objects.division_content, parameter[name[election_day], name[division], name[special]]]]
keyword[def] identifier[get_content] ( identifier[self] , identifier[obj] ): literal[string] identifier[election_day] = identifier[ElectionDay] . identifier[objects] . identifier[get] ( identifier[date] = identifier[self] . identifier[context] [ literal[string] ]) identifier[division] = identifier[obj] keyword[if] identifier[obj] . identifier[level] . identifier[name] == identifier[DivisionLevel] . identifier[DISTRICT] : identifier[division] = identifier[obj] . identifier[parent] identifier[special] = keyword[True] keyword[if] identifier[self] . identifier[context] . identifier[get] ( literal[string] ) keyword[else] keyword[False] keyword[return] identifier[PageContent] . identifier[objects] . identifier[division_content] ( identifier[election_day] , identifier[division] , identifier[special] )
def get_content(self, obj): """All content for a state's page on an election day.""" election_day = ElectionDay.objects.get(date=self.context['election_date']) division = obj # In case of house special election, # use parent division. if obj.level.name == DivisionLevel.DISTRICT: division = obj.parent # depends on [control=['if'], data=[]] special = True if self.context.get('special') else False return PageContent.objects.division_content(election_day, division, special)
def analyze(problem, X, Y, num_resamples=1000, conf_level=0.95, print_to_console=False, num_levels=4, seed=None): """Perform Morris Analysis on model outputs. Returns a dictionary with keys 'mu', 'mu_star', 'sigma', and 'mu_star_conf', where each entry is a list of parameters containing the indices in the same order as the parameter file. Arguments --------- problem : dict The problem definition X : numpy.matrix The NumPy matrix containing the model inputs of dtype=float Y : numpy.array The NumPy array containing the model outputs of dtype=float num_resamples : int The number of resamples used to compute the confidence intervals (default 1000) conf_level : float The confidence interval level (default 0.95) print_to_console : bool Print results directly to console (default False) num_levels : int The number of grid levels, must be identical to the value passed to SALib.sample.morris (default 4) Returns ------- Si : dict A dictionary of sensitivity indices containing the following entries. - `mu` - the mean elementary effect - `mu_star` - the absolute of the mean elementary effect - `sigma` - the standard deviation of the elementary effect - `mu_star_conf` - the bootstrapped confidence interval - `names` - the names of the parameters References ---------- .. [1] Morris, M. (1991). "Factorial Sampling Plans for Preliminary Computational Experiments." Technometrics, 33(2):161-174, doi:10.1080/00401706.1991.10484804. .. [2] Campolongo, F., J. Cariboni, and A. Saltelli (2007). "An effective screening design for sensitivity analysis of large models." Environmental Modelling & Software, 22(10):1509-1518, doi:10.1016/j.envsoft.2006.10.004. 
Examples -------- >>> X = morris.sample(problem, 1000, num_levels=4) >>> Y = Ishigami.evaluate(X) >>> Si = morris.analyze(problem, X, Y, conf_level=0.95, >>> print_to_console=True, num_levels=4) """ if seed: np.random.seed(seed) msg = ("dtype of {} array must be 'float', float32 or float64") if X.dtype not in ['float', 'float32', 'float64']: raise ValueError(msg.format('X')) if Y.dtype not in ['float', 'float32', 'float64']: raise ValueError(msg.format('Y')) # Assume that there are no groups groups = None delta = compute_delta(num_levels) num_vars = problem['num_vars'] if (problem.get('groups') is None) & (Y.size % (num_vars + 1) == 0): num_trajectories = int(Y.size / (num_vars + 1)) elif problem.get('groups') is not None: groups, unique_group_names = compute_groups_matrix( problem['groups']) number_of_groups = len(unique_group_names) num_trajectories = int(Y.size / (number_of_groups + 1)) else: raise ValueError("Number of samples in model output file must be" "a multiple of (D+1), where D is the number of" "parameters (or groups) in your parameter file.") ee = np.zeros((num_vars, num_trajectories)) ee = compute_elementary_effects( X, Y, int(Y.size / num_trajectories), delta) # Output the Mu, Mu*, and Sigma Values. 
Also return them in case this is # being called from Python Si = ResultDict((k, [None] * num_vars) for k in ['names', 'mu', 'mu_star', 'sigma', 'mu_star_conf']) Si['mu'] = np.average(ee, 1) Si['mu_star'] = np.average(np.abs(ee), 1) Si['sigma'] = np.std(ee, axis=1, ddof=1) Si['names'] = problem['names'] for j in range(num_vars): Si['mu_star_conf'][j] = compute_mu_star_confidence( ee[j, :], num_trajectories, num_resamples, conf_level) if groups is None: if print_to_console: print("{0:<30} {1:>10} {2:>10} {3:>15} {4:>10}".format( "Parameter", "Mu_Star", "Mu", "Mu_Star_Conf", "Sigma") ) for j in list(range(num_vars)): print("{0:30} {1:10.3f} {2:10.3f} {3:15.3f} {4:10.3f}".format( Si['names'][j], Si['mu_star'][j], Si['mu'][j], Si['mu_star_conf'][j], Si['sigma'][j]) ) return Si elif groups is not None: # if there are groups, then the elementary effects returned need to be # computed over the groups of variables, # rather than the individual variables Si_grouped = dict((k, [None] * num_vars) for k in ['mu_star', 'mu_star_conf']) Si_grouped['mu_star'] = compute_grouped_metric(Si['mu_star'], groups) Si_grouped['mu_star_conf'] = compute_grouped_metric(Si['mu_star_conf'], groups) Si_grouped['names'] = unique_group_names Si_grouped['sigma'] = compute_grouped_sigma(Si['sigma'], groups) Si_grouped['mu'] = compute_grouped_sigma(Si['mu'], groups) if print_to_console: print("{0:<30} {1:>10} {2:>10} {3:>15} {4:>10}".format( "Parameter", "Mu_Star", "Mu", "Mu_Star_Conf", "Sigma") ) for j in list(range(number_of_groups)): print("{0:30} {1:10.3f} {2:10.3f} {3:15.3f} {4:10.3f}".format( Si_grouped['names'][j], Si_grouped['mu_star'][j], Si_grouped['mu'][j], Si_grouped['mu_star_conf'][j], Si_grouped['sigma'][j]) ) return Si_grouped else: raise RuntimeError( "Could not determine which parameters should be returned")
def function[analyze, parameter[problem, X, Y, num_resamples, conf_level, print_to_console, num_levels, seed]]: constant[Perform Morris Analysis on model outputs. Returns a dictionary with keys 'mu', 'mu_star', 'sigma', and 'mu_star_conf', where each entry is a list of parameters containing the indices in the same order as the parameter file. Arguments --------- problem : dict The problem definition X : numpy.matrix The NumPy matrix containing the model inputs of dtype=float Y : numpy.array The NumPy array containing the model outputs of dtype=float num_resamples : int The number of resamples used to compute the confidence intervals (default 1000) conf_level : float The confidence interval level (default 0.95) print_to_console : bool Print results directly to console (default False) num_levels : int The number of grid levels, must be identical to the value passed to SALib.sample.morris (default 4) Returns ------- Si : dict A dictionary of sensitivity indices containing the following entries. - `mu` - the mean elementary effect - `mu_star` - the absolute of the mean elementary effect - `sigma` - the standard deviation of the elementary effect - `mu_star_conf` - the bootstrapped confidence interval - `names` - the names of the parameters References ---------- .. [1] Morris, M. (1991). "Factorial Sampling Plans for Preliminary Computational Experiments." Technometrics, 33(2):161-174, doi:10.1080/00401706.1991.10484804. .. [2] Campolongo, F., J. Cariboni, and A. Saltelli (2007). "An effective screening design for sensitivity analysis of large models." Environmental Modelling & Software, 22(10):1509-1518, doi:10.1016/j.envsoft.2006.10.004. 
Examples -------- >>> X = morris.sample(problem, 1000, num_levels=4) >>> Y = Ishigami.evaluate(X) >>> Si = morris.analyze(problem, X, Y, conf_level=0.95, >>> print_to_console=True, num_levels=4) ] if name[seed] begin[:] call[name[np].random.seed, parameter[name[seed]]] variable[msg] assign[=] constant[dtype of {} array must be 'float', float32 or float64] if compare[name[X].dtype <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da1b18c9840>, <ast.Constant object at 0x7da1b18ca710>, <ast.Constant object at 0x7da1b18c86a0>]]] begin[:] <ast.Raise object at 0x7da1b18c98a0> if compare[name[Y].dtype <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da1b18cbb20>, <ast.Constant object at 0x7da1b18cbac0>, <ast.Constant object at 0x7da1b18cac80>]]] begin[:] <ast.Raise object at 0x7da1b18cac50> variable[groups] assign[=] constant[None] variable[delta] assign[=] call[name[compute_delta], parameter[name[num_levels]]] variable[num_vars] assign[=] call[name[problem]][constant[num_vars]] if binary_operation[compare[call[name[problem].get, parameter[constant[groups]]] is constant[None]] <ast.BitAnd object at 0x7da2590d6b60> compare[binary_operation[name[Y].size <ast.Mod object at 0x7da2590d6920> binary_operation[name[num_vars] + constant[1]]] equal[==] constant[0]]] begin[:] variable[num_trajectories] assign[=] call[name[int], parameter[binary_operation[name[Y].size / binary_operation[name[num_vars] + constant[1]]]]] variable[ee] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b18c84f0>, <ast.Name object at 0x7da1b18ca740>]]]] variable[ee] assign[=] call[name[compute_elementary_effects], parameter[name[X], name[Y], call[name[int], parameter[binary_operation[name[Y].size / name[num_trajectories]]]], name[delta]]] variable[Si] assign[=] call[name[ResultDict], parameter[<ast.GeneratorExp object at 0x7da1b18ca590>]] call[name[Si]][constant[mu]] assign[=] call[name[np].average, parameter[name[ee], constant[1]]] 
call[name[Si]][constant[mu_star]] assign[=] call[name[np].average, parameter[call[name[np].abs, parameter[name[ee]]], constant[1]]] call[name[Si]][constant[sigma]] assign[=] call[name[np].std, parameter[name[ee]]] call[name[Si]][constant[names]] assign[=] call[name[problem]][constant[names]] for taget[name[j]] in starred[call[name[range], parameter[name[num_vars]]]] begin[:] call[call[name[Si]][constant[mu_star_conf]]][name[j]] assign[=] call[name[compute_mu_star_confidence], parameter[call[name[ee]][tuple[[<ast.Name object at 0x7da1b18c93c0>, <ast.Slice object at 0x7da1b18c9450>]]], name[num_trajectories], name[num_resamples], name[conf_level]]] if compare[name[groups] is constant[None]] begin[:] if name[print_to_console] begin[:] call[name[print], parameter[call[constant[{0:<30} {1:>10} {2:>10} {3:>15} {4:>10}].format, parameter[constant[Parameter], constant[Mu_Star], constant[Mu], constant[Mu_Star_Conf], constant[Sigma]]]]] for taget[name[j]] in starred[call[name[list], parameter[call[name[range], parameter[name[num_vars]]]]]] begin[:] call[name[print], parameter[call[constant[{0:30} {1:10.3f} {2:10.3f} {3:15.3f} {4:10.3f}].format, parameter[call[call[name[Si]][constant[names]]][name[j]], call[call[name[Si]][constant[mu_star]]][name[j]], call[call[name[Si]][constant[mu]]][name[j]], call[call[name[Si]][constant[mu_star_conf]]][name[j]], call[call[name[Si]][constant[sigma]]][name[j]]]]]] return[name[Si]]
keyword[def] identifier[analyze] ( identifier[problem] , identifier[X] , identifier[Y] , identifier[num_resamples] = literal[int] , identifier[conf_level] = literal[int] , identifier[print_to_console] = keyword[False] , identifier[num_levels] = literal[int] , identifier[seed] = keyword[None] ): literal[string] keyword[if] identifier[seed] : identifier[np] . identifier[random] . identifier[seed] ( identifier[seed] ) identifier[msg] =( literal[string] ) keyword[if] identifier[X] . identifier[dtype] keyword[not] keyword[in] [ literal[string] , literal[string] , literal[string] ]: keyword[raise] identifier[ValueError] ( identifier[msg] . identifier[format] ( literal[string] )) keyword[if] identifier[Y] . identifier[dtype] keyword[not] keyword[in] [ literal[string] , literal[string] , literal[string] ]: keyword[raise] identifier[ValueError] ( identifier[msg] . identifier[format] ( literal[string] )) identifier[groups] = keyword[None] identifier[delta] = identifier[compute_delta] ( identifier[num_levels] ) identifier[num_vars] = identifier[problem] [ literal[string] ] keyword[if] ( identifier[problem] . identifier[get] ( literal[string] ) keyword[is] keyword[None] )&( identifier[Y] . identifier[size] %( identifier[num_vars] + literal[int] )== literal[int] ): identifier[num_trajectories] = identifier[int] ( identifier[Y] . identifier[size] /( identifier[num_vars] + literal[int] )) keyword[elif] identifier[problem] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] : identifier[groups] , identifier[unique_group_names] = identifier[compute_groups_matrix] ( identifier[problem] [ literal[string] ]) identifier[number_of_groups] = identifier[len] ( identifier[unique_group_names] ) identifier[num_trajectories] = identifier[int] ( identifier[Y] . identifier[size] /( identifier[number_of_groups] + literal[int] )) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] literal[string] ) identifier[ee] = identifier[np] . 
identifier[zeros] (( identifier[num_vars] , identifier[num_trajectories] )) identifier[ee] = identifier[compute_elementary_effects] ( identifier[X] , identifier[Y] , identifier[int] ( identifier[Y] . identifier[size] / identifier[num_trajectories] ), identifier[delta] ) identifier[Si] = identifier[ResultDict] (( identifier[k] ,[ keyword[None] ]* identifier[num_vars] ) keyword[for] identifier[k] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]) identifier[Si] [ literal[string] ]= identifier[np] . identifier[average] ( identifier[ee] , literal[int] ) identifier[Si] [ literal[string] ]= identifier[np] . identifier[average] ( identifier[np] . identifier[abs] ( identifier[ee] ), literal[int] ) identifier[Si] [ literal[string] ]= identifier[np] . identifier[std] ( identifier[ee] , identifier[axis] = literal[int] , identifier[ddof] = literal[int] ) identifier[Si] [ literal[string] ]= identifier[problem] [ literal[string] ] keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[num_vars] ): identifier[Si] [ literal[string] ][ identifier[j] ]= identifier[compute_mu_star_confidence] ( identifier[ee] [ identifier[j] ,:], identifier[num_trajectories] , identifier[num_resamples] , identifier[conf_level] ) keyword[if] identifier[groups] keyword[is] keyword[None] : keyword[if] identifier[print_to_console] : identifier[print] ( literal[string] . identifier[format] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ) ) keyword[for] identifier[j] keyword[in] identifier[list] ( identifier[range] ( identifier[num_vars] )): identifier[print] ( literal[string] . 
identifier[format] ( identifier[Si] [ literal[string] ][ identifier[j] ], identifier[Si] [ literal[string] ][ identifier[j] ], identifier[Si] [ literal[string] ][ identifier[j] ], identifier[Si] [ literal[string] ][ identifier[j] ], identifier[Si] [ literal[string] ][ identifier[j] ]) ) keyword[return] identifier[Si] keyword[elif] identifier[groups] keyword[is] keyword[not] keyword[None] : identifier[Si_grouped] = identifier[dict] (( identifier[k] ,[ keyword[None] ]* identifier[num_vars] ) keyword[for] identifier[k] keyword[in] [ literal[string] , literal[string] ]) identifier[Si_grouped] [ literal[string] ]= identifier[compute_grouped_metric] ( identifier[Si] [ literal[string] ], identifier[groups] ) identifier[Si_grouped] [ literal[string] ]= identifier[compute_grouped_metric] ( identifier[Si] [ literal[string] ], identifier[groups] ) identifier[Si_grouped] [ literal[string] ]= identifier[unique_group_names] identifier[Si_grouped] [ literal[string] ]= identifier[compute_grouped_sigma] ( identifier[Si] [ literal[string] ], identifier[groups] ) identifier[Si_grouped] [ literal[string] ]= identifier[compute_grouped_sigma] ( identifier[Si] [ literal[string] ], identifier[groups] ) keyword[if] identifier[print_to_console] : identifier[print] ( literal[string] . identifier[format] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ) ) keyword[for] identifier[j] keyword[in] identifier[list] ( identifier[range] ( identifier[number_of_groups] )): identifier[print] ( literal[string] . identifier[format] ( identifier[Si_grouped] [ literal[string] ][ identifier[j] ], identifier[Si_grouped] [ literal[string] ][ identifier[j] ], identifier[Si_grouped] [ literal[string] ][ identifier[j] ], identifier[Si_grouped] [ literal[string] ][ identifier[j] ], identifier[Si_grouped] [ literal[string] ][ identifier[j] ]) ) keyword[return] identifier[Si_grouped] keyword[else] : keyword[raise] identifier[RuntimeError] ( literal[string] )
def analyze(problem, X, Y, num_resamples=1000, conf_level=0.95, print_to_console=False, num_levels=4, seed=None): """Perform Morris Analysis on model outputs. Returns a dictionary with keys 'mu', 'mu_star', 'sigma', and 'mu_star_conf', where each entry is a list of parameters containing the indices in the same order as the parameter file. Arguments --------- problem : dict The problem definition X : numpy.matrix The NumPy matrix containing the model inputs of dtype=float Y : numpy.array The NumPy array containing the model outputs of dtype=float num_resamples : int The number of resamples used to compute the confidence intervals (default 1000) conf_level : float The confidence interval level (default 0.95) print_to_console : bool Print results directly to console (default False) num_levels : int The number of grid levels, must be identical to the value passed to SALib.sample.morris (default 4) Returns ------- Si : dict A dictionary of sensitivity indices containing the following entries. - `mu` - the mean elementary effect - `mu_star` - the absolute of the mean elementary effect - `sigma` - the standard deviation of the elementary effect - `mu_star_conf` - the bootstrapped confidence interval - `names` - the names of the parameters References ---------- .. [1] Morris, M. (1991). "Factorial Sampling Plans for Preliminary Computational Experiments." Technometrics, 33(2):161-174, doi:10.1080/00401706.1991.10484804. .. [2] Campolongo, F., J. Cariboni, and A. Saltelli (2007). "An effective screening design for sensitivity analysis of large models." Environmental Modelling & Software, 22(10):1509-1518, doi:10.1016/j.envsoft.2006.10.004. 
Examples -------- >>> X = morris.sample(problem, 1000, num_levels=4) >>> Y = Ishigami.evaluate(X) >>> Si = morris.analyze(problem, X, Y, conf_level=0.95, >>> print_to_console=True, num_levels=4) """ if seed: np.random.seed(seed) # depends on [control=['if'], data=[]] msg = "dtype of {} array must be 'float', float32 or float64" if X.dtype not in ['float', 'float32', 'float64']: raise ValueError(msg.format('X')) # depends on [control=['if'], data=[]] if Y.dtype not in ['float', 'float32', 'float64']: raise ValueError(msg.format('Y')) # depends on [control=['if'], data=[]] # Assume that there are no groups groups = None delta = compute_delta(num_levels) num_vars = problem['num_vars'] if (problem.get('groups') is None) & (Y.size % (num_vars + 1) == 0): num_trajectories = int(Y.size / (num_vars + 1)) # depends on [control=['if'], data=[]] elif problem.get('groups') is not None: (groups, unique_group_names) = compute_groups_matrix(problem['groups']) number_of_groups = len(unique_group_names) num_trajectories = int(Y.size / (number_of_groups + 1)) # depends on [control=['if'], data=[]] else: raise ValueError('Number of samples in model output file must bea multiple of (D+1), where D is the number ofparameters (or groups) in your parameter file.') ee = np.zeros((num_vars, num_trajectories)) ee = compute_elementary_effects(X, Y, int(Y.size / num_trajectories), delta) # Output the Mu, Mu*, and Sigma Values. 
Also return them in case this is # being called from Python Si = ResultDict(((k, [None] * num_vars) for k in ['names', 'mu', 'mu_star', 'sigma', 'mu_star_conf'])) Si['mu'] = np.average(ee, 1) Si['mu_star'] = np.average(np.abs(ee), 1) Si['sigma'] = np.std(ee, axis=1, ddof=1) Si['names'] = problem['names'] for j in range(num_vars): Si['mu_star_conf'][j] = compute_mu_star_confidence(ee[j, :], num_trajectories, num_resamples, conf_level) # depends on [control=['for'], data=['j']] if groups is None: if print_to_console: print('{0:<30} {1:>10} {2:>10} {3:>15} {4:>10}'.format('Parameter', 'Mu_Star', 'Mu', 'Mu_Star_Conf', 'Sigma')) for j in list(range(num_vars)): print('{0:30} {1:10.3f} {2:10.3f} {3:15.3f} {4:10.3f}'.format(Si['names'][j], Si['mu_star'][j], Si['mu'][j], Si['mu_star_conf'][j], Si['sigma'][j])) # depends on [control=['for'], data=['j']] # depends on [control=['if'], data=[]] return Si # depends on [control=['if'], data=[]] elif groups is not None: # if there are groups, then the elementary effects returned need to be # computed over the groups of variables, # rather than the individual variables Si_grouped = dict(((k, [None] * num_vars) for k in ['mu_star', 'mu_star_conf'])) Si_grouped['mu_star'] = compute_grouped_metric(Si['mu_star'], groups) Si_grouped['mu_star_conf'] = compute_grouped_metric(Si['mu_star_conf'], groups) Si_grouped['names'] = unique_group_names Si_grouped['sigma'] = compute_grouped_sigma(Si['sigma'], groups) Si_grouped['mu'] = compute_grouped_sigma(Si['mu'], groups) if print_to_console: print('{0:<30} {1:>10} {2:>10} {3:>15} {4:>10}'.format('Parameter', 'Mu_Star', 'Mu', 'Mu_Star_Conf', 'Sigma')) for j in list(range(number_of_groups)): print('{0:30} {1:10.3f} {2:10.3f} {3:15.3f} {4:10.3f}'.format(Si_grouped['names'][j], Si_grouped['mu_star'][j], Si_grouped['mu'][j], Si_grouped['mu_star_conf'][j], Si_grouped['sigma'][j])) # depends on [control=['for'], data=['j']] # depends on [control=['if'], data=[]] return Si_grouped # depends on 
[control=['if'], data=['groups']] else: raise RuntimeError('Could not determine which parameters should be returned')
def create_from_response_pdu(resp_pdu, req_pdu): """ Create instance from response PDU. Response PDU is required together with the number of registers read. :param resp_pdu: Byte array with request PDU. :param quantity: Number of coils read. :return: Instance of :class:`ReadCoils`. """ read_holding_registers = ReadHoldingRegisters() read_holding_registers.quantity = struct.unpack('>H', req_pdu[-2:])[0] read_holding_registers.byte_count = \ struct.unpack('>B', resp_pdu[1:2])[0] fmt = '>' + (conf.TYPE_CHAR * read_holding_registers.quantity) read_holding_registers.data = list(struct.unpack(fmt, resp_pdu[2:])) return read_holding_registers
def function[create_from_response_pdu, parameter[resp_pdu, req_pdu]]: constant[ Create instance from response PDU. Response PDU is required together with the number of registers read. :param resp_pdu: Byte array with request PDU. :param quantity: Number of coils read. :return: Instance of :class:`ReadCoils`. ] variable[read_holding_registers] assign[=] call[name[ReadHoldingRegisters], parameter[]] name[read_holding_registers].quantity assign[=] call[call[name[struct].unpack, parameter[constant[>H], call[name[req_pdu]][<ast.Slice object at 0x7da204347340>]]]][constant[0]] name[read_holding_registers].byte_count assign[=] call[call[name[struct].unpack, parameter[constant[>B], call[name[resp_pdu]][<ast.Slice object at 0x7da204344b20>]]]][constant[0]] variable[fmt] assign[=] binary_operation[constant[>] + binary_operation[name[conf].TYPE_CHAR * name[read_holding_registers].quantity]] name[read_holding_registers].data assign[=] call[name[list], parameter[call[name[struct].unpack, parameter[name[fmt], call[name[resp_pdu]][<ast.Slice object at 0x7da204344c70>]]]]] return[name[read_holding_registers]]
keyword[def] identifier[create_from_response_pdu] ( identifier[resp_pdu] , identifier[req_pdu] ): literal[string] identifier[read_holding_registers] = identifier[ReadHoldingRegisters] () identifier[read_holding_registers] . identifier[quantity] = identifier[struct] . identifier[unpack] ( literal[string] , identifier[req_pdu] [- literal[int] :])[ literal[int] ] identifier[read_holding_registers] . identifier[byte_count] = identifier[struct] . identifier[unpack] ( literal[string] , identifier[resp_pdu] [ literal[int] : literal[int] ])[ literal[int] ] identifier[fmt] = literal[string] +( identifier[conf] . identifier[TYPE_CHAR] * identifier[read_holding_registers] . identifier[quantity] ) identifier[read_holding_registers] . identifier[data] = identifier[list] ( identifier[struct] . identifier[unpack] ( identifier[fmt] , identifier[resp_pdu] [ literal[int] :])) keyword[return] identifier[read_holding_registers]
def create_from_response_pdu(resp_pdu, req_pdu): """ Create instance from response PDU. Response PDU is required together with the number of registers read. :param resp_pdu: Byte array with request PDU. :param quantity: Number of coils read. :return: Instance of :class:`ReadCoils`. """ read_holding_registers = ReadHoldingRegisters() read_holding_registers.quantity = struct.unpack('>H', req_pdu[-2:])[0] read_holding_registers.byte_count = struct.unpack('>B', resp_pdu[1:2])[0] fmt = '>' + conf.TYPE_CHAR * read_holding_registers.quantity read_holding_registers.data = list(struct.unpack(fmt, resp_pdu[2:])) return read_holding_registers
def create_all(engine, checkfirst=True): """Create the tables for Bio2BEL.""" Base.metadata.create_all(bind=engine, checkfirst=checkfirst)
def function[create_all, parameter[engine, checkfirst]]: constant[Create the tables for Bio2BEL.] call[name[Base].metadata.create_all, parameter[]]
keyword[def] identifier[create_all] ( identifier[engine] , identifier[checkfirst] = keyword[True] ): literal[string] identifier[Base] . identifier[metadata] . identifier[create_all] ( identifier[bind] = identifier[engine] , identifier[checkfirst] = identifier[checkfirst] )
def create_all(engine, checkfirst=True): """Create the tables for Bio2BEL.""" Base.metadata.create_all(bind=engine, checkfirst=checkfirst)
def get_grade_system_gradebook_assignment_session(self, proxy): """Gets the session for assigning grade system to gradebook mappings. arg: proxy (osid.proxy.Proxy): a proxy return: (osid.grading.GradeSystemGradebookSession) - a ``GradeSystemGradebookAssignmentSession`` raise: NullArgument - ``proxy`` is ``null`` raise: OperationFailed - unable to complete request raise: Unimplemented - ``supports_grade_system_gradebook_assignment()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_grade_system_gradebook_assignment()`` is ``true``.* """ if not self.supports_grade_system_gradebook_assignment(): raise errors.Unimplemented() # pylint: disable=no-member return sessions.GradeSystemGradebookAssignmentSession(proxy=proxy, runtime=self._runtime)
def function[get_grade_system_gradebook_assignment_session, parameter[self, proxy]]: constant[Gets the session for assigning grade system to gradebook mappings. arg: proxy (osid.proxy.Proxy): a proxy return: (osid.grading.GradeSystemGradebookSession) - a ``GradeSystemGradebookAssignmentSession`` raise: NullArgument - ``proxy`` is ``null`` raise: OperationFailed - unable to complete request raise: Unimplemented - ``supports_grade_system_gradebook_assignment()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_grade_system_gradebook_assignment()`` is ``true``.* ] if <ast.UnaryOp object at 0x7da18f812560> begin[:] <ast.Raise object at 0x7da18f810ee0> return[call[name[sessions].GradeSystemGradebookAssignmentSession, parameter[]]]
keyword[def] identifier[get_grade_system_gradebook_assignment_session] ( identifier[self] , identifier[proxy] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[supports_grade_system_gradebook_assignment] (): keyword[raise] identifier[errors] . identifier[Unimplemented] () keyword[return] identifier[sessions] . identifier[GradeSystemGradebookAssignmentSession] ( identifier[proxy] = identifier[proxy] , identifier[runtime] = identifier[self] . identifier[_runtime] )
def get_grade_system_gradebook_assignment_session(self, proxy): """Gets the session for assigning grade system to gradebook mappings. arg: proxy (osid.proxy.Proxy): a proxy return: (osid.grading.GradeSystemGradebookSession) - a ``GradeSystemGradebookAssignmentSession`` raise: NullArgument - ``proxy`` is ``null`` raise: OperationFailed - unable to complete request raise: Unimplemented - ``supports_grade_system_gradebook_assignment()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_grade_system_gradebook_assignment()`` is ``true``.* """ if not self.supports_grade_system_gradebook_assignment(): raise errors.Unimplemented() # depends on [control=['if'], data=[]] # pylint: disable=no-member return sessions.GradeSystemGradebookAssignmentSession(proxy=proxy, runtime=self._runtime)
def trim(self, start_time, end_time, strict=False): ''' Trim every annotation contained in the annotation array using `Annotation.trim` and return as a new `AnnotationArray`. See `Annotation.trim` for details about trimming. This function does not modify the annotations in the original annotation array. Parameters ---------- start_time : float The desired start time for the trimmed annotations in seconds. end_time The desired end time for trimmed annotations in seconds. Must be greater than ``start_time``. strict : bool When ``False`` (default) observations that lie at the boundaries of the trimming range (see `Annotation.trim` for details) will have their time and/or duration adjusted such that only the part of the observation that lies within the trim range is kept. When ``True`` such observations are discarded and not included in the trimmed annotation. Returns ------- trimmed_array : AnnotationArray An annotation array where every annotation has been trimmed. ''' trimmed_array = AnnotationArray() for ann in self: trimmed_array.append(ann.trim(start_time, end_time, strict=strict)) return trimmed_array
def function[trim, parameter[self, start_time, end_time, strict]]: constant[ Trim every annotation contained in the annotation array using `Annotation.trim` and return as a new `AnnotationArray`. See `Annotation.trim` for details about trimming. This function does not modify the annotations in the original annotation array. Parameters ---------- start_time : float The desired start time for the trimmed annotations in seconds. end_time The desired end time for trimmed annotations in seconds. Must be greater than ``start_time``. strict : bool When ``False`` (default) observations that lie at the boundaries of the trimming range (see `Annotation.trim` for details) will have their time and/or duration adjusted such that only the part of the observation that lies within the trim range is kept. When ``True`` such observations are discarded and not included in the trimmed annotation. Returns ------- trimmed_array : AnnotationArray An annotation array where every annotation has been trimmed. ] variable[trimmed_array] assign[=] call[name[AnnotationArray], parameter[]] for taget[name[ann]] in starred[name[self]] begin[:] call[name[trimmed_array].append, parameter[call[name[ann].trim, parameter[name[start_time], name[end_time]]]]] return[name[trimmed_array]]
keyword[def] identifier[trim] ( identifier[self] , identifier[start_time] , identifier[end_time] , identifier[strict] = keyword[False] ): literal[string] identifier[trimmed_array] = identifier[AnnotationArray] () keyword[for] identifier[ann] keyword[in] identifier[self] : identifier[trimmed_array] . identifier[append] ( identifier[ann] . identifier[trim] ( identifier[start_time] , identifier[end_time] , identifier[strict] = identifier[strict] )) keyword[return] identifier[trimmed_array]
def trim(self, start_time, end_time, strict=False): """ Trim every annotation contained in the annotation array using `Annotation.trim` and return as a new `AnnotationArray`. See `Annotation.trim` for details about trimming. This function does not modify the annotations in the original annotation array. Parameters ---------- start_time : float The desired start time for the trimmed annotations in seconds. end_time The desired end time for trimmed annotations in seconds. Must be greater than ``start_time``. strict : bool When ``False`` (default) observations that lie at the boundaries of the trimming range (see `Annotation.trim` for details) will have their time and/or duration adjusted such that only the part of the observation that lies within the trim range is kept. When ``True`` such observations are discarded and not included in the trimmed annotation. Returns ------- trimmed_array : AnnotationArray An annotation array where every annotation has been trimmed. """ trimmed_array = AnnotationArray() for ann in self: trimmed_array.append(ann.trim(start_time, end_time, strict=strict)) # depends on [control=['for'], data=['ann']] return trimmed_array
def wrap_sync(func): """Wraps a synchronous function into an asynchronous function.""" @functools.wraps(func) def wrapped(*args, **kwargs): fut = asyncio.Future() def green(): try: fut.set_result(func(*args, **kwargs)) except BaseException as e: fut.set_exception(e) greenlet.greenlet(green).switch() return fut return wrapped
def function[wrap_sync, parameter[func]]: constant[Wraps a synchronous function into an asynchronous function.] def function[wrapped, parameter[]]: variable[fut] assign[=] call[name[asyncio].Future, parameter[]] def function[green, parameter[]]: <ast.Try object at 0x7da1b1473760> call[call[name[greenlet].greenlet, parameter[name[green]]].switch, parameter[]] return[name[fut]] return[name[wrapped]]
keyword[def] identifier[wrap_sync] ( identifier[func] ): literal[string] @ identifier[functools] . identifier[wraps] ( identifier[func] ) keyword[def] identifier[wrapped] (* identifier[args] ,** identifier[kwargs] ): identifier[fut] = identifier[asyncio] . identifier[Future] () keyword[def] identifier[green] (): keyword[try] : identifier[fut] . identifier[set_result] ( identifier[func] (* identifier[args] ,** identifier[kwargs] )) keyword[except] identifier[BaseException] keyword[as] identifier[e] : identifier[fut] . identifier[set_exception] ( identifier[e] ) identifier[greenlet] . identifier[greenlet] ( identifier[green] ). identifier[switch] () keyword[return] identifier[fut] keyword[return] identifier[wrapped]
def wrap_sync(func): """Wraps a synchronous function into an asynchronous function.""" @functools.wraps(func) def wrapped(*args, **kwargs): fut = asyncio.Future() def green(): try: fut.set_result(func(*args, **kwargs)) # depends on [control=['try'], data=[]] except BaseException as e: fut.set_exception(e) # depends on [control=['except'], data=['e']] greenlet.greenlet(green).switch() return fut return wrapped
def delete_room(room, reason=''): """Deletes a MUC room from the XMPP server.""" if room.custom_server: return def _delete_room(xmpp): muc = xmpp.plugin['xep_0045'] muc.destroy(room.jid, reason=reason) current_plugin.logger.info('Deleting room %s', room.jid) _execute_xmpp(_delete_room) delete_logs(room)
def function[delete_room, parameter[room, reason]]: constant[Deletes a MUC room from the XMPP server.] if name[room].custom_server begin[:] return[None] def function[_delete_room, parameter[xmpp]]: variable[muc] assign[=] call[name[xmpp].plugin][constant[xep_0045]] call[name[muc].destroy, parameter[name[room].jid]] call[name[current_plugin].logger.info, parameter[constant[Deleting room %s], name[room].jid]] call[name[_execute_xmpp], parameter[name[_delete_room]]] call[name[delete_logs], parameter[name[room]]]
keyword[def] identifier[delete_room] ( identifier[room] , identifier[reason] = literal[string] ): literal[string] keyword[if] identifier[room] . identifier[custom_server] : keyword[return] keyword[def] identifier[_delete_room] ( identifier[xmpp] ): identifier[muc] = identifier[xmpp] . identifier[plugin] [ literal[string] ] identifier[muc] . identifier[destroy] ( identifier[room] . identifier[jid] , identifier[reason] = identifier[reason] ) identifier[current_plugin] . identifier[logger] . identifier[info] ( literal[string] , identifier[room] . identifier[jid] ) identifier[_execute_xmpp] ( identifier[_delete_room] ) identifier[delete_logs] ( identifier[room] )
def delete_room(room, reason=''): """Deletes a MUC room from the XMPP server.""" if room.custom_server: return # depends on [control=['if'], data=[]] def _delete_room(xmpp): muc = xmpp.plugin['xep_0045'] muc.destroy(room.jid, reason=reason) current_plugin.logger.info('Deleting room %s', room.jid) _execute_xmpp(_delete_room) delete_logs(room)
def init_layout(self): """ Initialize the layout of the toolkit widget. This method is called during the bottom-up pass. This method should initialize the layout of the widget. The child widgets will be fully initialized and layed out when this is called. This """ layout = self.layout #: Add the layout as a subview self.widget.addSubview(layout) #: Add all child widgets to the layout for child_widget in self.child_widgets(): layout.addArrangedSubview(child_widget)
def function[init_layout, parameter[self]]: constant[ Initialize the layout of the toolkit widget. This method is called during the bottom-up pass. This method should initialize the layout of the widget. The child widgets will be fully initialized and layed out when this is called. This ] variable[layout] assign[=] name[self].layout call[name[self].widget.addSubview, parameter[name[layout]]] for taget[name[child_widget]] in starred[call[name[self].child_widgets, parameter[]]] begin[:] call[name[layout].addArrangedSubview, parameter[name[child_widget]]]
keyword[def] identifier[init_layout] ( identifier[self] ): literal[string] identifier[layout] = identifier[self] . identifier[layout] identifier[self] . identifier[widget] . identifier[addSubview] ( identifier[layout] ) keyword[for] identifier[child_widget] keyword[in] identifier[self] . identifier[child_widgets] (): identifier[layout] . identifier[addArrangedSubview] ( identifier[child_widget] )
def init_layout(self): """ Initialize the layout of the toolkit widget. This method is called during the bottom-up pass. This method should initialize the layout of the widget. The child widgets will be fully initialized and layed out when this is called. This """ layout = self.layout #: Add the layout as a subview self.widget.addSubview(layout) #: Add all child widgets to the layout for child_widget in self.child_widgets(): layout.addArrangedSubview(child_widget) # depends on [control=['for'], data=['child_widget']]
def decryptMsg(self, ciphertext, textMsg=True): """ :type ciphertext: WhisperMessage :type textMsg: Bool set this to False if you are decrypting bytes instead of string """ if not self.sessionStore.containsSession(self.recipientId, self.deviceId): raise NoSessionException("No session for: %s, %s" % (self.recipientId, self.deviceId)) sessionRecord = self.sessionStore.loadSession(self.recipientId, self.deviceId) plaintext = self.decryptWithSessionRecord(sessionRecord, ciphertext) self.sessionStore.storeSession(self.recipientId, self.deviceId, sessionRecord) return plaintext
def function[decryptMsg, parameter[self, ciphertext, textMsg]]: constant[ :type ciphertext: WhisperMessage :type textMsg: Bool set this to False if you are decrypting bytes instead of string ] if <ast.UnaryOp object at 0x7da20c6e5870> begin[:] <ast.Raise object at 0x7da20c6e7a60> variable[sessionRecord] assign[=] call[name[self].sessionStore.loadSession, parameter[name[self].recipientId, name[self].deviceId]] variable[plaintext] assign[=] call[name[self].decryptWithSessionRecord, parameter[name[sessionRecord], name[ciphertext]]] call[name[self].sessionStore.storeSession, parameter[name[self].recipientId, name[self].deviceId, name[sessionRecord]]] return[name[plaintext]]
keyword[def] identifier[decryptMsg] ( identifier[self] , identifier[ciphertext] , identifier[textMsg] = keyword[True] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[sessionStore] . identifier[containsSession] ( identifier[self] . identifier[recipientId] , identifier[self] . identifier[deviceId] ): keyword[raise] identifier[NoSessionException] ( literal[string] %( identifier[self] . identifier[recipientId] , identifier[self] . identifier[deviceId] )) identifier[sessionRecord] = identifier[self] . identifier[sessionStore] . identifier[loadSession] ( identifier[self] . identifier[recipientId] , identifier[self] . identifier[deviceId] ) identifier[plaintext] = identifier[self] . identifier[decryptWithSessionRecord] ( identifier[sessionRecord] , identifier[ciphertext] ) identifier[self] . identifier[sessionStore] . identifier[storeSession] ( identifier[self] . identifier[recipientId] , identifier[self] . identifier[deviceId] , identifier[sessionRecord] ) keyword[return] identifier[plaintext]
def decryptMsg(self, ciphertext, textMsg=True): """ :type ciphertext: WhisperMessage :type textMsg: Bool set this to False if you are decrypting bytes instead of string """ if not self.sessionStore.containsSession(self.recipientId, self.deviceId): raise NoSessionException('No session for: %s, %s' % (self.recipientId, self.deviceId)) # depends on [control=['if'], data=[]] sessionRecord = self.sessionStore.loadSession(self.recipientId, self.deviceId) plaintext = self.decryptWithSessionRecord(sessionRecord, ciphertext) self.sessionStore.storeSession(self.recipientId, self.deviceId, sessionRecord) return plaintext
def diff_toDelta(self, diffs): """Crush the diff into an encoded string which describes the operations required to transform text1 into text2. E.g. =3\t-2\t+ing -> Keep 3 chars, delete 2 chars, insert 'ing'. Operations are tab-separated. Inserted text is escaped using %xx notation. Args: diffs: Array of diff tuples. Returns: Delta text. """ text = [] for (op, data) in diffs: if op == self.DIFF_INSERT: # High ascii will raise UnicodeDecodeError. Use Unicode instead. data = data.encode("utf-8") text.append("+" + urllib.quote(data, "!~*'();/?:@&=+$,# ")) elif op == self.DIFF_DELETE: text.append("-%d" % len(data)) elif op == self.DIFF_EQUAL: text.append("=%d" % len(data)) return "\t".join(text)
def function[diff_toDelta, parameter[self, diffs]]: constant[Crush the diff into an encoded string which describes the operations required to transform text1 into text2. E.g. =3 -2 +ing -> Keep 3 chars, delete 2 chars, insert 'ing'. Operations are tab-separated. Inserted text is escaped using %xx notation. Args: diffs: Array of diff tuples. Returns: Delta text. ] variable[text] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da20e9b1510>, <ast.Name object at 0x7da20e9b28c0>]]] in starred[name[diffs]] begin[:] if compare[name[op] equal[==] name[self].DIFF_INSERT] begin[:] variable[data] assign[=] call[name[data].encode, parameter[constant[utf-8]]] call[name[text].append, parameter[binary_operation[constant[+] + call[name[urllib].quote, parameter[name[data], constant[!~*'();/?:@&=+$,# ]]]]]] return[call[constant[ ].join, parameter[name[text]]]]
keyword[def] identifier[diff_toDelta] ( identifier[self] , identifier[diffs] ): literal[string] identifier[text] =[] keyword[for] ( identifier[op] , identifier[data] ) keyword[in] identifier[diffs] : keyword[if] identifier[op] == identifier[self] . identifier[DIFF_INSERT] : identifier[data] = identifier[data] . identifier[encode] ( literal[string] ) identifier[text] . identifier[append] ( literal[string] + identifier[urllib] . identifier[quote] ( identifier[data] , literal[string] )) keyword[elif] identifier[op] == identifier[self] . identifier[DIFF_DELETE] : identifier[text] . identifier[append] ( literal[string] % identifier[len] ( identifier[data] )) keyword[elif] identifier[op] == identifier[self] . identifier[DIFF_EQUAL] : identifier[text] . identifier[append] ( literal[string] % identifier[len] ( identifier[data] )) keyword[return] literal[string] . identifier[join] ( identifier[text] )
def diff_toDelta(self, diffs): """Crush the diff into an encoded string which describes the operations required to transform text1 into text2. E.g. =3 -2 +ing -> Keep 3 chars, delete 2 chars, insert 'ing'. Operations are tab-separated. Inserted text is escaped using %xx notation. Args: diffs: Array of diff tuples. Returns: Delta text. """ text = [] for (op, data) in diffs: if op == self.DIFF_INSERT: # High ascii will raise UnicodeDecodeError. Use Unicode instead. data = data.encode('utf-8') text.append('+' + urllib.quote(data, "!~*'();/?:@&=+$,# ")) # depends on [control=['if'], data=[]] elif op == self.DIFF_DELETE: text.append('-%d' % len(data)) # depends on [control=['if'], data=[]] elif op == self.DIFF_EQUAL: text.append('=%d' % len(data)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return '\t'.join(text)
def update_project_language(self, project_id, language_code, data, fuzzy_trigger=None): """ Inserts / overwrites translations. >>> data = [ { "term": "Projects", "context": "project list", "translation": { "content": "Des projets", "fuzzy": 0 } } ] """ kwargs = {} if fuzzy_trigger is not None: kwargs['fuzzy_trigger'] = fuzzy_trigger data = self._run( url_path="languages/update", id=project_id, language=language_code, data=json.dumps(data), **kwargs ) return data['result']['translations']
def function[update_project_language, parameter[self, project_id, language_code, data, fuzzy_trigger]]: constant[ Inserts / overwrites translations. >>> data = [ { "term": "Projects", "context": "project list", "translation": { "content": "Des projets", "fuzzy": 0 } } ] ] variable[kwargs] assign[=] dictionary[[], []] if compare[name[fuzzy_trigger] is_not constant[None]] begin[:] call[name[kwargs]][constant[fuzzy_trigger]] assign[=] name[fuzzy_trigger] variable[data] assign[=] call[name[self]._run, parameter[]] return[call[call[name[data]][constant[result]]][constant[translations]]]
keyword[def] identifier[update_project_language] ( identifier[self] , identifier[project_id] , identifier[language_code] , identifier[data] , identifier[fuzzy_trigger] = keyword[None] ): literal[string] identifier[kwargs] ={} keyword[if] identifier[fuzzy_trigger] keyword[is] keyword[not] keyword[None] : identifier[kwargs] [ literal[string] ]= identifier[fuzzy_trigger] identifier[data] = identifier[self] . identifier[_run] ( identifier[url_path] = literal[string] , identifier[id] = identifier[project_id] , identifier[language] = identifier[language_code] , identifier[data] = identifier[json] . identifier[dumps] ( identifier[data] ), ** identifier[kwargs] ) keyword[return] identifier[data] [ literal[string] ][ literal[string] ]
def update_project_language(self, project_id, language_code, data, fuzzy_trigger=None): """ Inserts / overwrites translations. >>> data = [ { "term": "Projects", "context": "project list", "translation": { "content": "Des projets", "fuzzy": 0 } } ] """ kwargs = {} if fuzzy_trigger is not None: kwargs['fuzzy_trigger'] = fuzzy_trigger # depends on [control=['if'], data=['fuzzy_trigger']] data = self._run(url_path='languages/update', id=project_id, language=language_code, data=json.dumps(data), **kwargs) return data['result']['translations']
def get_relation(self, rel_id, resolve_missing=False): """ Get a relation by its ID. :param rel_id: The relation ID :type rel_id: Integer :param resolve_missing: Query the Overpass API if the relation is missing in the result set. :return: The relation :rtype: overpy.Relation :raises overpy.exception.DataIncomplete: The requested relation is not available in the result cache. :raises overpy.exception.DataIncomplete: If resolve_missing is True and the relation can't be resolved. """ relations = self.get_relations(rel_id=rel_id) if len(relations) == 0: if resolve_missing is False: raise exception.DataIncomplete("Resolve missing relations is disabled") query = ("\n" "[out:json];\n" "relation({relation_id});\n" "out body;\n" ) query = query.format( relation_id=rel_id ) tmp_result = self.api.query(query) self.expand(tmp_result) relations = self.get_relations(rel_id=rel_id) if len(relations) == 0: raise exception.DataIncomplete("Unable to resolve requested reference") return relations[0]
def function[get_relation, parameter[self, rel_id, resolve_missing]]: constant[ Get a relation by its ID. :param rel_id: The relation ID :type rel_id: Integer :param resolve_missing: Query the Overpass API if the relation is missing in the result set. :return: The relation :rtype: overpy.Relation :raises overpy.exception.DataIncomplete: The requested relation is not available in the result cache. :raises overpy.exception.DataIncomplete: If resolve_missing is True and the relation can't be resolved. ] variable[relations] assign[=] call[name[self].get_relations, parameter[]] if compare[call[name[len], parameter[name[relations]]] equal[==] constant[0]] begin[:] if compare[name[resolve_missing] is constant[False]] begin[:] <ast.Raise object at 0x7da1b0404460> variable[query] assign[=] constant[ [out:json]; relation({relation_id}); out body; ] variable[query] assign[=] call[name[query].format, parameter[]] variable[tmp_result] assign[=] call[name[self].api.query, parameter[name[query]]] call[name[self].expand, parameter[name[tmp_result]]] variable[relations] assign[=] call[name[self].get_relations, parameter[]] if compare[call[name[len], parameter[name[relations]]] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da1b0407a30> return[call[name[relations]][constant[0]]]
keyword[def] identifier[get_relation] ( identifier[self] , identifier[rel_id] , identifier[resolve_missing] = keyword[False] ): literal[string] identifier[relations] = identifier[self] . identifier[get_relations] ( identifier[rel_id] = identifier[rel_id] ) keyword[if] identifier[len] ( identifier[relations] )== literal[int] : keyword[if] identifier[resolve_missing] keyword[is] keyword[False] : keyword[raise] identifier[exception] . identifier[DataIncomplete] ( literal[string] ) identifier[query] =( literal[string] literal[string] literal[string] literal[string] ) identifier[query] = identifier[query] . identifier[format] ( identifier[relation_id] = identifier[rel_id] ) identifier[tmp_result] = identifier[self] . identifier[api] . identifier[query] ( identifier[query] ) identifier[self] . identifier[expand] ( identifier[tmp_result] ) identifier[relations] = identifier[self] . identifier[get_relations] ( identifier[rel_id] = identifier[rel_id] ) keyword[if] identifier[len] ( identifier[relations] )== literal[int] : keyword[raise] identifier[exception] . identifier[DataIncomplete] ( literal[string] ) keyword[return] identifier[relations] [ literal[int] ]
def get_relation(self, rel_id, resolve_missing=False): """ Get a relation by its ID. :param rel_id: The relation ID :type rel_id: Integer :param resolve_missing: Query the Overpass API if the relation is missing in the result set. :return: The relation :rtype: overpy.Relation :raises overpy.exception.DataIncomplete: The requested relation is not available in the result cache. :raises overpy.exception.DataIncomplete: If resolve_missing is True and the relation can't be resolved. """ relations = self.get_relations(rel_id=rel_id) if len(relations) == 0: if resolve_missing is False: raise exception.DataIncomplete('Resolve missing relations is disabled') # depends on [control=['if'], data=[]] query = '\n[out:json];\nrelation({relation_id});\nout body;\n' query = query.format(relation_id=rel_id) tmp_result = self.api.query(query) self.expand(tmp_result) relations = self.get_relations(rel_id=rel_id) # depends on [control=['if'], data=[]] if len(relations) == 0: raise exception.DataIncomplete('Unable to resolve requested reference') # depends on [control=['if'], data=[]] return relations[0]
def is_symbols_pair_complete(editor, symbol): """ Returns if the symbols pair is complete on current editor line. :param editor: Document editor. :type editor: QWidget :param symbol: Symbol to check. :type symbol: unicode :return: Is symbols pair complete. :rtype: bool """ symbols_pairs = get_editor_capability(editor, "symbols_pairs") if not symbols_pairs: return cursor = editor.textCursor() cursor.movePosition(QTextCursor.StartOfLine, QTextCursor.MoveAnchor) cursor.movePosition(QTextCursor.EndOfLine, QTextCursor.KeepAnchor) selected_text = foundations.strings.to_string(cursor.selectedText()) if symbol == symbols_pairs[symbol]: return selected_text.count(symbol) % 2 == 0 else: return selected_text.count(symbol) == selected_text.count(symbols_pairs[symbol])
def function[is_symbols_pair_complete, parameter[editor, symbol]]: constant[ Returns if the symbols pair is complete on current editor line. :param editor: Document editor. :type editor: QWidget :param symbol: Symbol to check. :type symbol: unicode :return: Is symbols pair complete. :rtype: bool ] variable[symbols_pairs] assign[=] call[name[get_editor_capability], parameter[name[editor], constant[symbols_pairs]]] if <ast.UnaryOp object at 0x7da1b0913790> begin[:] return[None] variable[cursor] assign[=] call[name[editor].textCursor, parameter[]] call[name[cursor].movePosition, parameter[name[QTextCursor].StartOfLine, name[QTextCursor].MoveAnchor]] call[name[cursor].movePosition, parameter[name[QTextCursor].EndOfLine, name[QTextCursor].KeepAnchor]] variable[selected_text] assign[=] call[name[foundations].strings.to_string, parameter[call[name[cursor].selectedText, parameter[]]]] if compare[name[symbol] equal[==] call[name[symbols_pairs]][name[symbol]]] begin[:] return[compare[binary_operation[call[name[selected_text].count, parameter[name[symbol]]] <ast.Mod object at 0x7da2590d6920> constant[2]] equal[==] constant[0]]]
keyword[def] identifier[is_symbols_pair_complete] ( identifier[editor] , identifier[symbol] ): literal[string] identifier[symbols_pairs] = identifier[get_editor_capability] ( identifier[editor] , literal[string] ) keyword[if] keyword[not] identifier[symbols_pairs] : keyword[return] identifier[cursor] = identifier[editor] . identifier[textCursor] () identifier[cursor] . identifier[movePosition] ( identifier[QTextCursor] . identifier[StartOfLine] , identifier[QTextCursor] . identifier[MoveAnchor] ) identifier[cursor] . identifier[movePosition] ( identifier[QTextCursor] . identifier[EndOfLine] , identifier[QTextCursor] . identifier[KeepAnchor] ) identifier[selected_text] = identifier[foundations] . identifier[strings] . identifier[to_string] ( identifier[cursor] . identifier[selectedText] ()) keyword[if] identifier[symbol] == identifier[symbols_pairs] [ identifier[symbol] ]: keyword[return] identifier[selected_text] . identifier[count] ( identifier[symbol] )% literal[int] == literal[int] keyword[else] : keyword[return] identifier[selected_text] . identifier[count] ( identifier[symbol] )== identifier[selected_text] . identifier[count] ( identifier[symbols_pairs] [ identifier[symbol] ])
def is_symbols_pair_complete(editor, symbol): """ Returns if the symbols pair is complete on current editor line. :param editor: Document editor. :type editor: QWidget :param symbol: Symbol to check. :type symbol: unicode :return: Is symbols pair complete. :rtype: bool """ symbols_pairs = get_editor_capability(editor, 'symbols_pairs') if not symbols_pairs: return # depends on [control=['if'], data=[]] cursor = editor.textCursor() cursor.movePosition(QTextCursor.StartOfLine, QTextCursor.MoveAnchor) cursor.movePosition(QTextCursor.EndOfLine, QTextCursor.KeepAnchor) selected_text = foundations.strings.to_string(cursor.selectedText()) if symbol == symbols_pairs[symbol]: return selected_text.count(symbol) % 2 == 0 # depends on [control=['if'], data=['symbol']] else: return selected_text.count(symbol) == selected_text.count(symbols_pairs[symbol])
def _init(self, domain, dtype, missing_value, window_safe, ndim, params): """ Parameters ---------- domain : zipline.pipeline.domain.Domain The domain of this term. dtype : np.dtype Dtype of this term's output. missing_value : object Missing value for this term. ndim : 1 or 2 The dimensionality of this term. params : tuple[(str, hashable)] Tuple of key/value pairs of additional parameters. """ self.domain = domain self.dtype = dtype self.missing_value = missing_value self.window_safe = window_safe self.ndim = ndim for name, value in params: if hasattr(self, name): raise TypeError( "Parameter {name!r} conflicts with already-present" " attribute with value {value!r}.".format( name=name, value=getattr(self, name), ) ) # TODO: Consider setting these values as attributes and replacing # the boilerplate in NumericalExpression, Rank, and # PercentileFilter. self.params = dict(params) # Make sure that subclasses call super() in their _validate() methods # by setting this flag. The base class implementation of _validate # should set this flag to True. self._subclass_called_super_validate = False self._validate() assert self._subclass_called_super_validate, ( "Term._validate() was not called.\n" "This probably means that you overrode _validate" " without calling super()." ) del self._subclass_called_super_validate return self
def function[_init, parameter[self, domain, dtype, missing_value, window_safe, ndim, params]]: constant[ Parameters ---------- domain : zipline.pipeline.domain.Domain The domain of this term. dtype : np.dtype Dtype of this term's output. missing_value : object Missing value for this term. ndim : 1 or 2 The dimensionality of this term. params : tuple[(str, hashable)] Tuple of key/value pairs of additional parameters. ] name[self].domain assign[=] name[domain] name[self].dtype assign[=] name[dtype] name[self].missing_value assign[=] name[missing_value] name[self].window_safe assign[=] name[window_safe] name[self].ndim assign[=] name[ndim] for taget[tuple[[<ast.Name object at 0x7da1b1ea2ec0>, <ast.Name object at 0x7da1b1ea12a0>]]] in starred[name[params]] begin[:] if call[name[hasattr], parameter[name[self], name[name]]] begin[:] <ast.Raise object at 0x7da1b1ea0670> name[self].params assign[=] call[name[dict], parameter[name[params]]] name[self]._subclass_called_super_validate assign[=] constant[False] call[name[self]._validate, parameter[]] assert[name[self]._subclass_called_super_validate] <ast.Delete object at 0x7da1b1ea1ea0> return[name[self]]
keyword[def] identifier[_init] ( identifier[self] , identifier[domain] , identifier[dtype] , identifier[missing_value] , identifier[window_safe] , identifier[ndim] , identifier[params] ): literal[string] identifier[self] . identifier[domain] = identifier[domain] identifier[self] . identifier[dtype] = identifier[dtype] identifier[self] . identifier[missing_value] = identifier[missing_value] identifier[self] . identifier[window_safe] = identifier[window_safe] identifier[self] . identifier[ndim] = identifier[ndim] keyword[for] identifier[name] , identifier[value] keyword[in] identifier[params] : keyword[if] identifier[hasattr] ( identifier[self] , identifier[name] ): keyword[raise] identifier[TypeError] ( literal[string] literal[string] . identifier[format] ( identifier[name] = identifier[name] , identifier[value] = identifier[getattr] ( identifier[self] , identifier[name] ), ) ) identifier[self] . identifier[params] = identifier[dict] ( identifier[params] ) identifier[self] . identifier[_subclass_called_super_validate] = keyword[False] identifier[self] . identifier[_validate] () keyword[assert] identifier[self] . identifier[_subclass_called_super_validate] ,( literal[string] literal[string] literal[string] ) keyword[del] identifier[self] . identifier[_subclass_called_super_validate] keyword[return] identifier[self]
def _init(self, domain, dtype, missing_value, window_safe, ndim, params): """ Parameters ---------- domain : zipline.pipeline.domain.Domain The domain of this term. dtype : np.dtype Dtype of this term's output. missing_value : object Missing value for this term. ndim : 1 or 2 The dimensionality of this term. params : tuple[(str, hashable)] Tuple of key/value pairs of additional parameters. """ self.domain = domain self.dtype = dtype self.missing_value = missing_value self.window_safe = window_safe self.ndim = ndim for (name, value) in params: if hasattr(self, name): raise TypeError('Parameter {name!r} conflicts with already-present attribute with value {value!r}.'.format(name=name, value=getattr(self, name))) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # TODO: Consider setting these values as attributes and replacing # the boilerplate in NumericalExpression, Rank, and # PercentileFilter. self.params = dict(params) # Make sure that subclasses call super() in their _validate() methods # by setting this flag. The base class implementation of _validate # should set this flag to True. self._subclass_called_super_validate = False self._validate() assert self._subclass_called_super_validate, 'Term._validate() was not called.\nThis probably means that you overrode _validate without calling super().' del self._subclass_called_super_validate return self
def _validate_frequency(cls, index, freq, **kwargs): """ Validate that a frequency is compatible with the values of a given Datetime Array/Index or Timedelta Array/Index Parameters ---------- index : DatetimeIndex or TimedeltaIndex The index on which to determine if the given frequency is valid freq : DateOffset The frequency to validate """ if is_period_dtype(cls): # Frequency validation is not meaningful for Period Array/Index return None inferred = index.inferred_freq if index.size == 0 or inferred == freq.freqstr: return None try: on_freq = cls._generate_range(start=index[0], end=None, periods=len(index), freq=freq, **kwargs) if not np.array_equal(index.asi8, on_freq.asi8): raise ValueError except ValueError as e: if "non-fixed" in str(e): # non-fixed frequencies are not meaningful for timedelta64; # we retain that error message raise e # GH#11587 the main way this is reached is if the `np.array_equal` # check above is False. This can also be reached if index[0] # is `NaT`, in which case the call to `cls._generate_range` will # raise a ValueError, which we re-raise with a more targeted # message. raise ValueError('Inferred frequency {infer} from passed values ' 'does not conform to passed frequency {passed}' .format(infer=inferred, passed=freq.freqstr))
def function[_validate_frequency, parameter[cls, index, freq]]: constant[ Validate that a frequency is compatible with the values of a given Datetime Array/Index or Timedelta Array/Index Parameters ---------- index : DatetimeIndex or TimedeltaIndex The index on which to determine if the given frequency is valid freq : DateOffset The frequency to validate ] if call[name[is_period_dtype], parameter[name[cls]]] begin[:] return[constant[None]] variable[inferred] assign[=] name[index].inferred_freq if <ast.BoolOp object at 0x7da1b26aee60> begin[:] return[constant[None]] <ast.Try object at 0x7da1b26ac220>
keyword[def] identifier[_validate_frequency] ( identifier[cls] , identifier[index] , identifier[freq] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[is_period_dtype] ( identifier[cls] ): keyword[return] keyword[None] identifier[inferred] = identifier[index] . identifier[inferred_freq] keyword[if] identifier[index] . identifier[size] == literal[int] keyword[or] identifier[inferred] == identifier[freq] . identifier[freqstr] : keyword[return] keyword[None] keyword[try] : identifier[on_freq] = identifier[cls] . identifier[_generate_range] ( identifier[start] = identifier[index] [ literal[int] ], identifier[end] = keyword[None] , identifier[periods] = identifier[len] ( identifier[index] ), identifier[freq] = identifier[freq] , ** identifier[kwargs] ) keyword[if] keyword[not] identifier[np] . identifier[array_equal] ( identifier[index] . identifier[asi8] , identifier[on_freq] . identifier[asi8] ): keyword[raise] identifier[ValueError] keyword[except] identifier[ValueError] keyword[as] identifier[e] : keyword[if] literal[string] keyword[in] identifier[str] ( identifier[e] ): keyword[raise] identifier[e] keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[infer] = identifier[inferred] , identifier[passed] = identifier[freq] . identifier[freqstr] ))
def _validate_frequency(cls, index, freq, **kwargs): """ Validate that a frequency is compatible with the values of a given Datetime Array/Index or Timedelta Array/Index Parameters ---------- index : DatetimeIndex or TimedeltaIndex The index on which to determine if the given frequency is valid freq : DateOffset The frequency to validate """ if is_period_dtype(cls): # Frequency validation is not meaningful for Period Array/Index return None # depends on [control=['if'], data=[]] inferred = index.inferred_freq if index.size == 0 or inferred == freq.freqstr: return None # depends on [control=['if'], data=[]] try: on_freq = cls._generate_range(start=index[0], end=None, periods=len(index), freq=freq, **kwargs) if not np.array_equal(index.asi8, on_freq.asi8): raise ValueError # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except ValueError as e: if 'non-fixed' in str(e): # non-fixed frequencies are not meaningful for timedelta64; # we retain that error message raise e # depends on [control=['if'], data=[]] # GH#11587 the main way this is reached is if the `np.array_equal` # check above is False. This can also be reached if index[0] # is `NaT`, in which case the call to `cls._generate_range` will # raise a ValueError, which we re-raise with a more targeted # message. raise ValueError('Inferred frequency {infer} from passed values does not conform to passed frequency {passed}'.format(infer=inferred, passed=freq.freqstr)) # depends on [control=['except'], data=['e']]
def residuals(self,Y): """ Creates the model residuals Parameters ---------- Y : np.array The dependent variables Y Returns ---------- The model residuals """ return (Y-np.dot(self._create_B(Y),self._create_Z(Y)))
def function[residuals, parameter[self, Y]]: constant[ Creates the model residuals Parameters ---------- Y : np.array The dependent variables Y Returns ---------- The model residuals ] return[binary_operation[name[Y] - call[name[np].dot, parameter[call[name[self]._create_B, parameter[name[Y]]], call[name[self]._create_Z, parameter[name[Y]]]]]]]
keyword[def] identifier[residuals] ( identifier[self] , identifier[Y] ): literal[string] keyword[return] ( identifier[Y] - identifier[np] . identifier[dot] ( identifier[self] . identifier[_create_B] ( identifier[Y] ), identifier[self] . identifier[_create_Z] ( identifier[Y] )))
def residuals(self, Y): """ Creates the model residuals Parameters ---------- Y : np.array The dependent variables Y Returns ---------- The model residuals """ return Y - np.dot(self._create_B(Y), self._create_Z(Y))
def _set_remote(self, stream=False): """ Call :py:meth:`~._args_for_remote`; if the return value is not None, execute 'terraform remote config' with those arguments and ensure it exits 0. :param stream: whether or not to stream TF output in realtime :type stream: bool """ args = self._args_for_remote() if args is None: logger.debug('_args_for_remote() returned None; not configuring ' 'terraform remote') return logger.warning('Setting terraform remote config: %s', ' '.join(args)) args = ['config'] + args self._run_tf('remote', cmd_args=args, stream=stream) logger.info('Terraform remote configured.')
def function[_set_remote, parameter[self, stream]]: constant[ Call :py:meth:`~._args_for_remote`; if the return value is not None, execute 'terraform remote config' with those arguments and ensure it exits 0. :param stream: whether or not to stream TF output in realtime :type stream: bool ] variable[args] assign[=] call[name[self]._args_for_remote, parameter[]] if compare[name[args] is constant[None]] begin[:] call[name[logger].debug, parameter[constant[_args_for_remote() returned None; not configuring terraform remote]]] return[None] call[name[logger].warning, parameter[constant[Setting terraform remote config: %s], call[constant[ ].join, parameter[name[args]]]]] variable[args] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b0ac86a0>]] + name[args]] call[name[self]._run_tf, parameter[constant[remote]]] call[name[logger].info, parameter[constant[Terraform remote configured.]]]
keyword[def] identifier[_set_remote] ( identifier[self] , identifier[stream] = keyword[False] ): literal[string] identifier[args] = identifier[self] . identifier[_args_for_remote] () keyword[if] identifier[args] keyword[is] keyword[None] : identifier[logger] . identifier[debug] ( literal[string] literal[string] ) keyword[return] identifier[logger] . identifier[warning] ( literal[string] , literal[string] . identifier[join] ( identifier[args] )) identifier[args] =[ literal[string] ]+ identifier[args] identifier[self] . identifier[_run_tf] ( literal[string] , identifier[cmd_args] = identifier[args] , identifier[stream] = identifier[stream] ) identifier[logger] . identifier[info] ( literal[string] )
def _set_remote(self, stream=False): """ Call :py:meth:`~._args_for_remote`; if the return value is not None, execute 'terraform remote config' with those arguments and ensure it exits 0. :param stream: whether or not to stream TF output in realtime :type stream: bool """ args = self._args_for_remote() if args is None: logger.debug('_args_for_remote() returned None; not configuring terraform remote') return # depends on [control=['if'], data=[]] logger.warning('Setting terraform remote config: %s', ' '.join(args)) args = ['config'] + args self._run_tf('remote', cmd_args=args, stream=stream) logger.info('Terraform remote configured.')
def weight_unit(self, weight_unit): """Sets the weight_unit of this MeasurementSettings. :param weight_unit: The weight_unit of this MeasurementSettings. :type: str """ allowed_values = ["pound", "kilogram"] # noqa: E501 if weight_unit is not None and weight_unit not in allowed_values: raise ValueError( "Invalid value for `weight_unit` ({0}), must be one of {1}" # noqa: E501 .format(weight_unit, allowed_values) ) self._weight_unit = weight_unit
def function[weight_unit, parameter[self, weight_unit]]: constant[Sets the weight_unit of this MeasurementSettings. :param weight_unit: The weight_unit of this MeasurementSettings. :type: str ] variable[allowed_values] assign[=] list[[<ast.Constant object at 0x7da2054a4b50>, <ast.Constant object at 0x7da2054a6dd0>]] if <ast.BoolOp object at 0x7da2054a5570> begin[:] <ast.Raise object at 0x7da2054a6b30> name[self]._weight_unit assign[=] name[weight_unit]
keyword[def] identifier[weight_unit] ( identifier[self] , identifier[weight_unit] ): literal[string] identifier[allowed_values] =[ literal[string] , literal[string] ] keyword[if] identifier[weight_unit] keyword[is] keyword[not] keyword[None] keyword[and] identifier[weight_unit] keyword[not] keyword[in] identifier[allowed_values] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[weight_unit] , identifier[allowed_values] ) ) identifier[self] . identifier[_weight_unit] = identifier[weight_unit]
def weight_unit(self, weight_unit): """Sets the weight_unit of this MeasurementSettings. :param weight_unit: The weight_unit of this MeasurementSettings. :type: str """ allowed_values = ['pound', 'kilogram'] # noqa: E501 if weight_unit is not None and weight_unit not in allowed_values: # noqa: E501 raise ValueError('Invalid value for `weight_unit` ({0}), must be one of {1}'.format(weight_unit, allowed_values)) # depends on [control=['if'], data=[]] self._weight_unit = weight_unit
def write_to_file(self, filename, filetype=None): """Write the relaxation to a file. :param filename: The name of the file to write to. The type can be autodetected from the extension: .dat-s for SDPA, .task for mosek or .csv for human readable format. :type filename: str. :param filetype: Optional parameter to define the filetype. It can be "sdpa" for SDPA , "mosek" for Mosek, or "csv" for human readable format. :type filetype: str. """ if filetype == "sdpa" and not filename.endswith(".dat-s"): raise Exception("SDPA files must have .dat-s extension!") if filetype == "mosek" and not filename.endswith(".task"): raise Exception("Mosek files must have .task extension!") elif filetype is None and filename.endswith(".dat-s"): filetype = "sdpa" elif filetype is None and filename.endswith(".csv"): filetype = "csv" elif filetype is None and filename.endswith(".task"): filetype = "mosek" elif filetype is None: raise Exception("Cannot detect filetype from extension!") if filetype == "sdpa": write_to_sdpa(self, filename) elif filetype == "mosek": task = convert_to_mosek(self) task.writedata(filename) elif filetype == "csv": write_to_human_readable(self, filename) else: raise Exception("Unknown filetype")
def function[write_to_file, parameter[self, filename, filetype]]: constant[Write the relaxation to a file. :param filename: The name of the file to write to. The type can be autodetected from the extension: .dat-s for SDPA, .task for mosek or .csv for human readable format. :type filename: str. :param filetype: Optional parameter to define the filetype. It can be "sdpa" for SDPA , "mosek" for Mosek, or "csv" for human readable format. :type filetype: str. ] if <ast.BoolOp object at 0x7da1b10d44c0> begin[:] <ast.Raise object at 0x7da1b10d4f70> if <ast.BoolOp object at 0x7da1b10d5a50> begin[:] <ast.Raise object at 0x7da1b10d74f0> if compare[name[filetype] equal[==] constant[sdpa]] begin[:] call[name[write_to_sdpa], parameter[name[self], name[filename]]]
keyword[def] identifier[write_to_file] ( identifier[self] , identifier[filename] , identifier[filetype] = keyword[None] ): literal[string] keyword[if] identifier[filetype] == literal[string] keyword[and] keyword[not] identifier[filename] . identifier[endswith] ( literal[string] ): keyword[raise] identifier[Exception] ( literal[string] ) keyword[if] identifier[filetype] == literal[string] keyword[and] keyword[not] identifier[filename] . identifier[endswith] ( literal[string] ): keyword[raise] identifier[Exception] ( literal[string] ) keyword[elif] identifier[filetype] keyword[is] keyword[None] keyword[and] identifier[filename] . identifier[endswith] ( literal[string] ): identifier[filetype] = literal[string] keyword[elif] identifier[filetype] keyword[is] keyword[None] keyword[and] identifier[filename] . identifier[endswith] ( literal[string] ): identifier[filetype] = literal[string] keyword[elif] identifier[filetype] keyword[is] keyword[None] keyword[and] identifier[filename] . identifier[endswith] ( literal[string] ): identifier[filetype] = literal[string] keyword[elif] identifier[filetype] keyword[is] keyword[None] : keyword[raise] identifier[Exception] ( literal[string] ) keyword[if] identifier[filetype] == literal[string] : identifier[write_to_sdpa] ( identifier[self] , identifier[filename] ) keyword[elif] identifier[filetype] == literal[string] : identifier[task] = identifier[convert_to_mosek] ( identifier[self] ) identifier[task] . identifier[writedata] ( identifier[filename] ) keyword[elif] identifier[filetype] == literal[string] : identifier[write_to_human_readable] ( identifier[self] , identifier[filename] ) keyword[else] : keyword[raise] identifier[Exception] ( literal[string] )
def write_to_file(self, filename, filetype=None): """Write the relaxation to a file. :param filename: The name of the file to write to. The type can be autodetected from the extension: .dat-s for SDPA, .task for mosek or .csv for human readable format. :type filename: str. :param filetype: Optional parameter to define the filetype. It can be "sdpa" for SDPA , "mosek" for Mosek, or "csv" for human readable format. :type filetype: str. """ if filetype == 'sdpa' and (not filename.endswith('.dat-s')): raise Exception('SDPA files must have .dat-s extension!') # depends on [control=['if'], data=[]] if filetype == 'mosek' and (not filename.endswith('.task')): raise Exception('Mosek files must have .task extension!') # depends on [control=['if'], data=[]] elif filetype is None and filename.endswith('.dat-s'): filetype = 'sdpa' # depends on [control=['if'], data=[]] elif filetype is None and filename.endswith('.csv'): filetype = 'csv' # depends on [control=['if'], data=[]] elif filetype is None and filename.endswith('.task'): filetype = 'mosek' # depends on [control=['if'], data=[]] elif filetype is None: raise Exception('Cannot detect filetype from extension!') # depends on [control=['if'], data=[]] if filetype == 'sdpa': write_to_sdpa(self, filename) # depends on [control=['if'], data=[]] elif filetype == 'mosek': task = convert_to_mosek(self) task.writedata(filename) # depends on [control=['if'], data=[]] elif filetype == 'csv': write_to_human_readable(self, filename) # depends on [control=['if'], data=[]] else: raise Exception('Unknown filetype')
def reviews(self, packageName, filterByDevice=False, sort=2, nb_results=None, offset=None): """Browse reviews for an application Args: packageName (str): app unique ID. filterByDevice (bool): filter results for current device sort (int): sorting criteria (values are unknown) nb_results (int): max number of reviews to return offset (int): return reviews starting from an offset value Returns: dict object containing all the protobuf data returned from the api """ # TODO: select the number of reviews to return path = REVIEWS_URL + "?doc={}&sort={}".format(requests.utils.quote(packageName), sort) if nb_results is not None: path += "&n={}".format(nb_results) if offset is not None: path += "&o={}".format(offset) if filterByDevice: path += "&dfil=1" data = self.executeRequestApi2(path) output = [] for review in data.payload.reviewResponse.getResponse.review: output.append(utils.parseProtobufObj(review)) return output
def function[reviews, parameter[self, packageName, filterByDevice, sort, nb_results, offset]]: constant[Browse reviews for an application Args: packageName (str): app unique ID. filterByDevice (bool): filter results for current device sort (int): sorting criteria (values are unknown) nb_results (int): max number of reviews to return offset (int): return reviews starting from an offset value Returns: dict object containing all the protobuf data returned from the api ] variable[path] assign[=] binary_operation[name[REVIEWS_URL] + call[constant[?doc={}&sort={}].format, parameter[call[name[requests].utils.quote, parameter[name[packageName]]], name[sort]]]] if compare[name[nb_results] is_not constant[None]] begin[:] <ast.AugAssign object at 0x7da20e957a90> if compare[name[offset] is_not constant[None]] begin[:] <ast.AugAssign object at 0x7da20e957d30> if name[filterByDevice] begin[:] <ast.AugAssign object at 0x7da20e954100> variable[data] assign[=] call[name[self].executeRequestApi2, parameter[name[path]]] variable[output] assign[=] list[[]] for taget[name[review]] in starred[name[data].payload.reviewResponse.getResponse.review] begin[:] call[name[output].append, parameter[call[name[utils].parseProtobufObj, parameter[name[review]]]]] return[name[output]]
keyword[def] identifier[reviews] ( identifier[self] , identifier[packageName] , identifier[filterByDevice] = keyword[False] , identifier[sort] = literal[int] , identifier[nb_results] = keyword[None] , identifier[offset] = keyword[None] ): literal[string] identifier[path] = identifier[REVIEWS_URL] + literal[string] . identifier[format] ( identifier[requests] . identifier[utils] . identifier[quote] ( identifier[packageName] ), identifier[sort] ) keyword[if] identifier[nb_results] keyword[is] keyword[not] keyword[None] : identifier[path] += literal[string] . identifier[format] ( identifier[nb_results] ) keyword[if] identifier[offset] keyword[is] keyword[not] keyword[None] : identifier[path] += literal[string] . identifier[format] ( identifier[offset] ) keyword[if] identifier[filterByDevice] : identifier[path] += literal[string] identifier[data] = identifier[self] . identifier[executeRequestApi2] ( identifier[path] ) identifier[output] =[] keyword[for] identifier[review] keyword[in] identifier[data] . identifier[payload] . identifier[reviewResponse] . identifier[getResponse] . identifier[review] : identifier[output] . identifier[append] ( identifier[utils] . identifier[parseProtobufObj] ( identifier[review] )) keyword[return] identifier[output]
def reviews(self, packageName, filterByDevice=False, sort=2, nb_results=None, offset=None): """Browse reviews for an application Args: packageName (str): app unique ID. filterByDevice (bool): filter results for current device sort (int): sorting criteria (values are unknown) nb_results (int): max number of reviews to return offset (int): return reviews starting from an offset value Returns: dict object containing all the protobuf data returned from the api """ # TODO: select the number of reviews to return path = REVIEWS_URL + '?doc={}&sort={}'.format(requests.utils.quote(packageName), sort) if nb_results is not None: path += '&n={}'.format(nb_results) # depends on [control=['if'], data=['nb_results']] if offset is not None: path += '&o={}'.format(offset) # depends on [control=['if'], data=['offset']] if filterByDevice: path += '&dfil=1' # depends on [control=['if'], data=[]] data = self.executeRequestApi2(path) output = [] for review in data.payload.reviewResponse.getResponse.review: output.append(utils.parseProtobufObj(review)) # depends on [control=['for'], data=['review']] return output