code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def setdict(self, D=None, B=None):
    """Set dictionary array."""
    if D is not None:
        self.D = np.asarray(D, dtype=self.dtype)
    if B is not None:
        self.B = np.asarray(B, dtype=self.dtype)
    # Recompute the eigendecomposition of B^T B whenever B changes (or on
    # the first call, when no cached Gamma exists yet).
    if B is not None or not hasattr(self, 'Gamma'):
        self.Gamma, self.Q = np.linalg.eigh(self.B.T.dot(self.B))
        self.Gamma = np.abs(self.Gamma)
    # Recompute the DFT of the dictionary whenever D changes (or on the
    # first call, when no cached Df exists yet).
    if D is not None or not hasattr(self, 'Df'):
        self.Df = sl.rfftn(self.D, self.cri.Nv, self.cri.axisN)
    # Fold the square root of Gamma into the transformed dictionary so
    # that the diagonal block-matrix solver can be applied directly.
    gshape = [1] * len(self.cri.shpD)
    gshape[self.cri.axisC] = self.Gamma.shape[0]
    self.gDf = np.sqrt(self.Gamma).reshape(gshape) * self.Df
    if self.opt['HighMemSolve']:
        # Precompute and cache the solver factor (memory for speed).
        self.c = sl.solvedbd_sm_c(
            self.gDf, np.conj(self.gDf),
            (self.mu / self.rho) * self.GHGf + 1.0, self.cri.axisM)
    else:
        self.c = None
def function[setdict, parameter[self, D, B]]: constant[Set dictionary array.] if compare[name[D] is_not constant[None]] begin[:] name[self].D assign[=] call[name[np].asarray, parameter[name[D]]] if compare[name[B] is_not constant[None]] begin[:] name[self].B assign[=] call[name[np].asarray, parameter[name[B]]] if <ast.BoolOp object at 0x7da1b0619870> begin[:] <ast.Tuple object at 0x7da1b0619b10> assign[=] call[name[np].linalg.eigh, parameter[call[name[self].B.T.dot, parameter[name[self].B]]]] name[self].Gamma assign[=] call[name[np].abs, parameter[name[self].Gamma]] if <ast.BoolOp object at 0x7da1b06d2650> begin[:] name[self].Df assign[=] call[name[sl].rfftn, parameter[name[self].D, name[self].cri.Nv, name[self].cri.axisN]] variable[shpg] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b06d3640>]] * call[name[len], parameter[name[self].cri.shpD]]] call[name[shpg]][name[self].cri.axisC] assign[=] call[name[self].Gamma.shape][constant[0]] variable[Gamma2] assign[=] call[call[name[np].sqrt, parameter[name[self].Gamma]].reshape, parameter[name[shpg]]] name[self].gDf assign[=] binary_operation[name[Gamma2] * name[self].Df] if call[name[self].opt][constant[HighMemSolve]] begin[:] name[self].c assign[=] call[name[sl].solvedbd_sm_c, parameter[name[self].gDf, call[name[np].conj, parameter[name[self].gDf]], binary_operation[binary_operation[binary_operation[name[self].mu / name[self].rho] * name[self].GHGf] + constant[1.0]], name[self].cri.axisM]]
keyword[def] identifier[setdict] ( identifier[self] , identifier[D] = keyword[None] , identifier[B] = keyword[None] ): literal[string] keyword[if] identifier[D] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[D] = identifier[np] . identifier[asarray] ( identifier[D] , identifier[dtype] = identifier[self] . identifier[dtype] ) keyword[if] identifier[B] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[B] = identifier[np] . identifier[asarray] ( identifier[B] , identifier[dtype] = identifier[self] . identifier[dtype] ) keyword[if] identifier[B] keyword[is] keyword[not] keyword[None] keyword[or] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[Gamma] , identifier[self] . identifier[Q] = identifier[np] . identifier[linalg] . identifier[eigh] ( identifier[self] . identifier[B] . identifier[T] . identifier[dot] ( identifier[self] . identifier[B] )) identifier[self] . identifier[Gamma] = identifier[np] . identifier[abs] ( identifier[self] . identifier[Gamma] ) keyword[if] identifier[D] keyword[is] keyword[not] keyword[None] keyword[or] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[Df] = identifier[sl] . identifier[rfftn] ( identifier[self] . identifier[D] , identifier[self] . identifier[cri] . identifier[Nv] , identifier[self] . identifier[cri] . identifier[axisN] ) identifier[shpg] =[ literal[int] ]* identifier[len] ( identifier[self] . identifier[cri] . identifier[shpD] ) identifier[shpg] [ identifier[self] . identifier[cri] . identifier[axisC] ]= identifier[self] . identifier[Gamma] . identifier[shape] [ literal[int] ] identifier[Gamma2] = identifier[np] . identifier[sqrt] ( identifier[self] . identifier[Gamma] ). identifier[reshape] ( identifier[shpg] ) identifier[self] . identifier[gDf] = identifier[Gamma2] * identifier[self] . identifier[Df] keyword[if] identifier[self] . 
identifier[opt] [ literal[string] ]: identifier[self] . identifier[c] = identifier[sl] . identifier[solvedbd_sm_c] ( identifier[self] . identifier[gDf] , identifier[np] . identifier[conj] ( identifier[self] . identifier[gDf] ), ( identifier[self] . identifier[mu] / identifier[self] . identifier[rho] )* identifier[self] . identifier[GHGf] + literal[int] , identifier[self] . identifier[cri] . identifier[axisM] ) keyword[else] : identifier[self] . identifier[c] = keyword[None]
def setdict(self, D=None, B=None): """Set dictionary array.""" if D is not None: self.D = np.asarray(D, dtype=self.dtype) # depends on [control=['if'], data=['D']] if B is not None: self.B = np.asarray(B, dtype=self.dtype) # depends on [control=['if'], data=['B']] if B is not None or not hasattr(self, 'Gamma'): (self.Gamma, self.Q) = np.linalg.eigh(self.B.T.dot(self.B)) self.Gamma = np.abs(self.Gamma) # depends on [control=['if'], data=[]] if D is not None or not hasattr(self, 'Df'): self.Df = sl.rfftn(self.D, self.cri.Nv, self.cri.axisN) # depends on [control=['if'], data=[]] # Fold square root of Gamma into the dictionary array to enable # use of the solvedbi_sm solver shpg = [1] * len(self.cri.shpD) shpg[self.cri.axisC] = self.Gamma.shape[0] Gamma2 = np.sqrt(self.Gamma).reshape(shpg) self.gDf = Gamma2 * self.Df if self.opt['HighMemSolve']: self.c = sl.solvedbd_sm_c(self.gDf, np.conj(self.gDf), self.mu / self.rho * self.GHGf + 1.0, self.cri.axisM) # depends on [control=['if'], data=[]] else: self.c = None
def get_slac_default_args(job_time=1500):
    """
    Create a batch job interface object.

    Parameters
    ----------
    job_time : int
        Expected max length of the job, in seconds.   This is used to
        select the batch queue and set the job_check_sleep parameter that
        sets how often we check for job completion.
    """
    # Build the default option set; only the LSF wall-time ('W') depends
    # on the requested job length.
    defaults = {
        'lsf_args': {'W': job_time,
                     'R': '"select[rhel60&&!fell]"'},
        'max_jobs': 500,
        'time_per_cycle': 15,
        'jobs_per_cycle': 20,
        'max_job_age': 90,
        'no_batch': False,
    }
    # Return a fresh copy so callers can mutate the result freely.
    return dict(defaults)
def function[get_slac_default_args, parameter[job_time]]: constant[ Create a batch job interface object. Parameters ---------- job_time : int Expected max length of the job, in seconds. This is used to select the batch queue and set the job_check_sleep parameter that sets how often we check for job completion. ] variable[slac_default_args] assign[=] call[name[dict], parameter[]] return[call[name[slac_default_args].copy, parameter[]]]
keyword[def] identifier[get_slac_default_args] ( identifier[job_time] = literal[int] ): literal[string] identifier[slac_default_args] = identifier[dict] ( identifier[lsf_args] ={ literal[string] : identifier[job_time] , literal[string] : literal[string] }, identifier[max_jobs] = literal[int] , identifier[time_per_cycle] = literal[int] , identifier[jobs_per_cycle] = literal[int] , identifier[max_job_age] = literal[int] , identifier[no_batch] = keyword[False] ) keyword[return] identifier[slac_default_args] . identifier[copy] ()
def get_slac_default_args(job_time=1500): """ Create a batch job interface object. Parameters ---------- job_time : int Expected max length of the job, in seconds. This is used to select the batch queue and set the job_check_sleep parameter that sets how often we check for job completion. """ slac_default_args = dict(lsf_args={'W': job_time, 'R': '"select[rhel60&&!fell]"'}, max_jobs=500, time_per_cycle=15, jobs_per_cycle=20, max_job_age=90, no_batch=False) return slac_default_args.copy()
def export(self):
    """
    Exports a network as a networkx MultiDiGraph intermediate
    representation suitable for visualization.

    :return: networkx MultiDiGraph
    """
    graph = nx.MultiDiGraph()
    # One node per region, labelled with the region's name.
    regions = self.network.getRegions()
    for index in xrange(regions.getCount()):
        name = regions.getByIndex(index)[0]
        graph.add_node(name, label=name)
    # One edge per link, annotated with the source-output / dest-input
    # name pair.
    for _linkName, link in self.network.getLinks():
        graph.add_edge(link.getSrcRegionName(),
                       link.getDestRegionName(),
                       src=link.getSrcOutputName(),
                       dest=link.getDestInputName())
    return graph
def function[export, parameter[self]]: constant[ Exports a network as a networkx MultiDiGraph intermediate representation suitable for visualization. :return: networkx MultiDiGraph ] variable[graph] assign[=] call[name[nx].MultiDiGraph, parameter[]] variable[regions] assign[=] call[name[self].network.getRegions, parameter[]] for taget[name[idx]] in starred[call[name[xrange], parameter[call[name[regions].getCount, parameter[]]]]] begin[:] variable[regionPair] assign[=] call[name[regions].getByIndex, parameter[name[idx]]] variable[regionName] assign[=] call[name[regionPair]][constant[0]] call[name[graph].add_node, parameter[name[regionName]]] for taget[tuple[[<ast.Name object at 0x7da20e9b2b00>, <ast.Name object at 0x7da20e9b3550>]]] in starred[call[name[self].network.getLinks, parameter[]]] begin[:] call[name[graph].add_edge, parameter[call[name[link].getSrcRegionName, parameter[]], call[name[link].getDestRegionName, parameter[]]]] return[name[graph]]
keyword[def] identifier[export] ( identifier[self] ): literal[string] identifier[graph] = identifier[nx] . identifier[MultiDiGraph] () identifier[regions] = identifier[self] . identifier[network] . identifier[getRegions] () keyword[for] identifier[idx] keyword[in] identifier[xrange] ( identifier[regions] . identifier[getCount] ()): identifier[regionPair] = identifier[regions] . identifier[getByIndex] ( identifier[idx] ) identifier[regionName] = identifier[regionPair] [ literal[int] ] identifier[graph] . identifier[add_node] ( identifier[regionName] , identifier[label] = identifier[regionName] ) keyword[for] identifier[linkName] , identifier[link] keyword[in] identifier[self] . identifier[network] . identifier[getLinks] (): identifier[graph] . identifier[add_edge] ( identifier[link] . identifier[getSrcRegionName] (), identifier[link] . identifier[getDestRegionName] (), identifier[src] = identifier[link] . identifier[getSrcOutputName] (), identifier[dest] = identifier[link] . identifier[getDestInputName] ()) keyword[return] identifier[graph]
def export(self): """ Exports a network as a networkx MultiDiGraph intermediate representation suitable for visualization. :return: networkx MultiDiGraph """ graph = nx.MultiDiGraph() # Add regions to graph as nodes, annotated by name regions = self.network.getRegions() for idx in xrange(regions.getCount()): regionPair = regions.getByIndex(idx) regionName = regionPair[0] graph.add_node(regionName, label=regionName) # depends on [control=['for'], data=['idx']] # Add links between regions to graph as edges, annotate by input-output # name pairs for (linkName, link) in self.network.getLinks(): graph.add_edge(link.getSrcRegionName(), link.getDestRegionName(), src=link.getSrcOutputName(), dest=link.getDestInputName()) # depends on [control=['for'], data=[]] return graph
def allocate(self):
    """Builds the context and the Hooks."""
    debug = self.logger.debug
    debug("Allocating environment.")
    self._allocate()
    debug("Environment successfully allocated.")
def function[allocate, parameter[self]]: constant[Builds the context and the Hooks.] call[name[self].logger.debug, parameter[constant[Allocating environment.]]] call[name[self]._allocate, parameter[]] call[name[self].logger.debug, parameter[constant[Environment successfully allocated.]]]
keyword[def] identifier[allocate] ( identifier[self] ): literal[string] identifier[self] . identifier[logger] . identifier[debug] ( literal[string] ) identifier[self] . identifier[_allocate] () identifier[self] . identifier[logger] . identifier[debug] ( literal[string] )
def allocate(self): """Builds the context and the Hooks.""" self.logger.debug('Allocating environment.') self._allocate() self.logger.debug('Environment successfully allocated.')
def plot_aligned(pca, sparse=True, **kwargs):
    """
    Plots the residuals of a principal component analysis of attitude data.

    :param pca: fitted PCA object; must provide ``rotated()`` and
        ``normal`` and be accepted by ``sampling_axes``.
    :param sparse: when True, large point sets are strided down to roughly
        10000 points before plotting.
    :param kwargs: ``colormap`` (optional) — if given, points are colored
        by the last column of the rotated data instead of flat gray.
    :return: a matplotlib ``Figure`` with three stacked panels (plan view
        plus two cross-sections).
    """
    colormap = kwargs.pop('colormap', None)
    # Rotated data in the PCA frame; the (0,1)/(0,2)/(1,2) panel pairs
    # below assume columns are (long axis, short axis, residual) —
    # TODO confirm against pca.rotated()'s contract.
    A = pca.rotated()
    # Flip the result if upside down
    if pca.normal[2] < 0:
        A[:, -1] *= -1
    # Per-column extents and spans of the rotated data.
    minmax = [(A[:, i].min(), A[:, i].max()) for i in range(3)]
    lengths = [j - i for i, j in minmax]
    if sparse:
        # Thin the point set by striding so at most ~10000 points remain.
        i = 1
        l = len(A)
        if l > 10000:
            i = N.ceil(l / 10000)
        A = A[::int(i)]
    log.info("Plotting with {} points".format(len(A)))
    # Figure size chosen so the stacked panels roughly track the data's
    # aspect ratios; height clamped to a minimum of 3.
    w = 8
    ratio = (lengths[2] * 2 + lengths[1]) / lengths[0] * 1.5
    h = w * ratio
    if h < 3:
        h = 3
    # Relative height of the plan-view panel, clamped to at most 5.
    r = (lengths[1] + 5) / (lengths[2] + 5)
    if r > 5:
        r = 5
    fig = Figure(figsize=(w, h))
    fig.canvas = FigureCanvas(fig)

    def setup_axes():
        # Yield three stacked axes; each new axes shares its x axis with
        # the previous one via the reused kwargs dict.
        gs = GridSpec(3, 1, height_ratios=[r, 1, 1])
        kwargs = dict()
        axes = []
        for g in gs:
            ax = fig.add_subplot(g, **kwargs)
            kwargs['sharex'] = ax
            yield ax

    axes = list(setup_axes())
    fig.subplots_adjust(hspace=0, wspace=0.1)
    #lengths = attitude.pca.singular_values[::-1]
    titles = ("Plan view", "Long cross-section", "Short cross-section")
    ylabels = ("Meters", "Residuals (m)", "Residuals (m)")
    colors = ['cornflowerblue', 'red']
    # Hyperbolic error axes of the fitted plane (see sampling_axes).
    hyp = sampling_axes(pca)
    vertical = vector(0, 0, 1)
    # NOTE(review): the loop body rebinds the name ``axes`` (list of axes
    # -> numpy array); zip already holds the list, so iteration is
    # unaffected, but the list is unreachable after the first iteration.
    for title, ax, (a, b), ylabel in zip(titles, axes,
                                         [(0, 1), (0, 2), (1, 2)], ylabels):
        kw = dict(linewidth=2, alpha=0.5)
        bounds = minmax[a]
        if b != 2:
            # Plan view: draw both principal-axis guide lines.
            ax.plot(bounds, (0, 0), c=colors[a], **kw)
            ax.plot((0, 0), minmax[b], c=colors[b], **kw)
        else:
            # Cross-sections: draw the in-plane axis slightly below zero.
            ax.plot(bounds, (-10, -10), c=colors[a], **kw)
        # Project the error hyperbola onto the (axis a, vertical) plane.
        v0 = N.zeros(3)
        v0[a] = 1
        axes = N.array([v0, vertical])
        ax_ = (axes @ N.diag(hyp) @ axes.T).T
        ax_ = N.sqrt(ax_)
        l1 = N.linalg.norm(ax_[:-1])
        l2 = N.linalg.norm(ax_[-1])
        # Angular error (95% CI) in degrees from the vertical/in-plane
        # error-length ratio.
        ang_error = 2 * N.degrees(N.arctan2(l2, l1))
        title += ": {:.0f} m long, angular error (95% CI): {:.2f}º".format(
            lengths[a], ang_error)
        bounds = minmax[0]
        x_ = N.linspace(bounds[0] * 1.2, bounds[1] * 1.2, 100)
        err = HyperbolicErrors(hyp, x_, axes=axes)
        err.plot(ax, fc='#cccccc', alpha=0.3)
        x, y = A[:, a], A[:, b]
        kw = dict(alpha=0.5, zorder=5)
        if colormap is None:
            ax.plot(x, y, c="#555555", linestyle='None', marker='.', **kw)
        else:
            # Color each point by the residual (last) column.
            ax.scatter(x, y, c=A[:, -1], cmap=colormap, **kw)
        #ax.set_aspect("equal")
        ax.text(0.01, .99, title, verticalalignment='top',
                transform=ax.transAxes)
        #ax.autoscale(tight=True)
        ax.yaxis.set_ticks([])
        ax.xaxis.set_ticks_position('bottom')
        if a != 1:
            pass
            #ax.xaxis.set_ticks([])
            #ax.spines['bottom'].set_color('none')
        for spine in ax.spines.values():
            spine.set_visible(False)
    # Annotate the final (short cross-section) panel with the max residual.
    ax.text(0.99, 0.99, "Max residual: {:.1f} m".format(lengths[2]),
            verticalalignment='bottom', ha='right', transform=ax.transAxes)
    ax.set_xlabel("Meters")
    return fig
def function[plot_aligned, parameter[pca, sparse]]: constant[ Plots the residuals of a principal component analysis of attiude data. ] variable[colormap] assign[=] call[name[kwargs].pop, parameter[constant[colormap], constant[None]]] variable[A] assign[=] call[name[pca].rotated, parameter[]] if compare[call[name[pca].normal][constant[2]] less[<] constant[0]] begin[:] <ast.AugAssign object at 0x7da1b185e590> variable[minmax] assign[=] <ast.ListComp object at 0x7da1b185d600> variable[lengths] assign[=] <ast.ListComp object at 0x7da1b1841210> if name[sparse] begin[:] variable[i] assign[=] constant[1] variable[l] assign[=] call[name[len], parameter[name[A]]] if compare[name[l] greater[>] constant[10000]] begin[:] variable[i] assign[=] call[name[N].ceil, parameter[binary_operation[name[l] / constant[10000]]]] variable[A] assign[=] call[name[A]][<ast.Slice object at 0x7da1b1841cc0>] call[name[log].info, parameter[call[constant[Plotting with {} points].format, parameter[call[name[len], parameter[name[A]]]]]]] variable[w] assign[=] constant[8] variable[ratio] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[call[name[lengths]][constant[2]] * constant[2]] + call[name[lengths]][constant[1]]] / call[name[lengths]][constant[0]]] * constant[1.5]] variable[h] assign[=] binary_operation[name[w] * name[ratio]] if compare[name[h] less[<] constant[3]] begin[:] variable[h] assign[=] constant[3] variable[r] assign[=] binary_operation[binary_operation[call[name[lengths]][constant[1]] + constant[5]] / binary_operation[call[name[lengths]][constant[2]] + constant[5]]] if compare[name[r] greater[>] constant[5]] begin[:] variable[r] assign[=] constant[5] variable[fig] assign[=] call[name[Figure], parameter[]] name[fig].canvas assign[=] call[name[FigureCanvas], parameter[name[fig]]] def function[setup_axes, parameter[]]: variable[gs] assign[=] call[name[GridSpec], parameter[constant[3], constant[1]]] variable[kwargs] assign[=] call[name[dict], parameter[]] 
variable[axes] assign[=] list[[]] for taget[name[g]] in starred[name[gs]] begin[:] variable[ax] assign[=] call[name[fig].add_subplot, parameter[name[g]]] call[name[kwargs]][constant[sharex]] assign[=] name[ax] <ast.Yield object at 0x7da1b1836470> variable[axes] assign[=] call[name[list], parameter[call[name[setup_axes], parameter[]]]] call[name[fig].subplots_adjust, parameter[]] variable[titles] assign[=] tuple[[<ast.Constant object at 0x7da1b18359c0>, <ast.Constant object at 0x7da1b18370a0>, <ast.Constant object at 0x7da1b18349a0>]] variable[ylabels] assign[=] tuple[[<ast.Constant object at 0x7da1b1836e60>, <ast.Constant object at 0x7da1b18351b0>, <ast.Constant object at 0x7da1b18375b0>]] variable[colors] assign[=] list[[<ast.Constant object at 0x7da1b18355d0>, <ast.Constant object at 0x7da1b1835150>]] variable[hyp] assign[=] call[name[sampling_axes], parameter[name[pca]]] variable[vertical] assign[=] call[name[vector], parameter[constant[0], constant[0], constant[1]]] for taget[tuple[[<ast.Name object at 0x7da1b1835db0>, <ast.Name object at 0x7da1b18378e0>, <ast.Tuple object at 0x7da1b18352d0>, <ast.Name object at 0x7da1b1835390>]]] in starred[call[name[zip], parameter[name[titles], name[axes], list[[<ast.Tuple object at 0x7da1b1834b50>, <ast.Tuple object at 0x7da1b18366e0>, <ast.Tuple object at 0x7da1b1837040>]], name[ylabels]]]] begin[:] variable[kw] assign[=] call[name[dict], parameter[]] variable[bounds] assign[=] call[name[minmax]][name[a]] if compare[name[b] not_equal[!=] constant[2]] begin[:] call[name[ax].plot, parameter[name[bounds], tuple[[<ast.Constant object at 0x7da1b190e590>, <ast.Constant object at 0x7da1b190f820>]]]] call[name[ax].plot, parameter[tuple[[<ast.Constant object at 0x7da1b190c5b0>, <ast.Constant object at 0x7da1b190dd80>]], call[name[minmax]][name[b]]]] <ast.Tuple object at 0x7da1b190fd60> assign[=] tuple[[<ast.Subscript object at 0x7da1b190db10>, <ast.Subscript object at 0x7da1b190d060>]] variable[kw] assign[=] call[name[dict], 
parameter[]] if compare[name[colormap] is constant[None]] begin[:] call[name[ax].plot, parameter[name[x], name[y]]] call[name[ax].text, parameter[constant[0.01], constant[0.99], name[title]]] call[name[ax].yaxis.set_ticks, parameter[list[[]]]] call[name[ax].xaxis.set_ticks_position, parameter[constant[bottom]]] if compare[name[a] not_equal[!=] constant[1]] begin[:] pass for taget[name[spine]] in starred[call[name[ax].spines.values, parameter[]]] begin[:] call[name[spine].set_visible, parameter[constant[False]]] call[name[ax].text, parameter[constant[0.99], constant[0.99], call[constant[Max residual: {:.1f} m].format, parameter[call[name[lengths]][constant[2]]]]]] call[name[ax].set_xlabel, parameter[constant[Meters]]] return[name[fig]]
keyword[def] identifier[plot_aligned] ( identifier[pca] , identifier[sparse] = keyword[True] ,** identifier[kwargs] ): literal[string] identifier[colormap] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] ) identifier[A] = identifier[pca] . identifier[rotated] () keyword[if] identifier[pca] . identifier[normal] [ literal[int] ]< literal[int] : identifier[A] [:,- literal[int] ]*=- literal[int] identifier[minmax] =[( identifier[A] [:, identifier[i] ]. identifier[min] (), identifier[A] [:, identifier[i] ]. identifier[max] ()) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] )] identifier[lengths] =[ identifier[j] - identifier[i] keyword[for] identifier[i] , identifier[j] keyword[in] identifier[minmax] ] keyword[if] identifier[sparse] : identifier[i] = literal[int] identifier[l] = identifier[len] ( identifier[A] ) keyword[if] identifier[l] > literal[int] : identifier[i] = identifier[N] . identifier[ceil] ( identifier[l] / literal[int] ) identifier[A] = identifier[A] [:: identifier[int] ( identifier[i] )] identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[len] ( identifier[A] ))) identifier[w] = literal[int] identifier[ratio] =( identifier[lengths] [ literal[int] ]* literal[int] + identifier[lengths] [ literal[int] ])/ identifier[lengths] [ literal[int] ]* literal[int] identifier[h] = identifier[w] * identifier[ratio] keyword[if] identifier[h] < literal[int] : identifier[h] = literal[int] identifier[r] =( identifier[lengths] [ literal[int] ]+ literal[int] )/( identifier[lengths] [ literal[int] ]+ literal[int] ) keyword[if] identifier[r] > literal[int] : identifier[r] = literal[int] identifier[fig] = identifier[Figure] ( identifier[figsize] =( identifier[w] , identifier[h] )) identifier[fig] . 
identifier[canvas] = identifier[FigureCanvas] ( identifier[fig] ) keyword[def] identifier[setup_axes] (): identifier[gs] = identifier[GridSpec] ( literal[int] , literal[int] , identifier[height_ratios] =[ identifier[r] , literal[int] , literal[int] ]) identifier[kwargs] = identifier[dict] () identifier[axes] =[] keyword[for] identifier[g] keyword[in] identifier[gs] : identifier[ax] = identifier[fig] . identifier[add_subplot] ( identifier[g] ,** identifier[kwargs] ) identifier[kwargs] [ literal[string] ]= identifier[ax] keyword[yield] identifier[ax] identifier[axes] = identifier[list] ( identifier[setup_axes] ()) identifier[fig] . identifier[subplots_adjust] ( identifier[hspace] = literal[int] , identifier[wspace] = literal[int] ) identifier[titles] =( literal[string] , literal[string] , literal[string] ) identifier[ylabels] =( literal[string] , literal[string] , literal[string] ) identifier[colors] =[ literal[string] , literal[string] ] identifier[hyp] = identifier[sampling_axes] ( identifier[pca] ) identifier[vertical] = identifier[vector] ( literal[int] , literal[int] , literal[int] ) keyword[for] identifier[title] , identifier[ax] ,( identifier[a] , identifier[b] ), identifier[ylabel] keyword[in] identifier[zip] ( identifier[titles] , identifier[axes] , [( literal[int] , literal[int] ),( literal[int] , literal[int] ),( literal[int] , literal[int] )], identifier[ylabels] ): identifier[kw] = identifier[dict] ( identifier[linewidth] = literal[int] , identifier[alpha] = literal[int] ) identifier[bounds] = identifier[minmax] [ identifier[a] ] keyword[if] identifier[b] != literal[int] : identifier[ax] . identifier[plot] ( identifier[bounds] ,( literal[int] , literal[int] ), identifier[c] = identifier[colors] [ identifier[a] ],** identifier[kw] ) identifier[ax] . identifier[plot] (( literal[int] , literal[int] ), identifier[minmax] [ identifier[b] ], identifier[c] = identifier[colors] [ identifier[b] ],** identifier[kw] ) keyword[else] : identifier[ax] . 
identifier[plot] ( identifier[bounds] ,(- literal[int] ,- literal[int] ), identifier[c] = identifier[colors] [ identifier[a] ],** identifier[kw] ) identifier[v0] = identifier[N] . identifier[zeros] ( literal[int] ) identifier[v0] [ identifier[a] ]= literal[int] identifier[axes] = identifier[N] . identifier[array] ([ identifier[v0] , identifier[vertical] ]) identifier[ax_] =( identifier[axes] @ identifier[N] . identifier[diag] ( identifier[hyp] )@ identifier[axes] . identifier[T] ). identifier[T] identifier[ax_] = identifier[N] . identifier[sqrt] ( identifier[ax_] ) identifier[l1] = identifier[N] . identifier[linalg] . identifier[norm] ( identifier[ax_] [:- literal[int] ]) identifier[l2] = identifier[N] . identifier[linalg] . identifier[norm] ( identifier[ax_] [- literal[int] ]) identifier[ang_error] = literal[int] * identifier[N] . identifier[degrees] ( identifier[N] . identifier[arctan2] ( identifier[l2] , identifier[l1] )) identifier[title] += literal[string] . identifier[format] ( identifier[lengths] [ identifier[a] ], identifier[ang_error] ) identifier[bounds] = identifier[minmax] [ literal[int] ] identifier[x_] = identifier[N] . identifier[linspace] ( identifier[bounds] [ literal[int] ]* literal[int] , identifier[bounds] [ literal[int] ]* literal[int] , literal[int] ) identifier[err] = identifier[HyperbolicErrors] ( identifier[hyp] , identifier[x_] , identifier[axes] = identifier[axes] ) identifier[err] . identifier[plot] ( identifier[ax] , identifier[fc] = literal[string] , identifier[alpha] = literal[int] ) identifier[x] , identifier[y] = identifier[A] [:, identifier[a] ], identifier[A] [:, identifier[b] ] identifier[kw] = identifier[dict] ( identifier[alpha] = literal[int] , identifier[zorder] = literal[int] ) keyword[if] identifier[colormap] keyword[is] keyword[None] : identifier[ax] . 
identifier[plot] ( identifier[x] , identifier[y] , identifier[c] = literal[string] , identifier[linestyle] = literal[string] , identifier[marker] = literal[string] ,** identifier[kw] ) keyword[else] : identifier[ax] . identifier[scatter] ( identifier[x] , identifier[y] , identifier[c] = identifier[A] [:,- literal[int] ], identifier[cmap] = identifier[colormap] ,** identifier[kw] ) identifier[ax] . identifier[text] ( literal[int] , literal[int] , identifier[title] , identifier[verticalalignment] = literal[string] , identifier[transform] = identifier[ax] . identifier[transAxes] ) identifier[ax] . identifier[yaxis] . identifier[set_ticks] ([]) identifier[ax] . identifier[xaxis] . identifier[set_ticks_position] ( literal[string] ) keyword[if] identifier[a] != literal[int] : keyword[pass] keyword[for] identifier[spine] keyword[in] identifier[ax] . identifier[spines] . identifier[values] (): identifier[spine] . identifier[set_visible] ( keyword[False] ) identifier[ax] . identifier[text] ( literal[int] , literal[int] , literal[string] . identifier[format] ( identifier[lengths] [ literal[int] ]), identifier[verticalalignment] = literal[string] , identifier[ha] = literal[string] , identifier[transform] = identifier[ax] . identifier[transAxes] ) identifier[ax] . identifier[set_xlabel] ( literal[string] ) keyword[return] identifier[fig]
def plot_aligned(pca, sparse=True, **kwargs): """ Plots the residuals of a principal component analysis of attiude data. """ colormap = kwargs.pop('colormap', None) A = pca.rotated() # Flip the result if upside down if pca.normal[2] < 0: A[:, -1] *= -1 # depends on [control=['if'], data=[]] minmax = [(A[:, i].min(), A[:, i].max()) for i in range(3)] lengths = [j - i for (i, j) in minmax] if sparse: i = 1 l = len(A) if l > 10000: i = N.ceil(l / 10000) A = A[::int(i)] # depends on [control=['if'], data=['l']] # depends on [control=['if'], data=[]] log.info('Plotting with {} points'.format(len(A))) w = 8 ratio = (lengths[2] * 2 + lengths[1]) / lengths[0] * 1.5 h = w * ratio if h < 3: h = 3 # depends on [control=['if'], data=['h']] r = (lengths[1] + 5) / (lengths[2] + 5) if r > 5: r = 5 # depends on [control=['if'], data=['r']] fig = Figure(figsize=(w, h)) fig.canvas = FigureCanvas(fig) def setup_axes(): gs = GridSpec(3, 1, height_ratios=[r, 1, 1]) kwargs = dict() axes = [] for g in gs: ax = fig.add_subplot(g, **kwargs) kwargs['sharex'] = ax yield ax # depends on [control=['for'], data=['g']] axes = list(setup_axes()) fig.subplots_adjust(hspace=0, wspace=0.1) #lengths = attitude.pca.singular_values[::-1] titles = ('Plan view', 'Long cross-section', 'Short cross-section') ylabels = ('Meters', 'Residuals (m)', 'Residuals (m)') colors = ['cornflowerblue', 'red'] hyp = sampling_axes(pca) vertical = vector(0, 0, 1) for (title, ax, (a, b), ylabel) in zip(titles, axes, [(0, 1), (0, 2), (1, 2)], ylabels): kw = dict(linewidth=2, alpha=0.5) bounds = minmax[a] if b != 2: ax.plot(bounds, (0, 0), c=colors[a], **kw) ax.plot((0, 0), minmax[b], c=colors[b], **kw) # depends on [control=['if'], data=['b']] else: ax.plot(bounds, (-10, -10), c=colors[a], **kw) v0 = N.zeros(3) v0[a] = 1 axes = N.array([v0, vertical]) ax_ = (axes @ N.diag(hyp) @ axes.T).T ax_ = N.sqrt(ax_) l1 = N.linalg.norm(ax_[:-1]) l2 = N.linalg.norm(ax_[-1]) ang_error = 2 * N.degrees(N.arctan2(l2, l1)) title += ': 
{:.0f} m long, angular error (95% CI): {:.2f}º'.format(lengths[a], ang_error) bounds = minmax[0] x_ = N.linspace(bounds[0] * 1.2, bounds[1] * 1.2, 100) err = HyperbolicErrors(hyp, x_, axes=axes) err.plot(ax, fc='#cccccc', alpha=0.3) (x, y) = (A[:, a], A[:, b]) kw = dict(alpha=0.5, zorder=5) if colormap is None: ax.plot(x, y, c='#555555', linestyle='None', marker='.', **kw) # depends on [control=['if'], data=[]] else: ax.scatter(x, y, c=A[:, -1], cmap=colormap, **kw) #ax.set_aspect("equal") ax.text(0.01, 0.99, title, verticalalignment='top', transform=ax.transAxes) #ax.autoscale(tight=True) ax.yaxis.set_ticks([]) ax.xaxis.set_ticks_position('bottom') if a != 1: pass # depends on [control=['if'], data=[]] #ax.xaxis.set_ticks([]) #ax.spines['bottom'].set_color('none') for spine in ax.spines.values(): spine.set_visible(False) # depends on [control=['for'], data=['spine']] # depends on [control=['for'], data=[]] ax.text(0.99, 0.99, 'Max residual: {:.1f} m'.format(lengths[2]), verticalalignment='bottom', ha='right', transform=ax.transAxes) ax.set_xlabel('Meters') return fig
def object2xml(self, data):
    r"""Convert python object to xml string.

    :param data: data for build xml. If don't provide the ``root`` option,
        type of ``data`` must be dict and ``len(data) == 1``.
    :rtype: str or unicode

    .. versionadded:: 1.2
    """
    # Fall back to the instance's default encoding if none was configured.
    if not self.__options['encoding']:
        self.set_options(encoding=self.__encoding)
    # Optionally emit the leading XML header declaration fragment.
    if self.__options['header_declare']:
        self.__tree.append(self.build_xml_header())
    root = self.__options['root']
    if not root:
        # Without an explicit root, the single top-level key of ``data``
        # becomes the root element name.
        assert (isinstance(data, utils.DictTypes) and len(data) == 1), \
            'if root not specified, the data that dict object and length must be one required.'
        # NOTE(review): ``dict.items()[0]`` is Python 2 only; on Python 3
        # this would need ``list(data.items())[0]`` — confirm target version.
        root, data = data.items()[0]
    self.build_tree(data, root)
    # __tree accumulates text fragments; join them and strip outer space.
    xml = unicode(''.join(self.__tree).strip())
    # Re-encode only when the requested output encoding differs from the
    # internal working encoding.
    if self.__options['encoding'] != self.__encoding:
        xml = xml.encode(self.__options['encoding'], errors=self.__options['errors'])
    return xml
def function[object2xml, parameter[self, data]]: constant[Convert python object to xml string. :param data: data for build xml. If don't provide the ``root`` option, type of ``data`` must be dict and ``len(data) == 1``. :rtype: str or unicode .. versionadded:: 1.2 ] if <ast.UnaryOp object at 0x7da1b25ed2a0> begin[:] call[name[self].set_options, parameter[]] if call[name[self].__options][constant[header_declare]] begin[:] call[name[self].__tree.append, parameter[call[name[self].build_xml_header, parameter[]]]] variable[root] assign[=] call[name[self].__options][constant[root]] if <ast.UnaryOp object at 0x7da18f00d420> begin[:] assert[<ast.BoolOp object at 0x7da18f00ecb0>] <ast.Tuple object at 0x7da18f00f7f0> assign[=] call[call[name[data].items, parameter[]]][constant[0]] call[name[self].build_tree, parameter[name[data], name[root]]] variable[xml] assign[=] call[name[unicode], parameter[call[call[constant[].join, parameter[name[self].__tree]].strip, parameter[]]]] if compare[call[name[self].__options][constant[encoding]] not_equal[!=] name[self].__encoding] begin[:] variable[xml] assign[=] call[name[xml].encode, parameter[call[name[self].__options][constant[encoding]]]] return[name[xml]]
keyword[def] identifier[object2xml] ( identifier[self] , identifier[data] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[__options] [ literal[string] ]: identifier[self] . identifier[set_options] ( identifier[encoding] = identifier[self] . identifier[__encoding] ) keyword[if] identifier[self] . identifier[__options] [ literal[string] ]: identifier[self] . identifier[__tree] . identifier[append] ( identifier[self] . identifier[build_xml_header] ()) identifier[root] = identifier[self] . identifier[__options] [ literal[string] ] keyword[if] keyword[not] identifier[root] : keyword[assert] ( identifier[isinstance] ( identifier[data] , identifier[utils] . identifier[DictTypes] ) keyword[and] identifier[len] ( identifier[data] )== literal[int] ), literal[string] identifier[root] , identifier[data] = identifier[data] . identifier[items] ()[ literal[int] ] identifier[self] . identifier[build_tree] ( identifier[data] , identifier[root] ) identifier[xml] = identifier[unicode] ( literal[string] . identifier[join] ( identifier[self] . identifier[__tree] ). identifier[strip] ()) keyword[if] identifier[self] . identifier[__options] [ literal[string] ]!= identifier[self] . identifier[__encoding] : identifier[xml] = identifier[xml] . identifier[encode] ( identifier[self] . identifier[__options] [ literal[string] ], identifier[errors] = identifier[self] . identifier[__options] [ literal[string] ]) keyword[return] identifier[xml]
def object2xml(self, data): """Convert python object to xml string. :param data: data for build xml. If don't provide the ``root`` option, type of ``data`` must be dict and ``len(data) == 1``. :rtype: str or unicode .. versionadded:: 1.2 """ if not self.__options['encoding']: self.set_options(encoding=self.__encoding) # depends on [control=['if'], data=[]] if self.__options['header_declare']: self.__tree.append(self.build_xml_header()) # depends on [control=['if'], data=[]] root = self.__options['root'] if not root: assert isinstance(data, utils.DictTypes) and len(data) == 1, 'if root not specified, the data that dict object and length must be one required.' (root, data) = data.items()[0] # depends on [control=['if'], data=[]] self.build_tree(data, root) xml = unicode(''.join(self.__tree).strip()) if self.__options['encoding'] != self.__encoding: xml = xml.encode(self.__options['encoding'], errors=self.__options['errors']) # depends on [control=['if'], data=[]] return xml
def tool(_progname=sys.argv[0], _argv=sys.argv[1:], _syspath=sys.path, _findMachines=findMachines, _print=print): """ Entry point for command line utility. """ DESCRIPTION = """ Visualize automat.MethodicalMachines as graphviz graphs. """ EPILOG = """ You must have the graphviz tool suite installed. Please visit http://www.graphviz.org for more information. """ if _syspath[0]: _syspath.insert(0, '') argumentParser = argparse.ArgumentParser( prog=_progname, description=DESCRIPTION, epilog=EPILOG) argumentParser.add_argument('fqpn', help="A Fully Qualified Path name" " representing where to find machines.") argumentParser.add_argument('--quiet', '-q', help="suppress output", default=False, action="store_true") argumentParser.add_argument('--dot-directory', '-d', help="Where to write out .dot files.", default=".automat_visualize") argumentParser.add_argument('--image-directory', '-i', help="Where to write out image files.", default=".automat_visualize") argumentParser.add_argument('--image-type', '-t', help="The image format.", choices=graphviz.FORMATS, default='png') argumentParser.add_argument('--view', '-v', help="View rendered graphs with" " default image viewer", default=False, action="store_true") args = argumentParser.parse_args(_argv) explicitlySaveDot = (args.dot_directory and (not args.image_directory or args.image_directory != args.dot_directory)) if args.quiet: def _print(*args): pass for fqpn, machine in _findMachines(args.fqpn): _print(fqpn, '...discovered') digraph = machine.asDigraph() if explicitlySaveDot: digraph.save(filename="{}.dot".format(fqpn), directory=args.dot_directory) _print(fqpn, "...wrote dot into", args.dot_directory) if args.image_directory: deleteDot = not args.dot_directory or explicitlySaveDot digraph.format = args.image_type digraph.render(filename="{}.dot".format(fqpn), directory=args.image_directory, view=args.view, cleanup=deleteDot) if deleteDot: msg = "...wrote image into" else: msg = "...wrote image and dot into" _print(fqpn, 
msg, args.image_directory)
def function[tool, parameter[_progname, _argv, _syspath, _findMachines, _print]]: constant[ Entry point for command line utility. ] variable[DESCRIPTION] assign[=] constant[ Visualize automat.MethodicalMachines as graphviz graphs. ] variable[EPILOG] assign[=] constant[ You must have the graphviz tool suite installed. Please visit http://www.graphviz.org for more information. ] if call[name[_syspath]][constant[0]] begin[:] call[name[_syspath].insert, parameter[constant[0], constant[]]] variable[argumentParser] assign[=] call[name[argparse].ArgumentParser, parameter[]] call[name[argumentParser].add_argument, parameter[constant[fqpn]]] call[name[argumentParser].add_argument, parameter[constant[--quiet], constant[-q]]] call[name[argumentParser].add_argument, parameter[constant[--dot-directory], constant[-d]]] call[name[argumentParser].add_argument, parameter[constant[--image-directory], constant[-i]]] call[name[argumentParser].add_argument, parameter[constant[--image-type], constant[-t]]] call[name[argumentParser].add_argument, parameter[constant[--view], constant[-v]]] variable[args] assign[=] call[name[argumentParser].parse_args, parameter[name[_argv]]] variable[explicitlySaveDot] assign[=] <ast.BoolOp object at 0x7da1b0e39750> if name[args].quiet begin[:] def function[_print, parameter[]]: pass for taget[tuple[[<ast.Name object at 0x7da1b0e39240>, <ast.Name object at 0x7da1b0e387f0>]]] in starred[call[name[_findMachines], parameter[name[args].fqpn]]] begin[:] call[name[_print], parameter[name[fqpn], constant[...discovered]]] variable[digraph] assign[=] call[name[machine].asDigraph, parameter[]] if name[explicitlySaveDot] begin[:] call[name[digraph].save, parameter[]] call[name[_print], parameter[name[fqpn], constant[...wrote dot into], name[args].dot_directory]] if name[args].image_directory begin[:] variable[deleteDot] assign[=] <ast.BoolOp object at 0x7da1b0c4d330> name[digraph].format assign[=] name[args].image_type call[name[digraph].render, parameter[]] if 
name[deleteDot] begin[:] variable[msg] assign[=] constant[...wrote image into] call[name[_print], parameter[name[fqpn], name[msg], name[args].image_directory]]
keyword[def] identifier[tool] ( identifier[_progname] = identifier[sys] . identifier[argv] [ literal[int] ], identifier[_argv] = identifier[sys] . identifier[argv] [ literal[int] :], identifier[_syspath] = identifier[sys] . identifier[path] , identifier[_findMachines] = identifier[findMachines] , identifier[_print] = identifier[print] ): literal[string] identifier[DESCRIPTION] = literal[string] identifier[EPILOG] = literal[string] keyword[if] identifier[_syspath] [ literal[int] ]: identifier[_syspath] . identifier[insert] ( literal[int] , literal[string] ) identifier[argumentParser] = identifier[argparse] . identifier[ArgumentParser] ( identifier[prog] = identifier[_progname] , identifier[description] = identifier[DESCRIPTION] , identifier[epilog] = identifier[EPILOG] ) identifier[argumentParser] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] literal[string] ) identifier[argumentParser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[help] = literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] ) identifier[argumentParser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[help] = literal[string] , identifier[default] = literal[string] ) identifier[argumentParser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[help] = literal[string] , identifier[default] = literal[string] ) identifier[argumentParser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[help] = literal[string] , identifier[choices] = identifier[graphviz] . identifier[FORMATS] , identifier[default] = literal[string] ) identifier[argumentParser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[help] = literal[string] literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] ) identifier[args] = identifier[argumentParser] . 
identifier[parse_args] ( identifier[_argv] ) identifier[explicitlySaveDot] =( identifier[args] . identifier[dot_directory] keyword[and] ( keyword[not] identifier[args] . identifier[image_directory] keyword[or] identifier[args] . identifier[image_directory] != identifier[args] . identifier[dot_directory] )) keyword[if] identifier[args] . identifier[quiet] : keyword[def] identifier[_print] (* identifier[args] ): keyword[pass] keyword[for] identifier[fqpn] , identifier[machine] keyword[in] identifier[_findMachines] ( identifier[args] . identifier[fqpn] ): identifier[_print] ( identifier[fqpn] , literal[string] ) identifier[digraph] = identifier[machine] . identifier[asDigraph] () keyword[if] identifier[explicitlySaveDot] : identifier[digraph] . identifier[save] ( identifier[filename] = literal[string] . identifier[format] ( identifier[fqpn] ), identifier[directory] = identifier[args] . identifier[dot_directory] ) identifier[_print] ( identifier[fqpn] , literal[string] , identifier[args] . identifier[dot_directory] ) keyword[if] identifier[args] . identifier[image_directory] : identifier[deleteDot] = keyword[not] identifier[args] . identifier[dot_directory] keyword[or] identifier[explicitlySaveDot] identifier[digraph] . identifier[format] = identifier[args] . identifier[image_type] identifier[digraph] . identifier[render] ( identifier[filename] = literal[string] . identifier[format] ( identifier[fqpn] ), identifier[directory] = identifier[args] . identifier[image_directory] , identifier[view] = identifier[args] . identifier[view] , identifier[cleanup] = identifier[deleteDot] ) keyword[if] identifier[deleteDot] : identifier[msg] = literal[string] keyword[else] : identifier[msg] = literal[string] identifier[_print] ( identifier[fqpn] , identifier[msg] , identifier[args] . identifier[image_directory] )
def tool(_progname=sys.argv[0], _argv=sys.argv[1:], _syspath=sys.path, _findMachines=findMachines, _print=print): """ Entry point for command line utility. """ DESCRIPTION = '\n Visualize automat.MethodicalMachines as graphviz graphs.\n ' EPILOG = '\n You must have the graphviz tool suite installed. Please visit\n http://www.graphviz.org for more information.\n ' if _syspath[0]: _syspath.insert(0, '') # depends on [control=['if'], data=[]] argumentParser = argparse.ArgumentParser(prog=_progname, description=DESCRIPTION, epilog=EPILOG) argumentParser.add_argument('fqpn', help='A Fully Qualified Path name representing where to find machines.') argumentParser.add_argument('--quiet', '-q', help='suppress output', default=False, action='store_true') argumentParser.add_argument('--dot-directory', '-d', help='Where to write out .dot files.', default='.automat_visualize') argumentParser.add_argument('--image-directory', '-i', help='Where to write out image files.', default='.automat_visualize') argumentParser.add_argument('--image-type', '-t', help='The image format.', choices=graphviz.FORMATS, default='png') argumentParser.add_argument('--view', '-v', help='View rendered graphs with default image viewer', default=False, action='store_true') args = argumentParser.parse_args(_argv) explicitlySaveDot = args.dot_directory and (not args.image_directory or args.image_directory != args.dot_directory) if args.quiet: def _print(*args): pass # depends on [control=['if'], data=[]] for (fqpn, machine) in _findMachines(args.fqpn): _print(fqpn, '...discovered') digraph = machine.asDigraph() if explicitlySaveDot: digraph.save(filename='{}.dot'.format(fqpn), directory=args.dot_directory) _print(fqpn, '...wrote dot into', args.dot_directory) # depends on [control=['if'], data=[]] if args.image_directory: deleteDot = not args.dot_directory or explicitlySaveDot digraph.format = args.image_type digraph.render(filename='{}.dot'.format(fqpn), directory=args.image_directory, view=args.view, 
cleanup=deleteDot) if deleteDot: msg = '...wrote image into' # depends on [control=['if'], data=[]] else: msg = '...wrote image and dot into' _print(fqpn, msg, args.image_directory) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
def client_new(): """Create new client.""" form = ClientForm(request.form) if form.validate_on_submit(): c = Client(user_id=current_user.get_id()) c.gen_salt() form.populate_obj(c) db.session.add(c) db.session.commit() return redirect(url_for('.client_view', client_id=c.client_id)) return render_template( 'invenio_oauth2server/settings/client_new.html', form=form, )
def function[client_new, parameter[]]: constant[Create new client.] variable[form] assign[=] call[name[ClientForm], parameter[name[request].form]] if call[name[form].validate_on_submit, parameter[]] begin[:] variable[c] assign[=] call[name[Client], parameter[]] call[name[c].gen_salt, parameter[]] call[name[form].populate_obj, parameter[name[c]]] call[name[db].session.add, parameter[name[c]]] call[name[db].session.commit, parameter[]] return[call[name[redirect], parameter[call[name[url_for], parameter[constant[.client_view]]]]]] return[call[name[render_template], parameter[constant[invenio_oauth2server/settings/client_new.html]]]]
keyword[def] identifier[client_new] (): literal[string] identifier[form] = identifier[ClientForm] ( identifier[request] . identifier[form] ) keyword[if] identifier[form] . identifier[validate_on_submit] (): identifier[c] = identifier[Client] ( identifier[user_id] = identifier[current_user] . identifier[get_id] ()) identifier[c] . identifier[gen_salt] () identifier[form] . identifier[populate_obj] ( identifier[c] ) identifier[db] . identifier[session] . identifier[add] ( identifier[c] ) identifier[db] . identifier[session] . identifier[commit] () keyword[return] identifier[redirect] ( identifier[url_for] ( literal[string] , identifier[client_id] = identifier[c] . identifier[client_id] )) keyword[return] identifier[render_template] ( literal[string] , identifier[form] = identifier[form] , )
def client_new(): """Create new client.""" form = ClientForm(request.form) if form.validate_on_submit(): c = Client(user_id=current_user.get_id()) c.gen_salt() form.populate_obj(c) db.session.add(c) db.session.commit() return redirect(url_for('.client_view', client_id=c.client_id)) # depends on [control=['if'], data=[]] return render_template('invenio_oauth2server/settings/client_new.html', form=form)
def walk_train_dirs(root_dir: str) -> Iterable[Tuple[str, Iterable[str]]]: """ Modify os.walk with the following: - return only root_dir and sub-dirs - return only training sub-dirs - stop recursion at training dirs :param root_dir: root dir to be walked :return: generator of (root_dir, training sub-dirs) pairs """ if is_train_dir(root_dir): yield '', [root_dir] return for dir_, subdirs, _ in os.walk(root_dir, topdown=True): # filter train sub-dirs train_subdirs = [subdir for subdir in subdirs if is_train_dir(path.join(dir_, subdir))] # stop the recursion at the train sub-dirs for subdir in train_subdirs: subdirs.remove(subdir) yield dir_, train_subdirs
def function[walk_train_dirs, parameter[root_dir]]: constant[ Modify os.walk with the following: - return only root_dir and sub-dirs - return only training sub-dirs - stop recursion at training dirs :param root_dir: root dir to be walked :return: generator of (root_dir, training sub-dirs) pairs ] if call[name[is_train_dir], parameter[name[root_dir]]] begin[:] <ast.Yield object at 0x7da20c6aa6b0> return[None] for taget[tuple[[<ast.Name object at 0x7da18fe91690>, <ast.Name object at 0x7da18fe93b20>, <ast.Name object at 0x7da18fe91870>]]] in starred[call[name[os].walk, parameter[name[root_dir]]]] begin[:] variable[train_subdirs] assign[=] <ast.ListComp object at 0x7da18fe93490> for taget[name[subdir]] in starred[name[train_subdirs]] begin[:] call[name[subdirs].remove, parameter[name[subdir]]] <ast.Yield object at 0x7da18fe937f0>
keyword[def] identifier[walk_train_dirs] ( identifier[root_dir] : identifier[str] )-> identifier[Iterable] [ identifier[Tuple] [ identifier[str] , identifier[Iterable] [ identifier[str] ]]]: literal[string] keyword[if] identifier[is_train_dir] ( identifier[root_dir] ): keyword[yield] literal[string] ,[ identifier[root_dir] ] keyword[return] keyword[for] identifier[dir_] , identifier[subdirs] , identifier[_] keyword[in] identifier[os] . identifier[walk] ( identifier[root_dir] , identifier[topdown] = keyword[True] ): identifier[train_subdirs] =[ identifier[subdir] keyword[for] identifier[subdir] keyword[in] identifier[subdirs] keyword[if] identifier[is_train_dir] ( identifier[path] . identifier[join] ( identifier[dir_] , identifier[subdir] ))] keyword[for] identifier[subdir] keyword[in] identifier[train_subdirs] : identifier[subdirs] . identifier[remove] ( identifier[subdir] ) keyword[yield] identifier[dir_] , identifier[train_subdirs]
def walk_train_dirs(root_dir: str) -> Iterable[Tuple[str, Iterable[str]]]: """ Modify os.walk with the following: - return only root_dir and sub-dirs - return only training sub-dirs - stop recursion at training dirs :param root_dir: root dir to be walked :return: generator of (root_dir, training sub-dirs) pairs """ if is_train_dir(root_dir): yield ('', [root_dir]) return # depends on [control=['if'], data=[]] for (dir_, subdirs, _) in os.walk(root_dir, topdown=True): # filter train sub-dirs train_subdirs = [subdir for subdir in subdirs if is_train_dir(path.join(dir_, subdir))] # stop the recursion at the train sub-dirs for subdir in train_subdirs: subdirs.remove(subdir) # depends on [control=['for'], data=['subdir']] yield (dir_, train_subdirs) # depends on [control=['for'], data=[]]
def getlist(self, name): """ Retrieve given property from class/instance, ensuring it is a list. Also determine whether the list contains simple text/numeric values or nested dictionaries (a "complex" list) """ value = self.getvalue(name) complex = {} def str_value(val): # TODO: nonlocal complex if isinstance(val, dict): complex['complex'] = True return val else: return str(val) if value is None: pass else: value = [str_value(val) for val in as_list(value)] return value, bool(complex)
def function[getlist, parameter[self, name]]: constant[ Retrieve given property from class/instance, ensuring it is a list. Also determine whether the list contains simple text/numeric values or nested dictionaries (a "complex" list) ] variable[value] assign[=] call[name[self].getvalue, parameter[name[name]]] variable[complex] assign[=] dictionary[[], []] def function[str_value, parameter[val]]: if call[name[isinstance], parameter[name[val], name[dict]]] begin[:] call[name[complex]][constant[complex]] assign[=] constant[True] return[name[val]] if compare[name[value] is constant[None]] begin[:] pass return[tuple[[<ast.Name object at 0x7da1b118c2b0>, <ast.Call object at 0x7da1b118c970>]]]
keyword[def] identifier[getlist] ( identifier[self] , identifier[name] ): literal[string] identifier[value] = identifier[self] . identifier[getvalue] ( identifier[name] ) identifier[complex] ={} keyword[def] identifier[str_value] ( identifier[val] ): keyword[if] identifier[isinstance] ( identifier[val] , identifier[dict] ): identifier[complex] [ literal[string] ]= keyword[True] keyword[return] identifier[val] keyword[else] : keyword[return] identifier[str] ( identifier[val] ) keyword[if] identifier[value] keyword[is] keyword[None] : keyword[pass] keyword[else] : identifier[value] =[ identifier[str_value] ( identifier[val] ) keyword[for] identifier[val] keyword[in] identifier[as_list] ( identifier[value] )] keyword[return] identifier[value] , identifier[bool] ( identifier[complex] )
def getlist(self, name): """ Retrieve given property from class/instance, ensuring it is a list. Also determine whether the list contains simple text/numeric values or nested dictionaries (a "complex" list) """ value = self.getvalue(name) complex = {} def str_value(val): # TODO: nonlocal complex if isinstance(val, dict): complex['complex'] = True return val # depends on [control=['if'], data=[]] else: return str(val) if value is None: pass # depends on [control=['if'], data=[]] else: value = [str_value(val) for val in as_list(value)] return (value, bool(complex))
def _send(self, command): """ Sends a raw line to the server. :param command: line to send. :type command: unicode """ command = command.encode('utf-8') log.debug('>> ' + command) self.conn.oqueue.put(command)
def function[_send, parameter[self, command]]: constant[ Sends a raw line to the server. :param command: line to send. :type command: unicode ] variable[command] assign[=] call[name[command].encode, parameter[constant[utf-8]]] call[name[log].debug, parameter[binary_operation[constant[>> ] + name[command]]]] call[name[self].conn.oqueue.put, parameter[name[command]]]
keyword[def] identifier[_send] ( identifier[self] , identifier[command] ): literal[string] identifier[command] = identifier[command] . identifier[encode] ( literal[string] ) identifier[log] . identifier[debug] ( literal[string] + identifier[command] ) identifier[self] . identifier[conn] . identifier[oqueue] . identifier[put] ( identifier[command] )
def _send(self, command): """ Sends a raw line to the server. :param command: line to send. :type command: unicode """ command = command.encode('utf-8') log.debug('>> ' + command) self.conn.oqueue.put(command)
def getPrimeFactors(n): """ Get all the prime factor of given integer @param n integer @return list [1, ..., n] """ lo = [1] n2 = n // 2 k = 2 for k in range(2, n2 + 1): if (n // k)*k == n: lo.append(k) return lo + [n, ]
def function[getPrimeFactors, parameter[n]]: constant[ Get all the prime factor of given integer @param n integer @return list [1, ..., n] ] variable[lo] assign[=] list[[<ast.Constant object at 0x7da2049627d0>]] variable[n2] assign[=] binary_operation[name[n] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]] variable[k] assign[=] constant[2] for taget[name[k]] in starred[call[name[range], parameter[constant[2], binary_operation[name[n2] + constant[1]]]]] begin[:] if compare[binary_operation[binary_operation[name[n] <ast.FloorDiv object at 0x7da2590d6bc0> name[k]] * name[k]] equal[==] name[n]] begin[:] call[name[lo].append, parameter[name[k]]] return[binary_operation[name[lo] + list[[<ast.Name object at 0x7da204962b90>]]]]
keyword[def] identifier[getPrimeFactors] ( identifier[n] ): literal[string] identifier[lo] =[ literal[int] ] identifier[n2] = identifier[n] // literal[int] identifier[k] = literal[int] keyword[for] identifier[k] keyword[in] identifier[range] ( literal[int] , identifier[n2] + literal[int] ): keyword[if] ( identifier[n] // identifier[k] )* identifier[k] == identifier[n] : identifier[lo] . identifier[append] ( identifier[k] ) keyword[return] identifier[lo] +[ identifier[n] ,]
def getPrimeFactors(n): """ Get all the prime factor of given integer @param n integer @return list [1, ..., n] """ lo = [1] n2 = n // 2 k = 2 for k in range(2, n2 + 1): if n // k * k == n: lo.append(k) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['k']] return lo + [n]
def resolve_id(resolver, identifier, name='object'): """Resolves a single id using a resolver function. :param resolver: function that resolves ids. Should return None or a list of ids. :param string identifier: a string identifier used to resolve ids :param string name: the object type, to be used in error messages """ try: return int(identifier) except ValueError: pass # It was worth a shot ids = resolver(identifier) if len(ids) == 0: raise exceptions.CLIAbort("Error: Unable to find %s '%s'" % (name, identifier)) if len(ids) > 1: raise exceptions.CLIAbort( "Error: Multiple %s found for '%s': %s" % (name, identifier, ', '.join([str(_id) for _id in ids]))) return ids[0]
def function[resolve_id, parameter[resolver, identifier, name]]: constant[Resolves a single id using a resolver function. :param resolver: function that resolves ids. Should return None or a list of ids. :param string identifier: a string identifier used to resolve ids :param string name: the object type, to be used in error messages ] <ast.Try object at 0x7da20e956aa0> variable[ids] assign[=] call[name[resolver], parameter[name[identifier]]] if compare[call[name[len], parameter[name[ids]]] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da20e955930> if compare[call[name[len], parameter[name[ids]]] greater[>] constant[1]] begin[:] <ast.Raise object at 0x7da20c7967d0> return[call[name[ids]][constant[0]]]
keyword[def] identifier[resolve_id] ( identifier[resolver] , identifier[identifier] , identifier[name] = literal[string] ): literal[string] keyword[try] : keyword[return] identifier[int] ( identifier[identifier] ) keyword[except] identifier[ValueError] : keyword[pass] identifier[ids] = identifier[resolver] ( identifier[identifier] ) keyword[if] identifier[len] ( identifier[ids] )== literal[int] : keyword[raise] identifier[exceptions] . identifier[CLIAbort] ( literal[string] %( identifier[name] , identifier[identifier] )) keyword[if] identifier[len] ( identifier[ids] )> literal[int] : keyword[raise] identifier[exceptions] . identifier[CLIAbort] ( literal[string] % ( identifier[name] , identifier[identifier] , literal[string] . identifier[join] ([ identifier[str] ( identifier[_id] ) keyword[for] identifier[_id] keyword[in] identifier[ids] ]))) keyword[return] identifier[ids] [ literal[int] ]
def resolve_id(resolver, identifier, name='object'): """Resolves a single id using a resolver function. :param resolver: function that resolves ids. Should return None or a list of ids. :param string identifier: a string identifier used to resolve ids :param string name: the object type, to be used in error messages """ try: return int(identifier) # depends on [control=['try'], data=[]] except ValueError: pass # It was worth a shot # depends on [control=['except'], data=[]] ids = resolver(identifier) if len(ids) == 0: raise exceptions.CLIAbort("Error: Unable to find %s '%s'" % (name, identifier)) # depends on [control=['if'], data=[]] if len(ids) > 1: raise exceptions.CLIAbort("Error: Multiple %s found for '%s': %s" % (name, identifier, ', '.join([str(_id) for _id in ids]))) # depends on [control=['if'], data=[]] return ids[0]
def add_column(filename,column,formula,force=False): """ Add a column to a FITS file. ADW: Could this be replaced by a ftool? """ columns = parse_formula(formula) logger.info("Running file: %s"%filename) logger.debug(" Reading columns: %s"%columns) data = fitsio.read(filename,columns=columns) logger.debug(' Evaluating formula: %s'%formula) col = eval(formula) col = np.asarray(col,dtype=[(column,col.dtype)]) insert_columns(filename,col,force=force) return True
def function[add_column, parameter[filename, column, formula, force]]: constant[ Add a column to a FITS file. ADW: Could this be replaced by a ftool? ] variable[columns] assign[=] call[name[parse_formula], parameter[name[formula]]] call[name[logger].info, parameter[binary_operation[constant[Running file: %s] <ast.Mod object at 0x7da2590d6920> name[filename]]]] call[name[logger].debug, parameter[binary_operation[constant[ Reading columns: %s] <ast.Mod object at 0x7da2590d6920> name[columns]]]] variable[data] assign[=] call[name[fitsio].read, parameter[name[filename]]] call[name[logger].debug, parameter[binary_operation[constant[ Evaluating formula: %s] <ast.Mod object at 0x7da2590d6920> name[formula]]]] variable[col] assign[=] call[name[eval], parameter[name[formula]]] variable[col] assign[=] call[name[np].asarray, parameter[name[col]]] call[name[insert_columns], parameter[name[filename], name[col]]] return[constant[True]]
keyword[def] identifier[add_column] ( identifier[filename] , identifier[column] , identifier[formula] , identifier[force] = keyword[False] ): literal[string] identifier[columns] = identifier[parse_formula] ( identifier[formula] ) identifier[logger] . identifier[info] ( literal[string] % identifier[filename] ) identifier[logger] . identifier[debug] ( literal[string] % identifier[columns] ) identifier[data] = identifier[fitsio] . identifier[read] ( identifier[filename] , identifier[columns] = identifier[columns] ) identifier[logger] . identifier[debug] ( literal[string] % identifier[formula] ) identifier[col] = identifier[eval] ( identifier[formula] ) identifier[col] = identifier[np] . identifier[asarray] ( identifier[col] , identifier[dtype] =[( identifier[column] , identifier[col] . identifier[dtype] )]) identifier[insert_columns] ( identifier[filename] , identifier[col] , identifier[force] = identifier[force] ) keyword[return] keyword[True]
def add_column(filename, column, formula, force=False): """ Add a column to a FITS file. ADW: Could this be replaced by a ftool? """ columns = parse_formula(formula) logger.info('Running file: %s' % filename) logger.debug(' Reading columns: %s' % columns) data = fitsio.read(filename, columns=columns) logger.debug(' Evaluating formula: %s' % formula) col = eval(formula) col = np.asarray(col, dtype=[(column, col.dtype)]) insert_columns(filename, col, force=force) return True
def to_file(self, path, precision='%.2g'): """ Create a CSV report of the trackables :param path: path to file :param precision: numeric string formatter """ table_info = self.get_table_info() def dump_rows(rows): if len(rows) > 1: for row in rows: csv_writer.writerow(row) csv_writer.writerow([]) with open(path, 'wb') as _f: csv_writer = csv.writer(_f) state_rows = [['States']] state_rows += [['Name', 'Description', 'State', 'Number of Changes']] for state in self.states: state_rows.append([state.name, table_info[state.name]['description'], state.state, state.count]) dump_rows(state_rows) stat_rows = [['Statistics']] stat_rows += [['Name', 'Description', 'Total', 'Average']] for stat in self.statistics: if stat.name == '__submissions__': continue stat_rows.append([stat.name, table_info[stat.name]['description'], stat.count, stat.get_average(0)]) dump_rows(stat_rows) timer_rows = [['Timers']] timer_rows += [['Name', 'Description', 'Average Seconds', 'Total Seconds', 'Total Minutes', 'Total Hours', 'Total Days']] for timer in self.timers: timer_rows.append([timer.name, table_info[timer.name]['description'], precision % timer.get_average(0), precision % timer.total_seconds, precision % timer.total_minutes, precision % timer.total_hours, precision % timer.total_days]) dump_rows(timer_rows) sequence_rows = [['Sequences']] sequence_rows += [['Name', 'Description', 'Sequence', 'Number of Completions']] for sequence in self.sequences: checkpoints = '-->'.join(map(str, sequence.get_checkpoints())) sequence_rows.append([sequence.name, table_info[sequence.name]['description'], checkpoints, sequence.count]) dump_rows(sequence_rows)
def function[to_file, parameter[self, path, precision]]: constant[ Create a CSV report of the trackables :param path: path to file :param precision: numeric string formatter ] variable[table_info] assign[=] call[name[self].get_table_info, parameter[]] def function[dump_rows, parameter[rows]]: if compare[call[name[len], parameter[name[rows]]] greater[>] constant[1]] begin[:] for taget[name[row]] in starred[name[rows]] begin[:] call[name[csv_writer].writerow, parameter[name[row]]] call[name[csv_writer].writerow, parameter[list[[]]]] with call[name[open], parameter[name[path], constant[wb]]] begin[:] variable[csv_writer] assign[=] call[name[csv].writer, parameter[name[_f]]] variable[state_rows] assign[=] list[[<ast.List object at 0x7da20c796680>]] <ast.AugAssign object at 0x7da20c796020> for taget[name[state]] in starred[name[self].states] begin[:] call[name[state_rows].append, parameter[list[[<ast.Attribute object at 0x7da20c795060>, <ast.Subscript object at 0x7da20c795420>, <ast.Attribute object at 0x7da20c7955a0>, <ast.Attribute object at 0x7da20c794b80>]]]] call[name[dump_rows], parameter[name[state_rows]]] variable[stat_rows] assign[=] list[[<ast.List object at 0x7da20c794cd0>]] <ast.AugAssign object at 0x7da20c795b70> for taget[name[stat]] in starred[name[self].statistics] begin[:] if compare[name[stat].name equal[==] constant[__submissions__]] begin[:] continue call[name[stat_rows].append, parameter[list[[<ast.Attribute object at 0x7da20c794100>, <ast.Subscript object at 0x7da20c794f70>, <ast.Attribute object at 0x7da20c794940>, <ast.Call object at 0x7da20c794c70>]]]] call[name[dump_rows], parameter[name[stat_rows]]] variable[timer_rows] assign[=] list[[<ast.List object at 0x7da20c795e40>]] <ast.AugAssign object at 0x7da20c7945b0> for taget[name[timer]] in starred[name[self].timers] begin[:] call[name[timer_rows].append, parameter[list[[<ast.Attribute object at 0x7da1b09e9a20>, <ast.Subscript object at 0x7da1b09e9540>, <ast.BinOp object at 0x7da1b09e9f30>, 
<ast.BinOp object at 0x7da1b09eb700>, <ast.BinOp object at 0x7da1b09ea9e0>, <ast.BinOp object at 0x7da1b09e97e0>, <ast.BinOp object at 0x7da1b09ebf40>]]]] call[name[dump_rows], parameter[name[timer_rows]]] variable[sequence_rows] assign[=] list[[<ast.List object at 0x7da1b09e8160>]] <ast.AugAssign object at 0x7da1b09eb460> for taget[name[sequence]] in starred[name[self].sequences] begin[:] variable[checkpoints] assign[=] call[constant[-->].join, parameter[call[name[map], parameter[name[str], call[name[sequence].get_checkpoints, parameter[]]]]]] call[name[sequence_rows].append, parameter[list[[<ast.Attribute object at 0x7da1b09eb8e0>, <ast.Subscript object at 0x7da1b09ead70>, <ast.Name object at 0x7da1b09ebb20>, <ast.Attribute object at 0x7da1b09e9210>]]]] call[name[dump_rows], parameter[name[sequence_rows]]]
keyword[def] identifier[to_file] ( identifier[self] , identifier[path] , identifier[precision] = literal[string] ): literal[string] identifier[table_info] = identifier[self] . identifier[get_table_info] () keyword[def] identifier[dump_rows] ( identifier[rows] ): keyword[if] identifier[len] ( identifier[rows] )> literal[int] : keyword[for] identifier[row] keyword[in] identifier[rows] : identifier[csv_writer] . identifier[writerow] ( identifier[row] ) identifier[csv_writer] . identifier[writerow] ([]) keyword[with] identifier[open] ( identifier[path] , literal[string] ) keyword[as] identifier[_f] : identifier[csv_writer] = identifier[csv] . identifier[writer] ( identifier[_f] ) identifier[state_rows] =[[ literal[string] ]] identifier[state_rows] +=[[ literal[string] , literal[string] , literal[string] , literal[string] ]] keyword[for] identifier[state] keyword[in] identifier[self] . identifier[states] : identifier[state_rows] . identifier[append] ([ identifier[state] . identifier[name] , identifier[table_info] [ identifier[state] . identifier[name] ][ literal[string] ], identifier[state] . identifier[state] , identifier[state] . identifier[count] ]) identifier[dump_rows] ( identifier[state_rows] ) identifier[stat_rows] =[[ literal[string] ]] identifier[stat_rows] +=[[ literal[string] , literal[string] , literal[string] , literal[string] ]] keyword[for] identifier[stat] keyword[in] identifier[self] . identifier[statistics] : keyword[if] identifier[stat] . identifier[name] == literal[string] : keyword[continue] identifier[stat_rows] . identifier[append] ([ identifier[stat] . identifier[name] , identifier[table_info] [ identifier[stat] . identifier[name] ][ literal[string] ], identifier[stat] . identifier[count] , identifier[stat] . 
identifier[get_average] ( literal[int] )]) identifier[dump_rows] ( identifier[stat_rows] ) identifier[timer_rows] =[[ literal[string] ]] identifier[timer_rows] +=[[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]] keyword[for] identifier[timer] keyword[in] identifier[self] . identifier[timers] : identifier[timer_rows] . identifier[append] ([ identifier[timer] . identifier[name] , identifier[table_info] [ identifier[timer] . identifier[name] ][ literal[string] ], identifier[precision] % identifier[timer] . identifier[get_average] ( literal[int] ), identifier[precision] % identifier[timer] . identifier[total_seconds] , identifier[precision] % identifier[timer] . identifier[total_minutes] , identifier[precision] % identifier[timer] . identifier[total_hours] , identifier[precision] % identifier[timer] . identifier[total_days] ]) identifier[dump_rows] ( identifier[timer_rows] ) identifier[sequence_rows] =[[ literal[string] ]] identifier[sequence_rows] +=[[ literal[string] , literal[string] , literal[string] , literal[string] ]] keyword[for] identifier[sequence] keyword[in] identifier[self] . identifier[sequences] : identifier[checkpoints] = literal[string] . identifier[join] ( identifier[map] ( identifier[str] , identifier[sequence] . identifier[get_checkpoints] ())) identifier[sequence_rows] . identifier[append] ([ identifier[sequence] . identifier[name] , identifier[table_info] [ identifier[sequence] . identifier[name] ][ literal[string] ], identifier[checkpoints] , identifier[sequence] . identifier[count] ]) identifier[dump_rows] ( identifier[sequence_rows] )
def to_file(self, path, precision='%.2g'): """ Create a CSV report of the trackables :param path: path to file :param precision: numeric string formatter """ table_info = self.get_table_info() def dump_rows(rows): if len(rows) > 1: for row in rows: csv_writer.writerow(row) # depends on [control=['for'], data=['row']] csv_writer.writerow([]) # depends on [control=['if'], data=[]] with open(path, 'wb') as _f: csv_writer = csv.writer(_f) state_rows = [['States']] state_rows += [['Name', 'Description', 'State', 'Number of Changes']] for state in self.states: state_rows.append([state.name, table_info[state.name]['description'], state.state, state.count]) # depends on [control=['for'], data=['state']] dump_rows(state_rows) stat_rows = [['Statistics']] stat_rows += [['Name', 'Description', 'Total', 'Average']] for stat in self.statistics: if stat.name == '__submissions__': continue # depends on [control=['if'], data=[]] stat_rows.append([stat.name, table_info[stat.name]['description'], stat.count, stat.get_average(0)]) dump_rows(stat_rows) # depends on [control=['for'], data=['stat']] timer_rows = [['Timers']] timer_rows += [['Name', 'Description', 'Average Seconds', 'Total Seconds', 'Total Minutes', 'Total Hours', 'Total Days']] for timer in self.timers: timer_rows.append([timer.name, table_info[timer.name]['description'], precision % timer.get_average(0), precision % timer.total_seconds, precision % timer.total_minutes, precision % timer.total_hours, precision % timer.total_days]) dump_rows(timer_rows) # depends on [control=['for'], data=['timer']] sequence_rows = [['Sequences']] sequence_rows += [['Name', 'Description', 'Sequence', 'Number of Completions']] for sequence in self.sequences: checkpoints = '-->'.join(map(str, sequence.get_checkpoints())) sequence_rows.append([sequence.name, table_info[sequence.name]['description'], checkpoints, sequence.count]) dump_rows(sequence_rows) # depends on [control=['for'], data=['sequence']] # depends on [control=['with'], 
data=['_f']]
def replace(self, **kwargs): """ Return a :class:`.Date` with one or more components replaced with new values. """ return Date(kwargs.get("year", self.__year), kwargs.get("month", self.__month), kwargs.get("day", self.__day))
def function[replace, parameter[self]]: constant[ Return a :class:`.Date` with one or more components replaced with new values. ] return[call[name[Date], parameter[call[name[kwargs].get, parameter[constant[year], name[self].__year]], call[name[kwargs].get, parameter[constant[month], name[self].__month]], call[name[kwargs].get, parameter[constant[day], name[self].__day]]]]]
keyword[def] identifier[replace] ( identifier[self] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[Date] ( identifier[kwargs] . identifier[get] ( literal[string] , identifier[self] . identifier[__year] ), identifier[kwargs] . identifier[get] ( literal[string] , identifier[self] . identifier[__month] ), identifier[kwargs] . identifier[get] ( literal[string] , identifier[self] . identifier[__day] ))
def replace(self, **kwargs): """ Return a :class:`.Date` with one or more components replaced with new values. """ return Date(kwargs.get('year', self.__year), kwargs.get('month', self.__month), kwargs.get('day', self.__day))
def ReadHuntClientResourcesStats(self, hunt_id): """Read/calculate hunt client resources stats.""" result = rdf_stats.ClientResourcesStats() for f in self._GetHuntFlows(hunt_id): cr = rdf_client_stats.ClientResources( session_id="%s/%s" % (f.client_id, f.flow_id), client_id=f.client_id, cpu_usage=f.cpu_time_used, network_bytes_sent=f.network_bytes_sent) result.RegisterResources(cr) # TODO(user): remove this hack when compatibility with AFF4 is not # important. return rdf_stats.ClientResourcesStats.FromSerializedString( result.SerializeToString())
def function[ReadHuntClientResourcesStats, parameter[self, hunt_id]]: constant[Read/calculate hunt client resources stats.] variable[result] assign[=] call[name[rdf_stats].ClientResourcesStats, parameter[]] for taget[name[f]] in starred[call[name[self]._GetHuntFlows, parameter[name[hunt_id]]]] begin[:] variable[cr] assign[=] call[name[rdf_client_stats].ClientResources, parameter[]] call[name[result].RegisterResources, parameter[name[cr]]] return[call[name[rdf_stats].ClientResourcesStats.FromSerializedString, parameter[call[name[result].SerializeToString, parameter[]]]]]
keyword[def] identifier[ReadHuntClientResourcesStats] ( identifier[self] , identifier[hunt_id] ): literal[string] identifier[result] = identifier[rdf_stats] . identifier[ClientResourcesStats] () keyword[for] identifier[f] keyword[in] identifier[self] . identifier[_GetHuntFlows] ( identifier[hunt_id] ): identifier[cr] = identifier[rdf_client_stats] . identifier[ClientResources] ( identifier[session_id] = literal[string] %( identifier[f] . identifier[client_id] , identifier[f] . identifier[flow_id] ), identifier[client_id] = identifier[f] . identifier[client_id] , identifier[cpu_usage] = identifier[f] . identifier[cpu_time_used] , identifier[network_bytes_sent] = identifier[f] . identifier[network_bytes_sent] ) identifier[result] . identifier[RegisterResources] ( identifier[cr] ) keyword[return] identifier[rdf_stats] . identifier[ClientResourcesStats] . identifier[FromSerializedString] ( identifier[result] . identifier[SerializeToString] ())
def ReadHuntClientResourcesStats(self, hunt_id): """Read/calculate hunt client resources stats.""" result = rdf_stats.ClientResourcesStats() for f in self._GetHuntFlows(hunt_id): cr = rdf_client_stats.ClientResources(session_id='%s/%s' % (f.client_id, f.flow_id), client_id=f.client_id, cpu_usage=f.cpu_time_used, network_bytes_sent=f.network_bytes_sent) result.RegisterResources(cr) # depends on [control=['for'], data=['f']] # TODO(user): remove this hack when compatibility with AFF4 is not # important. return rdf_stats.ClientResourcesStats.FromSerializedString(result.SerializeToString())
def ShouldRetry(self, exception): """Returns true if should retry based on the passed-in exception. :param (errors.HTTPFailure instance) exception: :rtype: boolean """ if (self.current_retry_attempt_count < self._max_retry_attempt_count) and self.needsRetry(exception.status_code): self.current_retry_attempt_count += 1 return True return False
def function[ShouldRetry, parameter[self, exception]]: constant[Returns true if should retry based on the passed-in exception. :param (errors.HTTPFailure instance) exception: :rtype: boolean ] if <ast.BoolOp object at 0x7da1b18e79d0> begin[:] <ast.AugAssign object at 0x7da1b18e6680> return[constant[True]] return[constant[False]]
keyword[def] identifier[ShouldRetry] ( identifier[self] , identifier[exception] ): literal[string] keyword[if] ( identifier[self] . identifier[current_retry_attempt_count] < identifier[self] . identifier[_max_retry_attempt_count] ) keyword[and] identifier[self] . identifier[needsRetry] ( identifier[exception] . identifier[status_code] ): identifier[self] . identifier[current_retry_attempt_count] += literal[int] keyword[return] keyword[True] keyword[return] keyword[False]
def ShouldRetry(self, exception): """Returns true if should retry based on the passed-in exception. :param (errors.HTTPFailure instance) exception: :rtype: boolean """ if self.current_retry_attempt_count < self._max_retry_attempt_count and self.needsRetry(exception.status_code): self.current_retry_attempt_count += 1 return True # depends on [control=['if'], data=[]] return False
def event_lookup_query(self, id, **kwargs): """ Query the Yelp Event Lookup API. documentation: https://www.yelp.com/developers/documentation/v3/event required parameters: * id - event ID """ if not id: raise ValueError('A valid event ID (parameter "id") must be provided.') return self._query(EVENT_LOOKUP_API_URL.format(id), **kwargs)
def function[event_lookup_query, parameter[self, id]]: constant[ Query the Yelp Event Lookup API. documentation: https://www.yelp.com/developers/documentation/v3/event required parameters: * id - event ID ] if <ast.UnaryOp object at 0x7da18bcc8190> begin[:] <ast.Raise object at 0x7da18bcca6e0> return[call[name[self]._query, parameter[call[name[EVENT_LOOKUP_API_URL].format, parameter[name[id]]]]]]
keyword[def] identifier[event_lookup_query] ( identifier[self] , identifier[id] ,** identifier[kwargs] ): literal[string] keyword[if] keyword[not] identifier[id] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return] identifier[self] . identifier[_query] ( identifier[EVENT_LOOKUP_API_URL] . identifier[format] ( identifier[id] ),** identifier[kwargs] )
def event_lookup_query(self, id, **kwargs): """ Query the Yelp Event Lookup API. documentation: https://www.yelp.com/developers/documentation/v3/event required parameters: * id - event ID """ if not id: raise ValueError('A valid event ID (parameter "id") must be provided.') # depends on [control=['if'], data=[]] return self._query(EVENT_LOOKUP_API_URL.format(id), **kwargs)
def query_other_gene_name(): """ Returns list of alternative short name by query query parameters --- tags: - Query functions parameters: - name: type_ in: query type: string required: false description: Alternative short name default: CVAP - name: name in: query type: string required: false description: Alternative short name default: CVAP - name: entry_name in: query type: string required: false description: UniProt entry name default: A4_HUMAN - name: limit in: query type: integer required: false description: limit of results numbers default: 10 """ args = get_args( request_args=request.args, allowed_str_args=['name', 'entry_name'], allowed_int_args=['limit'] ) return jsonify(query.other_gene_name(**args))
def function[query_other_gene_name, parameter[]]: constant[ Returns list of alternative short name by query query parameters --- tags: - Query functions parameters: - name: type_ in: query type: string required: false description: Alternative short name default: CVAP - name: name in: query type: string required: false description: Alternative short name default: CVAP - name: entry_name in: query type: string required: false description: UniProt entry name default: A4_HUMAN - name: limit in: query type: integer required: false description: limit of results numbers default: 10 ] variable[args] assign[=] call[name[get_args], parameter[]] return[call[name[jsonify], parameter[call[name[query].other_gene_name, parameter[]]]]]
keyword[def] identifier[query_other_gene_name] (): literal[string] identifier[args] = identifier[get_args] ( identifier[request_args] = identifier[request] . identifier[args] , identifier[allowed_str_args] =[ literal[string] , literal[string] ], identifier[allowed_int_args] =[ literal[string] ] ) keyword[return] identifier[jsonify] ( identifier[query] . identifier[other_gene_name] (** identifier[args] ))
def query_other_gene_name(): """ Returns list of alternative short name by query query parameters --- tags: - Query functions parameters: - name: type_ in: query type: string required: false description: Alternative short name default: CVAP - name: name in: query type: string required: false description: Alternative short name default: CVAP - name: entry_name in: query type: string required: false description: UniProt entry name default: A4_HUMAN - name: limit in: query type: integer required: false description: limit of results numbers default: 10 """ args = get_args(request_args=request.args, allowed_str_args=['name', 'entry_name'], allowed_int_args=['limit']) return jsonify(query.other_gene_name(**args))
def isdir(self, relpath, rsc=None): """ Returns whether or not the resource is a directory. :return <bool> """ filepath = self.find(relpath, rsc) if filepath.startswith(':'): resource = QtCore.QResource(filepath) return not resource.isFile() else: return os.path.isdir(filepath)
def function[isdir, parameter[self, relpath, rsc]]: constant[ Returns whether or not the resource is a directory. :return <bool> ] variable[filepath] assign[=] call[name[self].find, parameter[name[relpath], name[rsc]]] if call[name[filepath].startswith, parameter[constant[:]]] begin[:] variable[resource] assign[=] call[name[QtCore].QResource, parameter[name[filepath]]] return[<ast.UnaryOp object at 0x7da1b24c5210>]
keyword[def] identifier[isdir] ( identifier[self] , identifier[relpath] , identifier[rsc] = keyword[None] ): literal[string] identifier[filepath] = identifier[self] . identifier[find] ( identifier[relpath] , identifier[rsc] ) keyword[if] identifier[filepath] . identifier[startswith] ( literal[string] ): identifier[resource] = identifier[QtCore] . identifier[QResource] ( identifier[filepath] ) keyword[return] keyword[not] identifier[resource] . identifier[isFile] () keyword[else] : keyword[return] identifier[os] . identifier[path] . identifier[isdir] ( identifier[filepath] )
def isdir(self, relpath, rsc=None): """ Returns whether or not the resource is a directory. :return <bool> """ filepath = self.find(relpath, rsc) if filepath.startswith(':'): resource = QtCore.QResource(filepath) return not resource.isFile() # depends on [control=['if'], data=[]] else: return os.path.isdir(filepath)
def any_validator(obj, validators, **kwargs): """ Attempt multiple validators on an object. - If any pass, then all validation passes. - Otherwise, raise all of the errors. """ if not len(validators) > 1: raise ValueError( "any_validator requires at least 2 validator. Only got " "{0}".format(len(validators)) ) errors = ErrorDict() for key, validator in validators.items(): try: validator(obj, **kwargs) except ValidationError as err: errors[key] = err.detail else: break else: if len(errors) == 1: # Special case for a single error. Just raise it as if it was the # only validator run. error = errors.values()[0] raise ValidationError(error) else: # Raise all of the errors with the key namespaces. errors.raise_()
def function[any_validator, parameter[obj, validators]]: constant[ Attempt multiple validators on an object. - If any pass, then all validation passes. - Otherwise, raise all of the errors. ] if <ast.UnaryOp object at 0x7da1b0fcca90> begin[:] <ast.Raise object at 0x7da1b0fcd8d0> variable[errors] assign[=] call[name[ErrorDict], parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b0fcc820>, <ast.Name object at 0x7da1b0fcfb50>]]] in starred[call[name[validators].items, parameter[]]] begin[:] <ast.Try object at 0x7da1b0fcd390>
keyword[def] identifier[any_validator] ( identifier[obj] , identifier[validators] ,** identifier[kwargs] ): literal[string] keyword[if] keyword[not] identifier[len] ( identifier[validators] )> literal[int] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[len] ( identifier[validators] )) ) identifier[errors] = identifier[ErrorDict] () keyword[for] identifier[key] , identifier[validator] keyword[in] identifier[validators] . identifier[items] (): keyword[try] : identifier[validator] ( identifier[obj] ,** identifier[kwargs] ) keyword[except] identifier[ValidationError] keyword[as] identifier[err] : identifier[errors] [ identifier[key] ]= identifier[err] . identifier[detail] keyword[else] : keyword[break] keyword[else] : keyword[if] identifier[len] ( identifier[errors] )== literal[int] : identifier[error] = identifier[errors] . identifier[values] ()[ literal[int] ] keyword[raise] identifier[ValidationError] ( identifier[error] ) keyword[else] : identifier[errors] . identifier[raise_] ()
def any_validator(obj, validators, **kwargs): """ Attempt multiple validators on an object. - If any pass, then all validation passes. - Otherwise, raise all of the errors. """ if not len(validators) > 1: raise ValueError('any_validator requires at least 2 validator. Only got {0}'.format(len(validators))) # depends on [control=['if'], data=[]] errors = ErrorDict() for (key, validator) in validators.items(): try: validator(obj, **kwargs) # depends on [control=['try'], data=[]] except ValidationError as err: errors[key] = err.detail # depends on [control=['except'], data=['err']] else: break # depends on [control=['for'], data=[]] else: if len(errors) == 1: # Special case for a single error. Just raise it as if it was the # only validator run. error = errors.values()[0] raise ValidationError(error) # depends on [control=['if'], data=[]] else: # Raise all of the errors with the key namespaces. errors.raise_()
def rename_acquisition(self, plate_name, name, new_name): '''Renames an acquisition. Parameters ---------- plate_name: str name of the parent plate name: str name of the acquisition that should be renamed new_name: str name that should be given to the acquisition See also -------- :func:`tmserver.api.acquisition.update_acquisition` :class:`tmlib.models.acquisition.Acquisition` ''' logger.info( 'rename acquisistion "%s" of experiment "%s", plate "%s"', name, self.experiment_name, plate_name ) content = {'name': new_name} acquisition_id = self._get_acquisition_id(plate_name, name) url = self._build_api_url( '/experiments/{experiment_id}/acquisitions/{acquisition_id}'.format( experiment_id=self._experiment_id, acquisition_id=acquisition_id ) ) res = self._session.put(url, json=content) res.raise_for_status()
def function[rename_acquisition, parameter[self, plate_name, name, new_name]]: constant[Renames an acquisition. Parameters ---------- plate_name: str name of the parent plate name: str name of the acquisition that should be renamed new_name: str name that should be given to the acquisition See also -------- :func:`tmserver.api.acquisition.update_acquisition` :class:`tmlib.models.acquisition.Acquisition` ] call[name[logger].info, parameter[constant[rename acquisistion "%s" of experiment "%s", plate "%s"], name[name], name[self].experiment_name, name[plate_name]]] variable[content] assign[=] dictionary[[<ast.Constant object at 0x7da2054a4880>], [<ast.Name object at 0x7da2054a6bf0>]] variable[acquisition_id] assign[=] call[name[self]._get_acquisition_id, parameter[name[plate_name], name[name]]] variable[url] assign[=] call[name[self]._build_api_url, parameter[call[constant[/experiments/{experiment_id}/acquisitions/{acquisition_id}].format, parameter[]]]] variable[res] assign[=] call[name[self]._session.put, parameter[name[url]]] call[name[res].raise_for_status, parameter[]]
keyword[def] identifier[rename_acquisition] ( identifier[self] , identifier[plate_name] , identifier[name] , identifier[new_name] ): literal[string] identifier[logger] . identifier[info] ( literal[string] , identifier[name] , identifier[self] . identifier[experiment_name] , identifier[plate_name] ) identifier[content] ={ literal[string] : identifier[new_name] } identifier[acquisition_id] = identifier[self] . identifier[_get_acquisition_id] ( identifier[plate_name] , identifier[name] ) identifier[url] = identifier[self] . identifier[_build_api_url] ( literal[string] . identifier[format] ( identifier[experiment_id] = identifier[self] . identifier[_experiment_id] , identifier[acquisition_id] = identifier[acquisition_id] ) ) identifier[res] = identifier[self] . identifier[_session] . identifier[put] ( identifier[url] , identifier[json] = identifier[content] ) identifier[res] . identifier[raise_for_status] ()
def rename_acquisition(self, plate_name, name, new_name): """Renames an acquisition. Parameters ---------- plate_name: str name of the parent plate name: str name of the acquisition that should be renamed new_name: str name that should be given to the acquisition See also -------- :func:`tmserver.api.acquisition.update_acquisition` :class:`tmlib.models.acquisition.Acquisition` """ logger.info('rename acquisistion "%s" of experiment "%s", plate "%s"', name, self.experiment_name, plate_name) content = {'name': new_name} acquisition_id = self._get_acquisition_id(plate_name, name) url = self._build_api_url('/experiments/{experiment_id}/acquisitions/{acquisition_id}'.format(experiment_id=self._experiment_id, acquisition_id=acquisition_id)) res = self._session.put(url, json=content) res.raise_for_status()
def Click(self, x: int = None, y: int = None, ratioX: float = 0.5, ratioY: float = 0.5, simulateMove: bool = True, waitTime: float = OPERATION_WAIT_TIME) -> None: """ x: int, if < 0, click self.BoundingRectangle.right + x, if not None, ignore ratioX. y: int, if < 0, click self.BoundingRectangle.bottom + y, if not None, ignore ratioY. ratioX: float. ratioY: float. simulateMove: bool, if True, first move cursor to control smoothly. waitTime: float. Click(), Click(ratioX=0.5, ratioY=0.5): click center. Click(10, 10): click left+10, top+10. Click(-10, -10): click right-10, bottom-10. """ point = self.MoveCursorToInnerPos(x, y, ratioX, ratioY, simulateMove) if point: Click(point[0], point[1], waitTime)
def function[Click, parameter[self, x, y, ratioX, ratioY, simulateMove, waitTime]]: constant[ x: int, if < 0, click self.BoundingRectangle.right + x, if not None, ignore ratioX. y: int, if < 0, click self.BoundingRectangle.bottom + y, if not None, ignore ratioY. ratioX: float. ratioY: float. simulateMove: bool, if True, first move cursor to control smoothly. waitTime: float. Click(), Click(ratioX=0.5, ratioY=0.5): click center. Click(10, 10): click left+10, top+10. Click(-10, -10): click right-10, bottom-10. ] variable[point] assign[=] call[name[self].MoveCursorToInnerPos, parameter[name[x], name[y], name[ratioX], name[ratioY], name[simulateMove]]] if name[point] begin[:] call[name[Click], parameter[call[name[point]][constant[0]], call[name[point]][constant[1]], name[waitTime]]]
keyword[def] identifier[Click] ( identifier[self] , identifier[x] : identifier[int] = keyword[None] , identifier[y] : identifier[int] = keyword[None] , identifier[ratioX] : identifier[float] = literal[int] , identifier[ratioY] : identifier[float] = literal[int] , identifier[simulateMove] : identifier[bool] = keyword[True] , identifier[waitTime] : identifier[float] = identifier[OPERATION_WAIT_TIME] )-> keyword[None] : literal[string] identifier[point] = identifier[self] . identifier[MoveCursorToInnerPos] ( identifier[x] , identifier[y] , identifier[ratioX] , identifier[ratioY] , identifier[simulateMove] ) keyword[if] identifier[point] : identifier[Click] ( identifier[point] [ literal[int] ], identifier[point] [ literal[int] ], identifier[waitTime] )
def Click(self, x: int=None, y: int=None, ratioX: float=0.5, ratioY: float=0.5, simulateMove: bool=True, waitTime: float=OPERATION_WAIT_TIME) -> None: """ x: int, if < 0, click self.BoundingRectangle.right + x, if not None, ignore ratioX. y: int, if < 0, click self.BoundingRectangle.bottom + y, if not None, ignore ratioY. ratioX: float. ratioY: float. simulateMove: bool, if True, first move cursor to control smoothly. waitTime: float. Click(), Click(ratioX=0.5, ratioY=0.5): click center. Click(10, 10): click left+10, top+10. Click(-10, -10): click right-10, bottom-10. """ point = self.MoveCursorToInnerPos(x, y, ratioX, ratioY, simulateMove) if point: Click(point[0], point[1], waitTime) # depends on [control=['if'], data=[]]
def solve_limited(self, assumptions=[]): """ Solve internal formula using given budgets for conflicts and propagations. """ if self.minicard: if self.use_timer: start_time = time.clock() # saving default SIGINT handler def_sigint_handler = signal.signal(signal.SIGINT, signal.SIG_DFL) self.status = pysolvers.minicard_solve_lim(self.minicard, assumptions) # recovering default SIGINT handler def_sigint_handler = signal.signal(signal.SIGINT, def_sigint_handler) if self.use_timer: self.call_time = time.clock() - start_time self.accu_time += self.call_time return self.status
def function[solve_limited, parameter[self, assumptions]]: constant[ Solve internal formula using given budgets for conflicts and propagations. ] if name[self].minicard begin[:] if name[self].use_timer begin[:] variable[start_time] assign[=] call[name[time].clock, parameter[]] variable[def_sigint_handler] assign[=] call[name[signal].signal, parameter[name[signal].SIGINT, name[signal].SIG_DFL]] name[self].status assign[=] call[name[pysolvers].minicard_solve_lim, parameter[name[self].minicard, name[assumptions]]] variable[def_sigint_handler] assign[=] call[name[signal].signal, parameter[name[signal].SIGINT, name[def_sigint_handler]]] if name[self].use_timer begin[:] name[self].call_time assign[=] binary_operation[call[name[time].clock, parameter[]] - name[start_time]] <ast.AugAssign object at 0x7da18f7210f0> return[name[self].status]
keyword[def] identifier[solve_limited] ( identifier[self] , identifier[assumptions] =[]): literal[string] keyword[if] identifier[self] . identifier[minicard] : keyword[if] identifier[self] . identifier[use_timer] : identifier[start_time] = identifier[time] . identifier[clock] () identifier[def_sigint_handler] = identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGINT] , identifier[signal] . identifier[SIG_DFL] ) identifier[self] . identifier[status] = identifier[pysolvers] . identifier[minicard_solve_lim] ( identifier[self] . identifier[minicard] , identifier[assumptions] ) identifier[def_sigint_handler] = identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGINT] , identifier[def_sigint_handler] ) keyword[if] identifier[self] . identifier[use_timer] : identifier[self] . identifier[call_time] = identifier[time] . identifier[clock] ()- identifier[start_time] identifier[self] . identifier[accu_time] += identifier[self] . identifier[call_time] keyword[return] identifier[self] . identifier[status]
def solve_limited(self, assumptions=[]): """ Solve internal formula using given budgets for conflicts and propagations. """ if self.minicard: if self.use_timer: start_time = time.clock() # depends on [control=['if'], data=[]] # saving default SIGINT handler def_sigint_handler = signal.signal(signal.SIGINT, signal.SIG_DFL) self.status = pysolvers.minicard_solve_lim(self.minicard, assumptions) # recovering default SIGINT handler def_sigint_handler = signal.signal(signal.SIGINT, def_sigint_handler) if self.use_timer: self.call_time = time.clock() - start_time self.accu_time += self.call_time # depends on [control=['if'], data=[]] return self.status # depends on [control=['if'], data=[]]
def BE_vs_clean_SE(self, delu_dict, delu_default=0, plot_eads=False, annotate_monolayer=True, JPERM2=False): """ For each facet, plot the clean surface energy against the most stable binding energy. Args: delu_dict (Dict): Dictionary of the chemical potentials to be set as constant. Note the key should be a sympy Symbol object of the format: Symbol("delu_el") where el is the name of the element. delu_default (float): Default value for all unset chemical potentials plot_eads (bool): Option to plot the adsorption energy (binding energy multiplied by number of adsorbates) instead. annotate_monolayer (bool): Whether or not to label each data point with its monolayer (adsorbate density per unit primiitve area) JPERM2 (bool): Whether to plot surface energy in /m^2 (True) or eV/A^2 (False) Returns: (Plot): Plot of clean surface energy vs binding energy for all facets. """ plt = pretty_plot(width=8, height=7) for hkl in self.all_slab_entries.keys(): for clean_entry in self.all_slab_entries[hkl].keys(): all_delu_dict = self.set_all_variables(delu_dict, delu_default) if self.all_slab_entries[hkl][clean_entry]: clean_se = self.as_coeffs_dict[clean_entry] se = sub_chempots(clean_se, all_delu_dict) for ads_entry in self.all_slab_entries[hkl][clean_entry]: ml = ads_entry.get_monolayer be = ads_entry.gibbs_binding_energy(eads=plot_eads) # Now plot the surface energy vs binding energy plt.scatter(se, be) if annotate_monolayer: plt.annotate("%.2f" % (ml), xy=[se, be], xytext=[se, be]) plt.xlabel(r"Surface energy ($J/m^2$)") if JPERM2 \ else plt.xlabel(r"Surface energy ($eV/\AA^2$)") plt.ylabel("Adsorption Energy (eV)") if plot_eads \ else plt.ylabel("Binding Energy (eV)") plt.tight_layout() plt.xticks(rotation=60) return plt
def function[BE_vs_clean_SE, parameter[self, delu_dict, delu_default, plot_eads, annotate_monolayer, JPERM2]]: constant[ For each facet, plot the clean surface energy against the most stable binding energy. Args: delu_dict (Dict): Dictionary of the chemical potentials to be set as constant. Note the key should be a sympy Symbol object of the format: Symbol("delu_el") where el is the name of the element. delu_default (float): Default value for all unset chemical potentials plot_eads (bool): Option to plot the adsorption energy (binding energy multiplied by number of adsorbates) instead. annotate_monolayer (bool): Whether or not to label each data point with its monolayer (adsorbate density per unit primiitve area) JPERM2 (bool): Whether to plot surface energy in /m^2 (True) or eV/A^2 (False) Returns: (Plot): Plot of clean surface energy vs binding energy for all facets. ] variable[plt] assign[=] call[name[pretty_plot], parameter[]] for taget[name[hkl]] in starred[call[name[self].all_slab_entries.keys, parameter[]]] begin[:] for taget[name[clean_entry]] in starred[call[call[name[self].all_slab_entries][name[hkl]].keys, parameter[]]] begin[:] variable[all_delu_dict] assign[=] call[name[self].set_all_variables, parameter[name[delu_dict], name[delu_default]]] if call[call[name[self].all_slab_entries][name[hkl]]][name[clean_entry]] begin[:] variable[clean_se] assign[=] call[name[self].as_coeffs_dict][name[clean_entry]] variable[se] assign[=] call[name[sub_chempots], parameter[name[clean_se], name[all_delu_dict]]] for taget[name[ads_entry]] in starred[call[call[name[self].all_slab_entries][name[hkl]]][name[clean_entry]]] begin[:] variable[ml] assign[=] name[ads_entry].get_monolayer variable[be] assign[=] call[name[ads_entry].gibbs_binding_energy, parameter[]] call[name[plt].scatter, parameter[name[se], name[be]]] if name[annotate_monolayer] begin[:] call[name[plt].annotate, parameter[binary_operation[constant[%.2f] <ast.Mod object at 0x7da2590d6920> name[ml]]]] <ast.IfExp 
object at 0x7da1b1cb48b0> <ast.IfExp object at 0x7da1b1cb4550> call[name[plt].tight_layout, parameter[]] call[name[plt].xticks, parameter[]] return[name[plt]]
keyword[def] identifier[BE_vs_clean_SE] ( identifier[self] , identifier[delu_dict] , identifier[delu_default] = literal[int] , identifier[plot_eads] = keyword[False] , identifier[annotate_monolayer] = keyword[True] , identifier[JPERM2] = keyword[False] ): literal[string] identifier[plt] = identifier[pretty_plot] ( identifier[width] = literal[int] , identifier[height] = literal[int] ) keyword[for] identifier[hkl] keyword[in] identifier[self] . identifier[all_slab_entries] . identifier[keys] (): keyword[for] identifier[clean_entry] keyword[in] identifier[self] . identifier[all_slab_entries] [ identifier[hkl] ]. identifier[keys] (): identifier[all_delu_dict] = identifier[self] . identifier[set_all_variables] ( identifier[delu_dict] , identifier[delu_default] ) keyword[if] identifier[self] . identifier[all_slab_entries] [ identifier[hkl] ][ identifier[clean_entry] ]: identifier[clean_se] = identifier[self] . identifier[as_coeffs_dict] [ identifier[clean_entry] ] identifier[se] = identifier[sub_chempots] ( identifier[clean_se] , identifier[all_delu_dict] ) keyword[for] identifier[ads_entry] keyword[in] identifier[self] . identifier[all_slab_entries] [ identifier[hkl] ][ identifier[clean_entry] ]: identifier[ml] = identifier[ads_entry] . identifier[get_monolayer] identifier[be] = identifier[ads_entry] . identifier[gibbs_binding_energy] ( identifier[eads] = identifier[plot_eads] ) identifier[plt] . identifier[scatter] ( identifier[se] , identifier[be] ) keyword[if] identifier[annotate_monolayer] : identifier[plt] . identifier[annotate] ( literal[string] %( identifier[ml] ), identifier[xy] =[ identifier[se] , identifier[be] ], identifier[xytext] =[ identifier[se] , identifier[be] ]) identifier[plt] . identifier[xlabel] ( literal[string] ) keyword[if] identifier[JPERM2] keyword[else] identifier[plt] . identifier[xlabel] ( literal[string] ) identifier[plt] . identifier[ylabel] ( literal[string] ) keyword[if] identifier[plot_eads] keyword[else] identifier[plt] . 
identifier[ylabel] ( literal[string] ) identifier[plt] . identifier[tight_layout] () identifier[plt] . identifier[xticks] ( identifier[rotation] = literal[int] ) keyword[return] identifier[plt]
def BE_vs_clean_SE(self, delu_dict, delu_default=0, plot_eads=False, annotate_monolayer=True, JPERM2=False): """ For each facet, plot the clean surface energy against the most stable binding energy. Args: delu_dict (Dict): Dictionary of the chemical potentials to be set as constant. Note the key should be a sympy Symbol object of the format: Symbol("delu_el") where el is the name of the element. delu_default (float): Default value for all unset chemical potentials plot_eads (bool): Option to plot the adsorption energy (binding energy multiplied by number of adsorbates) instead. annotate_monolayer (bool): Whether or not to label each data point with its monolayer (adsorbate density per unit primiitve area) JPERM2 (bool): Whether to plot surface energy in /m^2 (True) or eV/A^2 (False) Returns: (Plot): Plot of clean surface energy vs binding energy for all facets. """ plt = pretty_plot(width=8, height=7) for hkl in self.all_slab_entries.keys(): for clean_entry in self.all_slab_entries[hkl].keys(): all_delu_dict = self.set_all_variables(delu_dict, delu_default) if self.all_slab_entries[hkl][clean_entry]: clean_se = self.as_coeffs_dict[clean_entry] se = sub_chempots(clean_se, all_delu_dict) for ads_entry in self.all_slab_entries[hkl][clean_entry]: ml = ads_entry.get_monolayer be = ads_entry.gibbs_binding_energy(eads=plot_eads) # Now plot the surface energy vs binding energy plt.scatter(se, be) if annotate_monolayer: plt.annotate('%.2f' % ml, xy=[se, be], xytext=[se, be]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ads_entry']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['clean_entry']] # depends on [control=['for'], data=['hkl']] plt.xlabel('Surface energy ($J/m^2$)') if JPERM2 else plt.xlabel('Surface energy ($eV/\\AA^2$)') plt.ylabel('Adsorption Energy (eV)') if plot_eads else plt.ylabel('Binding Energy (eV)') plt.tight_layout() plt.xticks(rotation=60) return plt
def _comprise(dict1,dict2): ''' dict1 = {'a':1,'b':2,'c':3,'d':4} dict2 = {'b':2,'c':3} _comprise(dict1,dict2) ''' len_1 = dict1.__len__() len_2 = dict2.__len__() if(len_2>len_1): return(False) else: for k2 in dict2: v2 = dict2[k2] if(k2 in dict1): v1 = dict1[k2] if(v1 == v2): return(True) else: return(False) else: return(False)
def function[_comprise, parameter[dict1, dict2]]: constant[ dict1 = {'a':1,'b':2,'c':3,'d':4} dict2 = {'b':2,'c':3} _comprise(dict1,dict2) ] variable[len_1] assign[=] call[name[dict1].__len__, parameter[]] variable[len_2] assign[=] call[name[dict2].__len__, parameter[]] if compare[name[len_2] greater[>] name[len_1]] begin[:] return[constant[False]]
keyword[def] identifier[_comprise] ( identifier[dict1] , identifier[dict2] ): literal[string] identifier[len_1] = identifier[dict1] . identifier[__len__] () identifier[len_2] = identifier[dict2] . identifier[__len__] () keyword[if] ( identifier[len_2] > identifier[len_1] ): keyword[return] ( keyword[False] ) keyword[else] : keyword[for] identifier[k2] keyword[in] identifier[dict2] : identifier[v2] = identifier[dict2] [ identifier[k2] ] keyword[if] ( identifier[k2] keyword[in] identifier[dict1] ): identifier[v1] = identifier[dict1] [ identifier[k2] ] keyword[if] ( identifier[v1] == identifier[v2] ): keyword[return] ( keyword[True] ) keyword[else] : keyword[return] ( keyword[False] ) keyword[else] : keyword[return] ( keyword[False] )
def _comprise(dict1, dict2): """ dict1 = {'a':1,'b':2,'c':3,'d':4} dict2 = {'b':2,'c':3} _comprise(dict1,dict2) """ len_1 = dict1.__len__() len_2 = dict2.__len__() if len_2 > len_1: return False # depends on [control=['if'], data=[]] else: for k2 in dict2: v2 = dict2[k2] if k2 in dict1: v1 = dict1[k2] if v1 == v2: return True # depends on [control=['if'], data=[]] else: return False # depends on [control=['if'], data=['k2', 'dict1']] else: return False # depends on [control=['for'], data=['k2']]
def changebasis(uu, vv, nn, pps): """ For a list of points given in standard coordinates (in terms of e1, e2 and e3), returns the same list expressed in the basis (uu, vv, nn), which is supposed to be orthonormal. :param uu: First vector of the basis :param vv: Second vector of the basis :param nn: Third vector of the bais :param pps: List of points in basis (e1, e2, e3) :return: List of points in basis (uu, vv, nn) """ MM = np.zeros([3, 3], np.float) for ii in range(3): MM[ii, 0] = uu[ii] MM[ii, 1] = vv[ii] MM[ii, 2] = nn[ii] PP = np.linalg.inv(MM) newpps = list() for pp in pps: newpps.append(matrixTimesVector(PP, pp)) return newpps
def function[changebasis, parameter[uu, vv, nn, pps]]: constant[ For a list of points given in standard coordinates (in terms of e1, e2 and e3), returns the same list expressed in the basis (uu, vv, nn), which is supposed to be orthonormal. :param uu: First vector of the basis :param vv: Second vector of the basis :param nn: Third vector of the bais :param pps: List of points in basis (e1, e2, e3) :return: List of points in basis (uu, vv, nn) ] variable[MM] assign[=] call[name[np].zeros, parameter[list[[<ast.Constant object at 0x7da2041d8e80>, <ast.Constant object at 0x7da2041d99f0>]], name[np].float]] for taget[name[ii]] in starred[call[name[range], parameter[constant[3]]]] begin[:] call[name[MM]][tuple[[<ast.Name object at 0x7da2041d9690>, <ast.Constant object at 0x7da2041d8670>]]] assign[=] call[name[uu]][name[ii]] call[name[MM]][tuple[[<ast.Name object at 0x7da2041da0e0>, <ast.Constant object at 0x7da2041d8880>]]] assign[=] call[name[vv]][name[ii]] call[name[MM]][tuple[[<ast.Name object at 0x7da2041da3b0>, <ast.Constant object at 0x7da2041d8af0>]]] assign[=] call[name[nn]][name[ii]] variable[PP] assign[=] call[name[np].linalg.inv, parameter[name[MM]]] variable[newpps] assign[=] call[name[list], parameter[]] for taget[name[pp]] in starred[name[pps]] begin[:] call[name[newpps].append, parameter[call[name[matrixTimesVector], parameter[name[PP], name[pp]]]]] return[name[newpps]]
keyword[def] identifier[changebasis] ( identifier[uu] , identifier[vv] , identifier[nn] , identifier[pps] ): literal[string] identifier[MM] = identifier[np] . identifier[zeros] ([ literal[int] , literal[int] ], identifier[np] . identifier[float] ) keyword[for] identifier[ii] keyword[in] identifier[range] ( literal[int] ): identifier[MM] [ identifier[ii] , literal[int] ]= identifier[uu] [ identifier[ii] ] identifier[MM] [ identifier[ii] , literal[int] ]= identifier[vv] [ identifier[ii] ] identifier[MM] [ identifier[ii] , literal[int] ]= identifier[nn] [ identifier[ii] ] identifier[PP] = identifier[np] . identifier[linalg] . identifier[inv] ( identifier[MM] ) identifier[newpps] = identifier[list] () keyword[for] identifier[pp] keyword[in] identifier[pps] : identifier[newpps] . identifier[append] ( identifier[matrixTimesVector] ( identifier[PP] , identifier[pp] )) keyword[return] identifier[newpps]
def changebasis(uu, vv, nn, pps): """ For a list of points given in standard coordinates (in terms of e1, e2 and e3), returns the same list expressed in the basis (uu, vv, nn), which is supposed to be orthonormal. :param uu: First vector of the basis :param vv: Second vector of the basis :param nn: Third vector of the bais :param pps: List of points in basis (e1, e2, e3) :return: List of points in basis (uu, vv, nn) """ MM = np.zeros([3, 3], np.float) for ii in range(3): MM[ii, 0] = uu[ii] MM[ii, 1] = vv[ii] MM[ii, 2] = nn[ii] # depends on [control=['for'], data=['ii']] PP = np.linalg.inv(MM) newpps = list() for pp in pps: newpps.append(matrixTimesVector(PP, pp)) # depends on [control=['for'], data=['pp']] return newpps
def spin_up_instance(self, command, job_name): """Start an instance in the VPC in the first available subnet. N instances will be started if nodes_per_block > 1. Not supported. We only do 1 node per block. Parameters ---------- command : str Command string to execute on the node. job_name : str Name associated with the instances. """ command = Template(template_string).substitute(jobname=job_name, user_script=command, linger=str(self.linger).lower(), worker_init=self.worker_init) instance_type = self.instance_type subnet = self.sn_ids[0] ami_id = self.image_id total_instances = len(self.instances) if float(self.spot_max_bid) > 0: spot_options = { 'MarketType': 'spot', 'SpotOptions': { 'MaxPrice': str(self.spot_max_bid), 'SpotInstanceType': 'one-time', 'InstanceInterruptionBehavior': 'terminate' } } else: spot_options = {} if total_instances > self.max_nodes: logger.warn("Exceeded instance limit ({}). Cannot continue\n".format(self.max_nodes)) return [None] try: tag_spec = [{"ResourceType": "instance", "Tags": [{'Key': 'Name', 'Value': job_name}]}] instance = self.ec2.create_instances( MinCount=1, MaxCount=1, InstanceType=instance_type, ImageId=ami_id, KeyName=self.key_name, SubnetId=subnet, SecurityGroupIds=[self.sg_id], TagSpecifications=tag_spec, InstanceMarketOptions=spot_options, InstanceInitiatedShutdownBehavior='terminate', IamInstanceProfile={'Arn': self.iam_instance_profile_arn}, UserData=command ) except ClientError as e: print(e) logger.error(e.response) return [None] except Exception as e: logger.error("Request for EC2 resources failed : {0}".format(e)) return [None] self.instances.append(instance[0].id) logger.info( "Started up 1 instance {} . Instance type:{}".format(instance[0].id, instance_type) ) return instance
def function[spin_up_instance, parameter[self, command, job_name]]: constant[Start an instance in the VPC in the first available subnet. N instances will be started if nodes_per_block > 1. Not supported. We only do 1 node per block. Parameters ---------- command : str Command string to execute on the node. job_name : str Name associated with the instances. ] variable[command] assign[=] call[call[name[Template], parameter[name[template_string]]].substitute, parameter[]] variable[instance_type] assign[=] name[self].instance_type variable[subnet] assign[=] call[name[self].sn_ids][constant[0]] variable[ami_id] assign[=] name[self].image_id variable[total_instances] assign[=] call[name[len], parameter[name[self].instances]] if compare[call[name[float], parameter[name[self].spot_max_bid]] greater[>] constant[0]] begin[:] variable[spot_options] assign[=] dictionary[[<ast.Constant object at 0x7da1b0137400>, <ast.Constant object at 0x7da1b020fd30>], [<ast.Constant object at 0x7da1b020d630>, <ast.Dict object at 0x7da1b020db40>]] if compare[name[total_instances] greater[>] name[self].max_nodes] begin[:] call[name[logger].warn, parameter[call[constant[Exceeded instance limit ({}). Cannot continue ].format, parameter[name[self].max_nodes]]]] return[list[[<ast.Constant object at 0x7da1b020f760>]]] <ast.Try object at 0x7da1b020db10> call[name[self].instances.append, parameter[call[name[instance]][constant[0]].id]] call[name[logger].info, parameter[call[constant[Started up 1 instance {} . Instance type:{}].format, parameter[call[name[instance]][constant[0]].id, name[instance_type]]]]] return[name[instance]]
keyword[def] identifier[spin_up_instance] ( identifier[self] , identifier[command] , identifier[job_name] ): literal[string] identifier[command] = identifier[Template] ( identifier[template_string] ). identifier[substitute] ( identifier[jobname] = identifier[job_name] , identifier[user_script] = identifier[command] , identifier[linger] = identifier[str] ( identifier[self] . identifier[linger] ). identifier[lower] (), identifier[worker_init] = identifier[self] . identifier[worker_init] ) identifier[instance_type] = identifier[self] . identifier[instance_type] identifier[subnet] = identifier[self] . identifier[sn_ids] [ literal[int] ] identifier[ami_id] = identifier[self] . identifier[image_id] identifier[total_instances] = identifier[len] ( identifier[self] . identifier[instances] ) keyword[if] identifier[float] ( identifier[self] . identifier[spot_max_bid] )> literal[int] : identifier[spot_options] ={ literal[string] : literal[string] , literal[string] :{ literal[string] : identifier[str] ( identifier[self] . identifier[spot_max_bid] ), literal[string] : literal[string] , literal[string] : literal[string] } } keyword[else] : identifier[spot_options] ={} keyword[if] identifier[total_instances] > identifier[self] . identifier[max_nodes] : identifier[logger] . identifier[warn] ( literal[string] . identifier[format] ( identifier[self] . identifier[max_nodes] )) keyword[return] [ keyword[None] ] keyword[try] : identifier[tag_spec] =[{ literal[string] : literal[string] , literal[string] :[{ literal[string] : literal[string] , literal[string] : identifier[job_name] }]}] identifier[instance] = identifier[self] . identifier[ec2] . identifier[create_instances] ( identifier[MinCount] = literal[int] , identifier[MaxCount] = literal[int] , identifier[InstanceType] = identifier[instance_type] , identifier[ImageId] = identifier[ami_id] , identifier[KeyName] = identifier[self] . 
identifier[key_name] , identifier[SubnetId] = identifier[subnet] , identifier[SecurityGroupIds] =[ identifier[self] . identifier[sg_id] ], identifier[TagSpecifications] = identifier[tag_spec] , identifier[InstanceMarketOptions] = identifier[spot_options] , identifier[InstanceInitiatedShutdownBehavior] = literal[string] , identifier[IamInstanceProfile] ={ literal[string] : identifier[self] . identifier[iam_instance_profile_arn] }, identifier[UserData] = identifier[command] ) keyword[except] identifier[ClientError] keyword[as] identifier[e] : identifier[print] ( identifier[e] ) identifier[logger] . identifier[error] ( identifier[e] . identifier[response] ) keyword[return] [ keyword[None] ] keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[e] )) keyword[return] [ keyword[None] ] identifier[self] . identifier[instances] . identifier[append] ( identifier[instance] [ literal[int] ]. identifier[id] ) identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[instance] [ literal[int] ]. identifier[id] , identifier[instance_type] ) ) keyword[return] identifier[instance]
def spin_up_instance(self, command, job_name): """Start an instance in the VPC in the first available subnet. N instances will be started if nodes_per_block > 1. Not supported. We only do 1 node per block. Parameters ---------- command : str Command string to execute on the node. job_name : str Name associated with the instances. """ command = Template(template_string).substitute(jobname=job_name, user_script=command, linger=str(self.linger).lower(), worker_init=self.worker_init) instance_type = self.instance_type subnet = self.sn_ids[0] ami_id = self.image_id total_instances = len(self.instances) if float(self.spot_max_bid) > 0: spot_options = {'MarketType': 'spot', 'SpotOptions': {'MaxPrice': str(self.spot_max_bid), 'SpotInstanceType': 'one-time', 'InstanceInterruptionBehavior': 'terminate'}} # depends on [control=['if'], data=[]] else: spot_options = {} if total_instances > self.max_nodes: logger.warn('Exceeded instance limit ({}). Cannot continue\n'.format(self.max_nodes)) return [None] # depends on [control=['if'], data=[]] try: tag_spec = [{'ResourceType': 'instance', 'Tags': [{'Key': 'Name', 'Value': job_name}]}] instance = self.ec2.create_instances(MinCount=1, MaxCount=1, InstanceType=instance_type, ImageId=ami_id, KeyName=self.key_name, SubnetId=subnet, SecurityGroupIds=[self.sg_id], TagSpecifications=tag_spec, InstanceMarketOptions=spot_options, InstanceInitiatedShutdownBehavior='terminate', IamInstanceProfile={'Arn': self.iam_instance_profile_arn}, UserData=command) # depends on [control=['try'], data=[]] except ClientError as e: print(e) logger.error(e.response) return [None] # depends on [control=['except'], data=['e']] except Exception as e: logger.error('Request for EC2 resources failed : {0}'.format(e)) return [None] # depends on [control=['except'], data=['e']] self.instances.append(instance[0].id) logger.info('Started up 1 instance {} . Instance type:{}'.format(instance[0].id, instance_type)) return instance
def setup(app): """Allow this package to be used as Sphinx extension. This is also called from the top-level ``__init__.py``. :type app: sphinx.application.Sphinx """ from .patches import patch_django_for_autodoc # When running, make sure Django doesn't execute querysets patch_django_for_autodoc() # Generate docstrings for Django model fields # Register the docstring processor with sphinx app.connect('autodoc-process-docstring', improve_model_docstring) # influence skip rules app.connect("autodoc-skip-member", autodoc_skip)
def function[setup, parameter[app]]: constant[Allow this package to be used as Sphinx extension. This is also called from the top-level ``__init__.py``. :type app: sphinx.application.Sphinx ] from relative_module[patches] import module[patch_django_for_autodoc] call[name[patch_django_for_autodoc], parameter[]] call[name[app].connect, parameter[constant[autodoc-process-docstring], name[improve_model_docstring]]] call[name[app].connect, parameter[constant[autodoc-skip-member], name[autodoc_skip]]]
keyword[def] identifier[setup] ( identifier[app] ): literal[string] keyword[from] . identifier[patches] keyword[import] identifier[patch_django_for_autodoc] identifier[patch_django_for_autodoc] () identifier[app] . identifier[connect] ( literal[string] , identifier[improve_model_docstring] ) identifier[app] . identifier[connect] ( literal[string] , identifier[autodoc_skip] )
def setup(app): """Allow this package to be used as Sphinx extension. This is also called from the top-level ``__init__.py``. :type app: sphinx.application.Sphinx """ from .patches import patch_django_for_autodoc # When running, make sure Django doesn't execute querysets patch_django_for_autodoc() # Generate docstrings for Django model fields # Register the docstring processor with sphinx app.connect('autodoc-process-docstring', improve_model_docstring) # influence skip rules app.connect('autodoc-skip-member', autodoc_skip)
def _compute_site_response_term(self, C, sites, pga1000): """ Compute and return site response model term This GMPE adopts the same site response scaling model of Walling et al (2008) as implemented in the Abrahamson & Silva (2008) GMPE. The functional form is retained here. """ vs_star = sites.vs30.copy() vs_star[vs_star > 1000.0] = 1000. arg = vs_star / C["vlin"] site_resp_term = C["theta12"] * np.log(arg) # Get linear scaling term idx = sites.vs30 >= C["vlin"] site_resp_term[idx] += (C["b"] * self.CONSTS["n"] * np.log(arg[idx])) # Get nonlinear scaling term idx = np.logical_not(idx) site_resp_term[idx] += ( -C["b"] * np.log(pga1000[idx] + self.CONSTS["c"]) + C["b"] * np.log(pga1000[idx] + self.CONSTS["c"] * (arg[idx] ** self.CONSTS["n"]))) return site_resp_term
def function[_compute_site_response_term, parameter[self, C, sites, pga1000]]: constant[ Compute and return site response model term This GMPE adopts the same site response scaling model of Walling et al (2008) as implemented in the Abrahamson & Silva (2008) GMPE. The functional form is retained here. ] variable[vs_star] assign[=] call[name[sites].vs30.copy, parameter[]] call[name[vs_star]][compare[name[vs_star] greater[>] constant[1000.0]]] assign[=] constant[1000.0] variable[arg] assign[=] binary_operation[name[vs_star] / call[name[C]][constant[vlin]]] variable[site_resp_term] assign[=] binary_operation[call[name[C]][constant[theta12]] * call[name[np].log, parameter[name[arg]]]] variable[idx] assign[=] compare[name[sites].vs30 greater_or_equal[>=] call[name[C]][constant[vlin]]] <ast.AugAssign object at 0x7da1b15ce800> variable[idx] assign[=] call[name[np].logical_not, parameter[name[idx]]] <ast.AugAssign object at 0x7da1b15cc520> return[name[site_resp_term]]
keyword[def] identifier[_compute_site_response_term] ( identifier[self] , identifier[C] , identifier[sites] , identifier[pga1000] ): literal[string] identifier[vs_star] = identifier[sites] . identifier[vs30] . identifier[copy] () identifier[vs_star] [ identifier[vs_star] > literal[int] ]= literal[int] identifier[arg] = identifier[vs_star] / identifier[C] [ literal[string] ] identifier[site_resp_term] = identifier[C] [ literal[string] ]* identifier[np] . identifier[log] ( identifier[arg] ) identifier[idx] = identifier[sites] . identifier[vs30] >= identifier[C] [ literal[string] ] identifier[site_resp_term] [ identifier[idx] ]+=( identifier[C] [ literal[string] ]* identifier[self] . identifier[CONSTS] [ literal[string] ]* identifier[np] . identifier[log] ( identifier[arg] [ identifier[idx] ])) identifier[idx] = identifier[np] . identifier[logical_not] ( identifier[idx] ) identifier[site_resp_term] [ identifier[idx] ]+=( - identifier[C] [ literal[string] ]* identifier[np] . identifier[log] ( identifier[pga1000] [ identifier[idx] ]+ identifier[self] . identifier[CONSTS] [ literal[string] ])+ identifier[C] [ literal[string] ]* identifier[np] . identifier[log] ( identifier[pga1000] [ identifier[idx] ]+ identifier[self] . identifier[CONSTS] [ literal[string] ]* ( identifier[arg] [ identifier[idx] ]** identifier[self] . identifier[CONSTS] [ literal[string] ]))) keyword[return] identifier[site_resp_term]
def _compute_site_response_term(self, C, sites, pga1000): """ Compute and return site response model term This GMPE adopts the same site response scaling model of Walling et al (2008) as implemented in the Abrahamson & Silva (2008) GMPE. The functional form is retained here. """ vs_star = sites.vs30.copy() vs_star[vs_star > 1000.0] = 1000.0 arg = vs_star / C['vlin'] site_resp_term = C['theta12'] * np.log(arg) # Get linear scaling term idx = sites.vs30 >= C['vlin'] site_resp_term[idx] += C['b'] * self.CONSTS['n'] * np.log(arg[idx]) # Get nonlinear scaling term idx = np.logical_not(idx) site_resp_term[idx] += -C['b'] * np.log(pga1000[idx] + self.CONSTS['c']) + C['b'] * np.log(pga1000[idx] + self.CONSTS['c'] * arg[idx] ** self.CONSTS['n']) return site_resp_term
def by_cat(self): """ Iterates over categories and returns a filtered datamat. If a categories object is attached, the images object for the given category is returned as well (else None is returned). Returns: (datamat, categories) : A tuple that contains first the filtered datamat (has only one category) and second the associated categories object (if it is available, None otherwise) """ for value in np.unique(self.category): cat_fm = self.filter(self.category == value) if self._categories: yield (cat_fm, self._categories[value]) else: yield (cat_fm, None)
def function[by_cat, parameter[self]]: constant[ Iterates over categories and returns a filtered datamat. If a categories object is attached, the images object for the given category is returned as well (else None is returned). Returns: (datamat, categories) : A tuple that contains first the filtered datamat (has only one category) and second the associated categories object (if it is available, None otherwise) ] for taget[name[value]] in starred[call[name[np].unique, parameter[name[self].category]]] begin[:] variable[cat_fm] assign[=] call[name[self].filter, parameter[compare[name[self].category equal[==] name[value]]]] if name[self]._categories begin[:] <ast.Yield object at 0x7da20cabd8a0>
keyword[def] identifier[by_cat] ( identifier[self] ): literal[string] keyword[for] identifier[value] keyword[in] identifier[np] . identifier[unique] ( identifier[self] . identifier[category] ): identifier[cat_fm] = identifier[self] . identifier[filter] ( identifier[self] . identifier[category] == identifier[value] ) keyword[if] identifier[self] . identifier[_categories] : keyword[yield] ( identifier[cat_fm] , identifier[self] . identifier[_categories] [ identifier[value] ]) keyword[else] : keyword[yield] ( identifier[cat_fm] , keyword[None] )
def by_cat(self): """ Iterates over categories and returns a filtered datamat. If a categories object is attached, the images object for the given category is returned as well (else None is returned). Returns: (datamat, categories) : A tuple that contains first the filtered datamat (has only one category) and second the associated categories object (if it is available, None otherwise) """ for value in np.unique(self.category): cat_fm = self.filter(self.category == value) if self._categories: yield (cat_fm, self._categories[value]) # depends on [control=['if'], data=[]] else: yield (cat_fm, None) # depends on [control=['for'], data=['value']]
def resolved_row(objs, geomatcher): """Temporarily insert ``RoW`` into ``geomatcher.topology``, defined by the topo faces not used in ``objs``. Will overwrite any existing ``RoW``. On exiting the context manager, ``RoW`` is deleted.""" def get_locations(lst): for elem in lst: try: yield elem['location'] except TypeError: yield elem geomatcher['RoW'] = geomatcher.faces.difference( reduce( set.union, [geomatcher[obj] for obj in get_locations(objs)] ) ) yield geomatcher del geomatcher['RoW']
def function[resolved_row, parameter[objs, geomatcher]]: constant[Temporarily insert ``RoW`` into ``geomatcher.topology``, defined by the topo faces not used in ``objs``. Will overwrite any existing ``RoW``. On exiting the context manager, ``RoW`` is deleted.] def function[get_locations, parameter[lst]]: for taget[name[elem]] in starred[name[lst]] begin[:] <ast.Try object at 0x7da1b15a3d00> call[name[geomatcher]][constant[RoW]] assign[=] call[name[geomatcher].faces.difference, parameter[call[name[reduce], parameter[name[set].union, <ast.ListComp object at 0x7da1b15a2470>]]]] <ast.Yield object at 0x7da1b14d1960> <ast.Delete object at 0x7da1b14d2260>
keyword[def] identifier[resolved_row] ( identifier[objs] , identifier[geomatcher] ): literal[string] keyword[def] identifier[get_locations] ( identifier[lst] ): keyword[for] identifier[elem] keyword[in] identifier[lst] : keyword[try] : keyword[yield] identifier[elem] [ literal[string] ] keyword[except] identifier[TypeError] : keyword[yield] identifier[elem] identifier[geomatcher] [ literal[string] ]= identifier[geomatcher] . identifier[faces] . identifier[difference] ( identifier[reduce] ( identifier[set] . identifier[union] , [ identifier[geomatcher] [ identifier[obj] ] keyword[for] identifier[obj] keyword[in] identifier[get_locations] ( identifier[objs] )] ) ) keyword[yield] identifier[geomatcher] keyword[del] identifier[geomatcher] [ literal[string] ]
def resolved_row(objs, geomatcher): """Temporarily insert ``RoW`` into ``geomatcher.topology``, defined by the topo faces not used in ``objs``. Will overwrite any existing ``RoW``. On exiting the context manager, ``RoW`` is deleted.""" def get_locations(lst): for elem in lst: try: yield elem['location'] # depends on [control=['try'], data=[]] except TypeError: yield elem # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['elem']] geomatcher['RoW'] = geomatcher.faces.difference(reduce(set.union, [geomatcher[obj] for obj in get_locations(objs)])) yield geomatcher del geomatcher['RoW']
def get_secret(key, *args, **kwargs): """Retrieves a secret.""" env_value = os.environ.get(key.replace('.', '_').upper()) if not env_value: # Backwards compatibility: the deprecated secrets vault return _get_secret_from_vault(key, *args, **kwargs) return env_value
def function[get_secret, parameter[key]]: constant[Retrieves a secret.] variable[env_value] assign[=] call[name[os].environ.get, parameter[call[call[name[key].replace, parameter[constant[.], constant[_]]].upper, parameter[]]]] if <ast.UnaryOp object at 0x7da20e956a70> begin[:] return[call[name[_get_secret_from_vault], parameter[name[key], <ast.Starred object at 0x7da20e954cd0>]]] return[name[env_value]]
keyword[def] identifier[get_secret] ( identifier[key] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[env_value] = identifier[os] . identifier[environ] . identifier[get] ( identifier[key] . identifier[replace] ( literal[string] , literal[string] ). identifier[upper] ()) keyword[if] keyword[not] identifier[env_value] : keyword[return] identifier[_get_secret_from_vault] ( identifier[key] ,* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[env_value]
def get_secret(key, *args, **kwargs): """Retrieves a secret.""" env_value = os.environ.get(key.replace('.', '_').upper()) if not env_value: # Backwards compatibility: the deprecated secrets vault return _get_secret_from_vault(key, *args, **kwargs) # depends on [control=['if'], data=[]] return env_value
def _rem_id_from_index(self, indexedField, pk, val, conn=None): ''' _rem_id_from_index - Removes an id from an index internal ''' if conn is None: conn = self._get_connection() conn.srem(self._get_key_for_index(indexedField, val), pk)
def function[_rem_id_from_index, parameter[self, indexedField, pk, val, conn]]: constant[ _rem_id_from_index - Removes an id from an index internal ] if compare[name[conn] is constant[None]] begin[:] variable[conn] assign[=] call[name[self]._get_connection, parameter[]] call[name[conn].srem, parameter[call[name[self]._get_key_for_index, parameter[name[indexedField], name[val]]], name[pk]]]
keyword[def] identifier[_rem_id_from_index] ( identifier[self] , identifier[indexedField] , identifier[pk] , identifier[val] , identifier[conn] = keyword[None] ): literal[string] keyword[if] identifier[conn] keyword[is] keyword[None] : identifier[conn] = identifier[self] . identifier[_get_connection] () identifier[conn] . identifier[srem] ( identifier[self] . identifier[_get_key_for_index] ( identifier[indexedField] , identifier[val] ), identifier[pk] )
def _rem_id_from_index(self, indexedField, pk, val, conn=None): """ _rem_id_from_index - Removes an id from an index internal """ if conn is None: conn = self._get_connection() # depends on [control=['if'], data=['conn']] conn.srem(self._get_key_for_index(indexedField, val), pk)
def __get_formulas(self): """ Gets formulas in this cell range as a tuple. If cells contain actual formulas then the returned values start with an equal sign but all values are returned. """ array = self._get_target().getFormulaArray() return tuple(itertools.chain.from_iterable(array))
def function[__get_formulas, parameter[self]]: constant[ Gets formulas in this cell range as a tuple. If cells contain actual formulas then the returned values start with an equal sign but all values are returned. ] variable[array] assign[=] call[call[name[self]._get_target, parameter[]].getFormulaArray, parameter[]] return[call[name[tuple], parameter[call[name[itertools].chain.from_iterable, parameter[name[array]]]]]]
keyword[def] identifier[__get_formulas] ( identifier[self] ): literal[string] identifier[array] = identifier[self] . identifier[_get_target] (). identifier[getFormulaArray] () keyword[return] identifier[tuple] ( identifier[itertools] . identifier[chain] . identifier[from_iterable] ( identifier[array] ))
def __get_formulas(self): """ Gets formulas in this cell range as a tuple. If cells contain actual formulas then the returned values start with an equal sign but all values are returned. """ array = self._get_target().getFormulaArray() return tuple(itertools.chain.from_iterable(array))
def is_connected(self, attempts=3): """Try to reconnect if neccessary. :param attempts: The amount of tries to reconnect if neccessary. :type attempts: ``int`` """ if self.gce is None: while attempts > 0: self.logger.info("Attempting to connect ...") try: self.connect() except ComputeEngineManagerException: attempts -= 1 continue self.logger.info("Connection established.") return True self.logger.error("Unable to connect to Google Compute Engine.") return False return True
def function[is_connected, parameter[self, attempts]]: constant[Try to reconnect if neccessary. :param attempts: The amount of tries to reconnect if neccessary. :type attempts: ``int`` ] if compare[name[self].gce is constant[None]] begin[:] while compare[name[attempts] greater[>] constant[0]] begin[:] call[name[self].logger.info, parameter[constant[Attempting to connect ...]]] <ast.Try object at 0x7da1b1394370> call[name[self].logger.info, parameter[constant[Connection established.]]] return[constant[True]] call[name[self].logger.error, parameter[constant[Unable to connect to Google Compute Engine.]]] return[constant[False]] return[constant[True]]
keyword[def] identifier[is_connected] ( identifier[self] , identifier[attempts] = literal[int] ): literal[string] keyword[if] identifier[self] . identifier[gce] keyword[is] keyword[None] : keyword[while] identifier[attempts] > literal[int] : identifier[self] . identifier[logger] . identifier[info] ( literal[string] ) keyword[try] : identifier[self] . identifier[connect] () keyword[except] identifier[ComputeEngineManagerException] : identifier[attempts] -= literal[int] keyword[continue] identifier[self] . identifier[logger] . identifier[info] ( literal[string] ) keyword[return] keyword[True] identifier[self] . identifier[logger] . identifier[error] ( literal[string] ) keyword[return] keyword[False] keyword[return] keyword[True]
def is_connected(self, attempts=3): """Try to reconnect if neccessary. :param attempts: The amount of tries to reconnect if neccessary. :type attempts: ``int`` """ if self.gce is None: while attempts > 0: self.logger.info('Attempting to connect ...') try: self.connect() # depends on [control=['try'], data=[]] except ComputeEngineManagerException: attempts -= 1 continue # depends on [control=['except'], data=[]] self.logger.info('Connection established.') return True # depends on [control=['while'], data=['attempts']] self.logger.error('Unable to connect to Google Compute Engine.') return False # depends on [control=['if'], data=[]] return True
def from_html(html_style_colour_str: str): """ Parser for KDialog output, which outputs a HTML style hex code like #55aa00 @param html_style_colour_str: HTML style hex string encoded colour. (#rrggbb) @return: ColourData instance @rtype: ColourData """ html_style_colour_str = html_style_colour_str.lstrip("#") components = list(map("".join, zip(*[iter(html_style_colour_str)]*2))) return ColourData(*(int(colour, 16) for colour in components))
def function[from_html, parameter[html_style_colour_str]]: constant[ Parser for KDialog output, which outputs a HTML style hex code like #55aa00 @param html_style_colour_str: HTML style hex string encoded colour. (#rrggbb) @return: ColourData instance @rtype: ColourData ] variable[html_style_colour_str] assign[=] call[name[html_style_colour_str].lstrip, parameter[constant[#]]] variable[components] assign[=] call[name[list], parameter[call[name[map], parameter[constant[].join, call[name[zip], parameter[<ast.Starred object at 0x7da18dc997b0>]]]]]] return[call[name[ColourData], parameter[<ast.Starred object at 0x7da18dc9a5c0>]]]
keyword[def] identifier[from_html] ( identifier[html_style_colour_str] : identifier[str] ): literal[string] identifier[html_style_colour_str] = identifier[html_style_colour_str] . identifier[lstrip] ( literal[string] ) identifier[components] = identifier[list] ( identifier[map] ( literal[string] . identifier[join] , identifier[zip] (*[ identifier[iter] ( identifier[html_style_colour_str] )]* literal[int] ))) keyword[return] identifier[ColourData] (*( identifier[int] ( identifier[colour] , literal[int] ) keyword[for] identifier[colour] keyword[in] identifier[components] ))
def from_html(html_style_colour_str: str): """ Parser for KDialog output, which outputs a HTML style hex code like #55aa00 @param html_style_colour_str: HTML style hex string encoded colour. (#rrggbb) @return: ColourData instance @rtype: ColourData """ html_style_colour_str = html_style_colour_str.lstrip('#') components = list(map(''.join, zip(*[iter(html_style_colour_str)] * 2))) return ColourData(*(int(colour, 16) for colour in components))
def boot(self): """ Boots a server for the app, if it isn't already booted. Returns: Server: This server. """ if not self.responsive: # Remember the port so we can reuse it if we try to serve this same app again. type(self)._ports[self.port_key] = self.port init_func = capybara.servers[capybara.server_name] init_args = (self.middleware, self.port, self.host) self.server_thread = Thread(target=init_func, args=init_args) # Inform Python that it shouldn't wait for this thread to terminate before # exiting. (It will still be appropriately terminated when the process exits.) self.server_thread.daemon = True self.server_thread.start() # Make sure the server actually starts and becomes responsive. timer = Timer(60) while not self.responsive: if timer.expired: raise RuntimeError("WSGI application timed out during boot") self.server_thread.join(0.1) return self
def function[boot, parameter[self]]: constant[ Boots a server for the app, if it isn't already booted. Returns: Server: This server. ] if <ast.UnaryOp object at 0x7da1b033e290> begin[:] call[call[name[type], parameter[name[self]]]._ports][name[self].port_key] assign[=] name[self].port variable[init_func] assign[=] call[name[capybara].servers][name[capybara].server_name] variable[init_args] assign[=] tuple[[<ast.Attribute object at 0x7da1b0211330>, <ast.Attribute object at 0x7da1b02105b0>, <ast.Attribute object at 0x7da1b0213eb0>]] name[self].server_thread assign[=] call[name[Thread], parameter[]] name[self].server_thread.daemon assign[=] constant[True] call[name[self].server_thread.start, parameter[]] variable[timer] assign[=] call[name[Timer], parameter[constant[60]]] while <ast.UnaryOp object at 0x7da1b0210070> begin[:] if name[timer].expired begin[:] <ast.Raise object at 0x7da1b0210340> call[name[self].server_thread.join, parameter[constant[0.1]]] return[name[self]]
keyword[def] identifier[boot] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[responsive] : identifier[type] ( identifier[self] ). identifier[_ports] [ identifier[self] . identifier[port_key] ]= identifier[self] . identifier[port] identifier[init_func] = identifier[capybara] . identifier[servers] [ identifier[capybara] . identifier[server_name] ] identifier[init_args] =( identifier[self] . identifier[middleware] , identifier[self] . identifier[port] , identifier[self] . identifier[host] ) identifier[self] . identifier[server_thread] = identifier[Thread] ( identifier[target] = identifier[init_func] , identifier[args] = identifier[init_args] ) identifier[self] . identifier[server_thread] . identifier[daemon] = keyword[True] identifier[self] . identifier[server_thread] . identifier[start] () identifier[timer] = identifier[Timer] ( literal[int] ) keyword[while] keyword[not] identifier[self] . identifier[responsive] : keyword[if] identifier[timer] . identifier[expired] : keyword[raise] identifier[RuntimeError] ( literal[string] ) identifier[self] . identifier[server_thread] . identifier[join] ( literal[int] ) keyword[return] identifier[self]
def boot(self): """ Boots a server for the app, if it isn't already booted. Returns: Server: This server. """ if not self.responsive: # Remember the port so we can reuse it if we try to serve this same app again. type(self)._ports[self.port_key] = self.port init_func = capybara.servers[capybara.server_name] init_args = (self.middleware, self.port, self.host) self.server_thread = Thread(target=init_func, args=init_args) # Inform Python that it shouldn't wait for this thread to terminate before # exiting. (It will still be appropriately terminated when the process exits.) self.server_thread.daemon = True self.server_thread.start() # Make sure the server actually starts and becomes responsive. timer = Timer(60) while not self.responsive: if timer.expired: raise RuntimeError('WSGI application timed out during boot') # depends on [control=['if'], data=[]] self.server_thread.join(0.1) # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]] return self
def _imm_resolve_deps(cls): ''' _imm_resolve_deps(imm_class) resolves the dependencies of the given immutable class imm_class and edits the immutable metadata appropriately. ''' dat = cls._pimms_immutable_data_ params = dat['params'] values = dat['values'] consts = dat['consts'] checks = dat['checks'] members = list(params.keys()) + list(values.keys()) mem_ids = {k:i for (i,k) in enumerate(members)} # make sure that every input that's not already a value or param becomes a param: all_inputs = [v[0] for v in six.itervalues(values)] + [c[0] for c in six.itervalues(checks)] all_inputs = set([i for inp in all_inputs for i in inp]) extra_inputs = [i for i in all_inputs if i not in mem_ids] for i in extra_inputs: params[i] = (None, None, [], [], []) mem_ids[i] = len(members) members.append(i) # create a graph of the dependencies: dep_edges = set([]) for (v,(inputs,_,_)) in six.iteritems(values): for i in inputs: dep_edges.add((mem_ids[v], mem_ids[i])) # get the transitive closure... deps = _imm_trans_clos(dep_edges) # we can put all the param and value deps into their appropriate places now for (dependant, dependency) in deps: if dependency is dependant: raise RuntimeError('circular dependency in immutable: value \'%s\'' % dependant) (mdpcy, mdpdt) = (members[dependency], members[dependant]) if mdpcy in params: params[mdpcy][4].append(mdpdt) elif mdpcy in values: values[mdpcy][2].append(mdpdt) # last major task is to setup the checks deps2params = {v: set([]) for v in six.iterkeys(values)} for (p,pd) in six.iteritems(params): for v in pd[4]: deps2params[v].add(p) deps2consts = {v: set([]) for v in six.iterkeys(values)} for c in six.iterkeys(consts): deps = values[c][2] for v in deps: deps2consts[v].add(c) for (c,(arg_list,check_fn)) in six.iteritems(checks): param_list = set([]) const_list = set([]) for a in arg_list: if a in params: param_list.add(a) elif a in values: if a in consts: const_list.add(a) else: param_list |= deps2params[a] const_list |= deps2consts[a] else: 
raise RuntimeError('requirement %s requested non-member: %s' % (c, a)) for p in param_list: params[p][2].append(arg_list) params[p][3].append(check_fn) for c in const_list: consts[p][0].append(arg_list) consts[p][1].append(check_fn) # That's it; all data should be built at this point return cls
def function[_imm_resolve_deps, parameter[cls]]: constant[ _imm_resolve_deps(imm_class) resolves the dependencies of the given immutable class imm_class and edits the immutable metadata appropriately. ] variable[dat] assign[=] name[cls]._pimms_immutable_data_ variable[params] assign[=] call[name[dat]][constant[params]] variable[values] assign[=] call[name[dat]][constant[values]] variable[consts] assign[=] call[name[dat]][constant[consts]] variable[checks] assign[=] call[name[dat]][constant[checks]] variable[members] assign[=] binary_operation[call[name[list], parameter[call[name[params].keys, parameter[]]]] + call[name[list], parameter[call[name[values].keys, parameter[]]]]] variable[mem_ids] assign[=] <ast.DictComp object at 0x7da1b209d9f0> variable[all_inputs] assign[=] binary_operation[<ast.ListComp object at 0x7da1b209c0a0> + <ast.ListComp object at 0x7da1b209c310>] variable[all_inputs] assign[=] call[name[set], parameter[<ast.ListComp object at 0x7da1b209e740>]] variable[extra_inputs] assign[=] <ast.ListComp object at 0x7da1b209e1a0> for taget[name[i]] in starred[name[extra_inputs]] begin[:] call[name[params]][name[i]] assign[=] tuple[[<ast.Constant object at 0x7da1b209dae0>, <ast.Constant object at 0x7da1b209dc60>, <ast.List object at 0x7da1b209dab0>, <ast.List object at 0x7da1b209e860>, <ast.List object at 0x7da1b209e890>]] call[name[mem_ids]][name[i]] assign[=] call[name[len], parameter[name[members]]] call[name[members].append, parameter[name[i]]] variable[dep_edges] assign[=] call[name[set], parameter[list[[]]]] for taget[tuple[[<ast.Name object at 0x7da1b209d720>, <ast.Tuple object at 0x7da1b209d7e0>]]] in starred[call[name[six].iteritems, parameter[name[values]]]] begin[:] for taget[name[i]] in starred[name[inputs]] begin[:] call[name[dep_edges].add, parameter[tuple[[<ast.Subscript object at 0x7da1b209d450>, <ast.Subscript object at 0x7da1b209d3c0>]]]] variable[deps] assign[=] call[name[_imm_trans_clos], parameter[name[dep_edges]]] for 
taget[tuple[[<ast.Name object at 0x7da1b209d330>, <ast.Name object at 0x7da1b209d210>]]] in starred[name[deps]] begin[:] if compare[name[dependency] is name[dependant]] begin[:] <ast.Raise object at 0x7da1b209ea10> <ast.Tuple object at 0x7da1b209d180> assign[=] tuple[[<ast.Subscript object at 0x7da1b209cfa0>, <ast.Subscript object at 0x7da1b209cf40>]] if compare[name[mdpcy] in name[params]] begin[:] call[call[call[name[params]][name[mdpcy]]][constant[4]].append, parameter[name[mdpdt]]] variable[deps2params] assign[=] <ast.DictComp object at 0x7da1b209c8e0> for taget[tuple[[<ast.Name object at 0x7da18f09db40>, <ast.Name object at 0x7da18f09c850>]]] in starred[call[name[six].iteritems, parameter[name[params]]]] begin[:] for taget[name[v]] in starred[call[name[pd]][constant[4]]] begin[:] call[call[name[deps2params]][name[v]].add, parameter[name[p]]] variable[deps2consts] assign[=] <ast.DictComp object at 0x7da18f09df00> for taget[name[c]] in starred[call[name[six].iterkeys, parameter[name[consts]]]] begin[:] variable[deps] assign[=] call[call[name[values]][name[c]]][constant[2]] for taget[name[v]] in starred[name[deps]] begin[:] call[call[name[deps2consts]][name[v]].add, parameter[name[c]]] for taget[tuple[[<ast.Name object at 0x7da18f09c160>, <ast.Tuple object at 0x7da18f09cbb0>]]] in starred[call[name[six].iteritems, parameter[name[checks]]]] begin[:] variable[param_list] assign[=] call[name[set], parameter[list[[]]]] variable[const_list] assign[=] call[name[set], parameter[list[[]]]] for taget[name[a]] in starred[name[arg_list]] begin[:] if compare[name[a] in name[params]] begin[:] call[name[param_list].add, parameter[name[a]]] for taget[name[p]] in starred[name[param_list]] begin[:] call[call[call[name[params]][name[p]]][constant[2]].append, parameter[name[arg_list]]] call[call[call[name[params]][name[p]]][constant[3]].append, parameter[name[check_fn]]] for taget[name[c]] in starred[name[const_list]] begin[:] 
call[call[call[name[consts]][name[p]]][constant[0]].append, parameter[name[arg_list]]] call[call[call[name[consts]][name[p]]][constant[1]].append, parameter[name[check_fn]]] return[name[cls]]
keyword[def] identifier[_imm_resolve_deps] ( identifier[cls] ): literal[string] identifier[dat] = identifier[cls] . identifier[_pimms_immutable_data_] identifier[params] = identifier[dat] [ literal[string] ] identifier[values] = identifier[dat] [ literal[string] ] identifier[consts] = identifier[dat] [ literal[string] ] identifier[checks] = identifier[dat] [ literal[string] ] identifier[members] = identifier[list] ( identifier[params] . identifier[keys] ())+ identifier[list] ( identifier[values] . identifier[keys] ()) identifier[mem_ids] ={ identifier[k] : identifier[i] keyword[for] ( identifier[i] , identifier[k] ) keyword[in] identifier[enumerate] ( identifier[members] )} identifier[all_inputs] =[ identifier[v] [ literal[int] ] keyword[for] identifier[v] keyword[in] identifier[six] . identifier[itervalues] ( identifier[values] )]+[ identifier[c] [ literal[int] ] keyword[for] identifier[c] keyword[in] identifier[six] . identifier[itervalues] ( identifier[checks] )] identifier[all_inputs] = identifier[set] ([ identifier[i] keyword[for] identifier[inp] keyword[in] identifier[all_inputs] keyword[for] identifier[i] keyword[in] identifier[inp] ]) identifier[extra_inputs] =[ identifier[i] keyword[for] identifier[i] keyword[in] identifier[all_inputs] keyword[if] identifier[i] keyword[not] keyword[in] identifier[mem_ids] ] keyword[for] identifier[i] keyword[in] identifier[extra_inputs] : identifier[params] [ identifier[i] ]=( keyword[None] , keyword[None] ,[],[],[]) identifier[mem_ids] [ identifier[i] ]= identifier[len] ( identifier[members] ) identifier[members] . identifier[append] ( identifier[i] ) identifier[dep_edges] = identifier[set] ([]) keyword[for] ( identifier[v] ,( identifier[inputs] , identifier[_] , identifier[_] )) keyword[in] identifier[six] . identifier[iteritems] ( identifier[values] ): keyword[for] identifier[i] keyword[in] identifier[inputs] : identifier[dep_edges] . 
identifier[add] (( identifier[mem_ids] [ identifier[v] ], identifier[mem_ids] [ identifier[i] ])) identifier[deps] = identifier[_imm_trans_clos] ( identifier[dep_edges] ) keyword[for] ( identifier[dependant] , identifier[dependency] ) keyword[in] identifier[deps] : keyword[if] identifier[dependency] keyword[is] identifier[dependant] : keyword[raise] identifier[RuntimeError] ( literal[string] % identifier[dependant] ) ( identifier[mdpcy] , identifier[mdpdt] )=( identifier[members] [ identifier[dependency] ], identifier[members] [ identifier[dependant] ]) keyword[if] identifier[mdpcy] keyword[in] identifier[params] : identifier[params] [ identifier[mdpcy] ][ literal[int] ]. identifier[append] ( identifier[mdpdt] ) keyword[elif] identifier[mdpcy] keyword[in] identifier[values] : identifier[values] [ identifier[mdpcy] ][ literal[int] ]. identifier[append] ( identifier[mdpdt] ) identifier[deps2params] ={ identifier[v] : identifier[set] ([]) keyword[for] identifier[v] keyword[in] identifier[six] . identifier[iterkeys] ( identifier[values] )} keyword[for] ( identifier[p] , identifier[pd] ) keyword[in] identifier[six] . identifier[iteritems] ( identifier[params] ): keyword[for] identifier[v] keyword[in] identifier[pd] [ literal[int] ]: identifier[deps2params] [ identifier[v] ]. identifier[add] ( identifier[p] ) identifier[deps2consts] ={ identifier[v] : identifier[set] ([]) keyword[for] identifier[v] keyword[in] identifier[six] . identifier[iterkeys] ( identifier[values] )} keyword[for] identifier[c] keyword[in] identifier[six] . identifier[iterkeys] ( identifier[consts] ): identifier[deps] = identifier[values] [ identifier[c] ][ literal[int] ] keyword[for] identifier[v] keyword[in] identifier[deps] : identifier[deps2consts] [ identifier[v] ]. identifier[add] ( identifier[c] ) keyword[for] ( identifier[c] ,( identifier[arg_list] , identifier[check_fn] )) keyword[in] identifier[six] . 
identifier[iteritems] ( identifier[checks] ): identifier[param_list] = identifier[set] ([]) identifier[const_list] = identifier[set] ([]) keyword[for] identifier[a] keyword[in] identifier[arg_list] : keyword[if] identifier[a] keyword[in] identifier[params] : identifier[param_list] . identifier[add] ( identifier[a] ) keyword[elif] identifier[a] keyword[in] identifier[values] : keyword[if] identifier[a] keyword[in] identifier[consts] : identifier[const_list] . identifier[add] ( identifier[a] ) keyword[else] : identifier[param_list] |= identifier[deps2params] [ identifier[a] ] identifier[const_list] |= identifier[deps2consts] [ identifier[a] ] keyword[else] : keyword[raise] identifier[RuntimeError] ( literal[string] %( identifier[c] , identifier[a] )) keyword[for] identifier[p] keyword[in] identifier[param_list] : identifier[params] [ identifier[p] ][ literal[int] ]. identifier[append] ( identifier[arg_list] ) identifier[params] [ identifier[p] ][ literal[int] ]. identifier[append] ( identifier[check_fn] ) keyword[for] identifier[c] keyword[in] identifier[const_list] : identifier[consts] [ identifier[p] ][ literal[int] ]. identifier[append] ( identifier[arg_list] ) identifier[consts] [ identifier[p] ][ literal[int] ]. identifier[append] ( identifier[check_fn] ) keyword[return] identifier[cls]
def _imm_resolve_deps(cls): """ _imm_resolve_deps(imm_class) resolves the dependencies of the given immutable class imm_class and edits the immutable metadata appropriately. """ dat = cls._pimms_immutable_data_ params = dat['params'] values = dat['values'] consts = dat['consts'] checks = dat['checks'] members = list(params.keys()) + list(values.keys()) mem_ids = {k: i for (i, k) in enumerate(members)} # make sure that every input that's not already a value or param becomes a param: all_inputs = [v[0] for v in six.itervalues(values)] + [c[0] for c in six.itervalues(checks)] all_inputs = set([i for inp in all_inputs for i in inp]) extra_inputs = [i for i in all_inputs if i not in mem_ids] for i in extra_inputs: params[i] = (None, None, [], [], []) mem_ids[i] = len(members) members.append(i) # depends on [control=['for'], data=['i']] # create a graph of the dependencies: dep_edges = set([]) for (v, (inputs, _, _)) in six.iteritems(values): for i in inputs: dep_edges.add((mem_ids[v], mem_ids[i])) # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=[]] # get the transitive closure... 
deps = _imm_trans_clos(dep_edges) # we can put all the param and value deps into their appropriate places now for (dependant, dependency) in deps: if dependency is dependant: raise RuntimeError("circular dependency in immutable: value '%s'" % dependant) # depends on [control=['if'], data=['dependant']] (mdpcy, mdpdt) = (members[dependency], members[dependant]) if mdpcy in params: params[mdpcy][4].append(mdpdt) # depends on [control=['if'], data=['mdpcy', 'params']] elif mdpcy in values: values[mdpcy][2].append(mdpdt) # depends on [control=['if'], data=['mdpcy', 'values']] # depends on [control=['for'], data=[]] # last major task is to setup the checks deps2params = {v: set([]) for v in six.iterkeys(values)} for (p, pd) in six.iteritems(params): for v in pd[4]: deps2params[v].add(p) # depends on [control=['for'], data=['v']] # depends on [control=['for'], data=[]] deps2consts = {v: set([]) for v in six.iterkeys(values)} for c in six.iterkeys(consts): deps = values[c][2] for v in deps: deps2consts[v].add(c) # depends on [control=['for'], data=['v']] # depends on [control=['for'], data=['c']] for (c, (arg_list, check_fn)) in six.iteritems(checks): param_list = set([]) const_list = set([]) for a in arg_list: if a in params: param_list.add(a) # depends on [control=['if'], data=['a']] elif a in values: if a in consts: const_list.add(a) # depends on [control=['if'], data=['a']] else: param_list |= deps2params[a] const_list |= deps2consts[a] # depends on [control=['if'], data=['a']] else: raise RuntimeError('requirement %s requested non-member: %s' % (c, a)) # depends on [control=['for'], data=['a']] for p in param_list: params[p][2].append(arg_list) params[p][3].append(check_fn) # depends on [control=['for'], data=['p']] for c in const_list: consts[p][0].append(arg_list) consts[p][1].append(check_fn) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] # That's it; all data should be built at this point return cls
def list_configs(): ''' List all available configs CLI example: .. code-block:: bash salt '*' snapper.list_configs ''' try: configs = snapper.ListConfigs() return dict((config[0], config[2]) for config in configs) except dbus.DBusException as exc: raise CommandExecutionError( 'Error encountered while listing configurations: {0}' .format(_dbus_exception_to_reason(exc, locals())) )
def function[list_configs, parameter[]]: constant[ List all available configs CLI example: .. code-block:: bash salt '*' snapper.list_configs ] <ast.Try object at 0x7da2044c04f0>
keyword[def] identifier[list_configs] (): literal[string] keyword[try] : identifier[configs] = identifier[snapper] . identifier[ListConfigs] () keyword[return] identifier[dict] (( identifier[config] [ literal[int] ], identifier[config] [ literal[int] ]) keyword[for] identifier[config] keyword[in] identifier[configs] ) keyword[except] identifier[dbus] . identifier[DBusException] keyword[as] identifier[exc] : keyword[raise] identifier[CommandExecutionError] ( literal[string] . identifier[format] ( identifier[_dbus_exception_to_reason] ( identifier[exc] , identifier[locals] ())) )
def list_configs(): """ List all available configs CLI example: .. code-block:: bash salt '*' snapper.list_configs """ try: configs = snapper.ListConfigs() return dict(((config[0], config[2]) for config in configs)) # depends on [control=['try'], data=[]] except dbus.DBusException as exc: raise CommandExecutionError('Error encountered while listing configurations: {0}'.format(_dbus_exception_to_reason(exc, locals()))) # depends on [control=['except'], data=['exc']]
def is_diacritic(char, strict=True): """ Check whether the character is a diacritic (as opposed to a letter or a suprasegmental). In strict mode return True only if the diacritic is part of the IPA spec. """ if char in chart.diacritics: return True if not strict: return (unicodedata.category(char) in ['Lm', 'Mn', 'Sk']) \ and (not is_suprasegmental(char)) \ and (not is_tie_bar(char)) \ and (not 0xA700 <= ord(char) <= 0xA71F) return False
def function[is_diacritic, parameter[char, strict]]: constant[ Check whether the character is a diacritic (as opposed to a letter or a suprasegmental). In strict mode return True only if the diacritic is part of the IPA spec. ] if compare[name[char] in name[chart].diacritics] begin[:] return[constant[True]] if <ast.UnaryOp object at 0x7da1b15f2c80> begin[:] return[<ast.BoolOp object at 0x7da1b15f0b50>] return[constant[False]]
keyword[def] identifier[is_diacritic] ( identifier[char] , identifier[strict] = keyword[True] ): literal[string] keyword[if] identifier[char] keyword[in] identifier[chart] . identifier[diacritics] : keyword[return] keyword[True] keyword[if] keyword[not] identifier[strict] : keyword[return] ( identifier[unicodedata] . identifier[category] ( identifier[char] ) keyword[in] [ literal[string] , literal[string] , literal[string] ]) keyword[and] ( keyword[not] identifier[is_suprasegmental] ( identifier[char] )) keyword[and] ( keyword[not] identifier[is_tie_bar] ( identifier[char] )) keyword[and] ( keyword[not] literal[int] <= identifier[ord] ( identifier[char] )<= literal[int] ) keyword[return] keyword[False]
def is_diacritic(char, strict=True): """ Check whether the character is a diacritic (as opposed to a letter or a suprasegmental). In strict mode return True only if the diacritic is part of the IPA spec. """ if char in chart.diacritics: return True # depends on [control=['if'], data=[]] if not strict: return unicodedata.category(char) in ['Lm', 'Mn', 'Sk'] and (not is_suprasegmental(char)) and (not is_tie_bar(char)) and (not 42752 <= ord(char) <= 42783) # depends on [control=['if'], data=[]] return False
def _parse_blocks(instream): """Parse an alignment block from the given file handle. Block looks like: [0_(1)=fa2cma(8){go=10000,gx=2000,pn=1000.0,lf=0,rf=0}: (209)***********************************************... ... sequences, numbered 1-8 ... _0]. """ ilines = sugar.unblank(instream) for line in ilines: if line.startswith('['): # Start of block level, one, name, seqcount, params = _parse_block_header(line) qlen, qchars = _parse_block_postheader(next(ilines)) # Pass control to the sequence parser sequences = list(_parse_sequences(ilines, qlen)) # Validation if not len(sequences) == seqcount: logging.warn("Expected %d sequences in block %s, found %d", seqcount, name, len(sequences)) yield {'level': level, 'one': one, 'name': name, # 'seqcount': seqcount, 'params': params, 'query_length': qlen, 'query_chars': qchars, 'sequences': sequences, }
def function[_parse_blocks, parameter[instream]]: constant[Parse an alignment block from the given file handle. Block looks like: [0_(1)=fa2cma(8){go=10000,gx=2000,pn=1000.0,lf=0,rf=0}: (209)***********************************************... ... sequences, numbered 1-8 ... _0]. ] variable[ilines] assign[=] call[name[sugar].unblank, parameter[name[instream]]] for taget[name[line]] in starred[name[ilines]] begin[:] if call[name[line].startswith, parameter[constant[[]]] begin[:] <ast.Tuple object at 0x7da20e956c20> assign[=] call[name[_parse_block_header], parameter[name[line]]] <ast.Tuple object at 0x7da20e954490> assign[=] call[name[_parse_block_postheader], parameter[call[name[next], parameter[name[ilines]]]]] variable[sequences] assign[=] call[name[list], parameter[call[name[_parse_sequences], parameter[name[ilines], name[qlen]]]]] if <ast.UnaryOp object at 0x7da20e955c90> begin[:] call[name[logging].warn, parameter[constant[Expected %d sequences in block %s, found %d], name[seqcount], name[name], call[name[len], parameter[name[sequences]]]]] <ast.Yield object at 0x7da20e957490>
keyword[def] identifier[_parse_blocks] ( identifier[instream] ): literal[string] identifier[ilines] = identifier[sugar] . identifier[unblank] ( identifier[instream] ) keyword[for] identifier[line] keyword[in] identifier[ilines] : keyword[if] identifier[line] . identifier[startswith] ( literal[string] ): identifier[level] , identifier[one] , identifier[name] , identifier[seqcount] , identifier[params] = identifier[_parse_block_header] ( identifier[line] ) identifier[qlen] , identifier[qchars] = identifier[_parse_block_postheader] ( identifier[next] ( identifier[ilines] )) identifier[sequences] = identifier[list] ( identifier[_parse_sequences] ( identifier[ilines] , identifier[qlen] )) keyword[if] keyword[not] identifier[len] ( identifier[sequences] )== identifier[seqcount] : identifier[logging] . identifier[warn] ( literal[string] , identifier[seqcount] , identifier[name] , identifier[len] ( identifier[sequences] )) keyword[yield] { literal[string] : identifier[level] , literal[string] : identifier[one] , literal[string] : identifier[name] , literal[string] : identifier[params] , literal[string] : identifier[qlen] , literal[string] : identifier[qchars] , literal[string] : identifier[sequences] , }
def _parse_blocks(instream): """Parse an alignment block from the given file handle. Block looks like: [0_(1)=fa2cma(8){go=10000,gx=2000,pn=1000.0,lf=0,rf=0}: (209)***********************************************... ... sequences, numbered 1-8 ... _0]. """ ilines = sugar.unblank(instream) for line in ilines: if line.startswith('['): # Start of block (level, one, name, seqcount, params) = _parse_block_header(line) (qlen, qchars) = _parse_block_postheader(next(ilines)) # Pass control to the sequence parser sequences = list(_parse_sequences(ilines, qlen)) # Validation if not len(sequences) == seqcount: logging.warn('Expected %d sequences in block %s, found %d', seqcount, name, len(sequences)) # depends on [control=['if'], data=[]] # 'seqcount': seqcount, yield {'level': level, 'one': one, 'name': name, 'params': params, 'query_length': qlen, 'query_chars': qchars, 'sequences': sequences} # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
async def on_step(self, iteration): self.combinedActions = [] """ - depots when low on remaining supply - townhalls contains commandcenter and orbitalcommand - self.units(TYPE).not_ready.amount selects all units of that type, filters incomplete units, and then counts the amount - self.already_pending(TYPE) counts how many units are queued - but in this bot below you will find a slightly different already_pending function which only counts units queued (but not in construction) """ if self.supply_left < 5 and self.townhalls.exists and self.supply_used >= 14 and self.can_afford(UnitTypeId.SUPPLYDEPOT) and self.units(UnitTypeId.SUPPLYDEPOT).not_ready.amount + self.already_pending(UnitTypeId.SUPPLYDEPOT) < 1: ws = self.workers.gathering if ws: # if workers found w = ws.furthest_to(ws.center) loc = await self.find_placement(UnitTypeId.SUPPLYDEPOT, w.position, placement_step=3) if loc: # if a placement location was found # build exactly on that location self.combinedActions.append(w.build(UnitTypeId.SUPPLYDEPOT, loc)) # lower all depots when finished for depot in self.units(UnitTypeId.SUPPLYDEPOT).ready: self.combinedActions.append(depot(AbilityId.MORPH_SUPPLYDEPOT_LOWER)) # morph commandcenter to orbitalcommand if self.units(UnitTypeId.BARRACKS).ready.exists and self.can_afford(UnitTypeId.ORBITALCOMMAND): # check if orbital is affordable for cc in self.units(UnitTypeId.COMMANDCENTER).idle: # .idle filters idle command centers self.combinedActions.append(cc(AbilityId.UPGRADETOORBITAL_ORBITALCOMMAND)) # expand if we can afford and have less than 2 bases if 1 <= self.townhalls.amount < 2 and self.already_pending(UnitTypeId.COMMANDCENTER) == 0 and self.can_afford(UnitTypeId.COMMANDCENTER): # get_next_expansion returns the center of the mineral fields of the next nearby expansion next_expo = await self.get_next_expansion() # from the center of mineral fields, we need to find a valid place to place the command center location = await 
self.find_placement(UnitTypeId.COMMANDCENTER, next_expo, placement_step=1) if location: # now we "select" (or choose) the nearest worker to that found location w = self.select_build_worker(location) if w and self.can_afford(UnitTypeId.COMMANDCENTER): # the worker will be commanded to build the command center error = await self.do(w.build(UnitTypeId.COMMANDCENTER, location)) if error: print(error) # make up to 4 barracks if we can afford them # check if we have a supply depot (tech requirement) before trying to make barracks if self.units.of_type([UnitTypeId.SUPPLYDEPOT, UnitTypeId.SUPPLYDEPOTLOWERED, UnitTypeId.SUPPLYDEPOTDROP]).ready.exists and self.units(UnitTypeId.BARRACKS).amount + self.already_pending(UnitTypeId.BARRACKS) < 4 and self.can_afford(UnitTypeId.BARRACKS): ws = self.workers.gathering if ws and self.townhalls.exists: # need to check if townhalls.amount > 0 because placement is based on townhall location w = ws.furthest_to(ws.center) # I chose placement_step 4 here so there will be gaps between barracks hopefully loc = await self.find_placement(UnitTypeId.BARRACKS, self.townhalls.random.position, placement_step=4) if loc: self.combinedActions.append(w.build(UnitTypeId.BARRACKS, loc)) # build refineries (on nearby vespene) when at least one barracks is in construction if self.units(UnitTypeId.BARRACKS).amount > 0 and self.already_pending(UnitTypeId.REFINERY) < 1: for th in self.townhalls: vgs = self.state.vespene_geyser.closer_than(10, th) for vg in vgs: if await self.can_place(UnitTypeId.REFINERY, vg.position) and self.can_afford(UnitTypeId.REFINERY): ws = self.workers.gathering if ws.exists: # same condition as above w = ws.closest_to(vg) # caution: the target for the refinery has to be the vespene geyser, not its position! 
self.combinedActions.append(w.build(UnitTypeId.REFINERY, vg)) # make scvs until 18, usually you only need 1:1 mineral:gas ratio for reapers, but if you don't lose any then you will need additional depots (mule income should take care of that) # stop scv production when barracks is complete but we still have a command cender (priotize morphing to orbital command) if self.can_afford(UnitTypeId.SCV) and self.supply_left > 0 and self.units(UnitTypeId.SCV).amount < 18 and (self.units(UnitTypeId.BARRACKS).ready.amount < 1 and self.units(UnitTypeId.COMMANDCENTER).idle.exists or self.units(UnitTypeId.ORBITALCOMMAND).idle.exists): for th in self.townhalls.idle: self.combinedActions.append(th.train(UnitTypeId.SCV)) # make reapers if we can afford them and we have supply remaining if self.can_afford(UnitTypeId.REAPER) and self.supply_left > 0: # loop through all idle barracks for rax in self.units(UnitTypeId.BARRACKS).idle: self.combinedActions.append(rax.train(UnitTypeId.REAPER)) # send workers to mine from gas if iteration % 25 == 0: await self.distribute_workers() # reaper micro for r in self.units(UnitTypeId.REAPER): # move to range 15 of closest unit if reaper is below 20 hp and not regenerating enemyThreatsClose = self.known_enemy_units.filter(lambda x: x.can_attack_ground).closer_than(15, r) # threats that can attack the reaper if r.health_percentage < 2/5 and enemyThreatsClose.exists: retreatPoints = self.neighbors8(r.position, distance=2) | self.neighbors8(r.position, distance=4) # filter points that are pathable retreatPoints = {x for x in retreatPoints if self.inPathingGrid(x)} if retreatPoints: closestEnemy = enemyThreatsClose.closest_to(r) retreatPoint = closestEnemy.position.furthest(retreatPoints) self.combinedActions.append(r.move(retreatPoint)) continue # continue for loop, dont execute any of the following # reaper is ready to attack, shoot nearest ground unit enemyGroundUnits = self.known_enemy_units.not_flying.closer_than(5, r) # hardcoded attackrange of 5 
if r.weapon_cooldown == 0 and enemyGroundUnits.exists: enemyGroundUnits = enemyGroundUnits.sorted(lambda x: x.distance_to(r)) closestEnemy = enemyGroundUnits[0] self.combinedActions.append(r.attack(closestEnemy)) continue # continue for loop, dont execute any of the following # attack is on cooldown, check if grenade is on cooldown, if not then throw it to furthest enemy in range 5 reaperGrenadeRange = self._game_data.abilities[AbilityId.KD8CHARGE_KD8CHARGE.value]._proto.cast_range enemyGroundUnitsInGrenadeRange = self.known_enemy_units.not_structure.not_flying.exclude_type([UnitTypeId.LARVA, UnitTypeId.EGG]).closer_than(reaperGrenadeRange, r) if enemyGroundUnitsInGrenadeRange.exists and (r.is_attacking or r.is_moving): # if AbilityId.KD8CHARGE_KD8CHARGE in abilities, we check that to see if the reaper grenade is off cooldown abilities = (await self.get_available_abilities(r)) enemyGroundUnitsInGrenadeRange = enemyGroundUnitsInGrenadeRange.sorted(lambda x: x.distance_to(r), reverse=True) furthestEnemy = None for enemy in enemyGroundUnitsInGrenadeRange: if await self.can_cast(r, AbilityId.KD8CHARGE_KD8CHARGE, enemy, cached_abilities_of_unit=abilities): furthestEnemy = enemy break if furthestEnemy: self.combinedActions.append(r(AbilityId.KD8CHARGE_KD8CHARGE, furthestEnemy)) continue # continue for loop, don't execute any of the following # move towards to max unit range if enemy is closer than 4 enemyThreatsVeryClose = self.known_enemy_units.filter(lambda x: x.can_attack_ground).closer_than(4.5, r) # hardcoded attackrange minus 0.5 # threats that can attack the reaper if r.weapon_cooldown != 0 and enemyThreatsVeryClose.exists: retreatPoints = self.neighbors8(r.position, distance=2) | self.neighbors8(r.position, distance=4) # filter points that are pathable by a reaper retreatPoints = {x for x in retreatPoints if self.inPathingGrid(x)} if retreatPoints: closestEnemy = enemyThreatsVeryClose.closest_to(r) retreatPoint = max(retreatPoints, key=lambda x: 
x.distance_to(closestEnemy) - x.distance_to(r)) # retreatPoint = closestEnemy.position.furthest(retreatPoints) self.combinedActions.append(r.move(retreatPoint)) continue # continue for loop, don't execute any of the following # move to nearest enemy ground unit/building because no enemy unit is closer than 5 allEnemyGroundUnits = self.known_enemy_units.not_flying if allEnemyGroundUnits.exists: closestEnemy = allEnemyGroundUnits.closest_to(r) self.combinedActions.append(r.move(closestEnemy)) continue # continue for loop, don't execute any of the following # move to random enemy start location if no enemy buildings have been seen self.combinedActions.append(r.move(random.choice(self.enemy_start_locations))) # manage idle scvs, would be taken care by distribute workers aswell if self.townhalls.exists: for w in self.workers.idle: th = self.townhalls.closest_to(w) mfs = self.state.mineral_field.closer_than(10, th) if mfs: mf = mfs.closest_to(w) self.combinedActions.append(w.gather(mf)) # manage orbital energy and drop mules for oc in self.units(UnitTypeId.ORBITALCOMMAND).filter(lambda x: x.energy >= 50): mfs = self.state.mineral_field.closer_than(10, oc) if mfs: mf = max(mfs, key=lambda x:x.mineral_contents) self.combinedActions.append(oc(AbilityId.CALLDOWNMULE_CALLDOWNMULE, mf)) # when running out of mineral fields near command center, fly to next base with minerals # execuite actions await self.do_actions(self.combinedActions)
<ast.AsyncFunctionDef object at 0x7da204567e50>
keyword[async] keyword[def] identifier[on_step] ( identifier[self] , identifier[iteration] ): identifier[self] . identifier[combinedActions] =[] literal[string] keyword[if] identifier[self] . identifier[supply_left] < literal[int] keyword[and] identifier[self] . identifier[townhalls] . identifier[exists] keyword[and] identifier[self] . identifier[supply_used] >= literal[int] keyword[and] identifier[self] . identifier[can_afford] ( identifier[UnitTypeId] . identifier[SUPPLYDEPOT] ) keyword[and] identifier[self] . identifier[units] ( identifier[UnitTypeId] . identifier[SUPPLYDEPOT] ). identifier[not_ready] . identifier[amount] + identifier[self] . identifier[already_pending] ( identifier[UnitTypeId] . identifier[SUPPLYDEPOT] )< literal[int] : identifier[ws] = identifier[self] . identifier[workers] . identifier[gathering] keyword[if] identifier[ws] : identifier[w] = identifier[ws] . identifier[furthest_to] ( identifier[ws] . identifier[center] ) identifier[loc] = keyword[await] identifier[self] . identifier[find_placement] ( identifier[UnitTypeId] . identifier[SUPPLYDEPOT] , identifier[w] . identifier[position] , identifier[placement_step] = literal[int] ) keyword[if] identifier[loc] : identifier[self] . identifier[combinedActions] . identifier[append] ( identifier[w] . identifier[build] ( identifier[UnitTypeId] . identifier[SUPPLYDEPOT] , identifier[loc] )) keyword[for] identifier[depot] keyword[in] identifier[self] . identifier[units] ( identifier[UnitTypeId] . identifier[SUPPLYDEPOT] ). identifier[ready] : identifier[self] . identifier[combinedActions] . identifier[append] ( identifier[depot] ( identifier[AbilityId] . identifier[MORPH_SUPPLYDEPOT_LOWER] )) keyword[if] identifier[self] . identifier[units] ( identifier[UnitTypeId] . identifier[BARRACKS] ). identifier[ready] . identifier[exists] keyword[and] identifier[self] . identifier[can_afford] ( identifier[UnitTypeId] . identifier[ORBITALCOMMAND] ): keyword[for] identifier[cc] keyword[in] identifier[self] . 
identifier[units] ( identifier[UnitTypeId] . identifier[COMMANDCENTER] ). identifier[idle] : identifier[self] . identifier[combinedActions] . identifier[append] ( identifier[cc] ( identifier[AbilityId] . identifier[UPGRADETOORBITAL_ORBITALCOMMAND] )) keyword[if] literal[int] <= identifier[self] . identifier[townhalls] . identifier[amount] < literal[int] keyword[and] identifier[self] . identifier[already_pending] ( identifier[UnitTypeId] . identifier[COMMANDCENTER] )== literal[int] keyword[and] identifier[self] . identifier[can_afford] ( identifier[UnitTypeId] . identifier[COMMANDCENTER] ): identifier[next_expo] = keyword[await] identifier[self] . identifier[get_next_expansion] () identifier[location] = keyword[await] identifier[self] . identifier[find_placement] ( identifier[UnitTypeId] . identifier[COMMANDCENTER] , identifier[next_expo] , identifier[placement_step] = literal[int] ) keyword[if] identifier[location] : identifier[w] = identifier[self] . identifier[select_build_worker] ( identifier[location] ) keyword[if] identifier[w] keyword[and] identifier[self] . identifier[can_afford] ( identifier[UnitTypeId] . identifier[COMMANDCENTER] ): identifier[error] = keyword[await] identifier[self] . identifier[do] ( identifier[w] . identifier[build] ( identifier[UnitTypeId] . identifier[COMMANDCENTER] , identifier[location] )) keyword[if] identifier[error] : identifier[print] ( identifier[error] ) keyword[if] identifier[self] . identifier[units] . identifier[of_type] ([ identifier[UnitTypeId] . identifier[SUPPLYDEPOT] , identifier[UnitTypeId] . identifier[SUPPLYDEPOTLOWERED] , identifier[UnitTypeId] . identifier[SUPPLYDEPOTDROP] ]). identifier[ready] . identifier[exists] keyword[and] identifier[self] . identifier[units] ( identifier[UnitTypeId] . identifier[BARRACKS] ). identifier[amount] + identifier[self] . identifier[already_pending] ( identifier[UnitTypeId] . identifier[BARRACKS] )< literal[int] keyword[and] identifier[self] . 
identifier[can_afford] ( identifier[UnitTypeId] . identifier[BARRACKS] ): identifier[ws] = identifier[self] . identifier[workers] . identifier[gathering] keyword[if] identifier[ws] keyword[and] identifier[self] . identifier[townhalls] . identifier[exists] : identifier[w] = identifier[ws] . identifier[furthest_to] ( identifier[ws] . identifier[center] ) identifier[loc] = keyword[await] identifier[self] . identifier[find_placement] ( identifier[UnitTypeId] . identifier[BARRACKS] , identifier[self] . identifier[townhalls] . identifier[random] . identifier[position] , identifier[placement_step] = literal[int] ) keyword[if] identifier[loc] : identifier[self] . identifier[combinedActions] . identifier[append] ( identifier[w] . identifier[build] ( identifier[UnitTypeId] . identifier[BARRACKS] , identifier[loc] )) keyword[if] identifier[self] . identifier[units] ( identifier[UnitTypeId] . identifier[BARRACKS] ). identifier[amount] > literal[int] keyword[and] identifier[self] . identifier[already_pending] ( identifier[UnitTypeId] . identifier[REFINERY] )< literal[int] : keyword[for] identifier[th] keyword[in] identifier[self] . identifier[townhalls] : identifier[vgs] = identifier[self] . identifier[state] . identifier[vespene_geyser] . identifier[closer_than] ( literal[int] , identifier[th] ) keyword[for] identifier[vg] keyword[in] identifier[vgs] : keyword[if] keyword[await] identifier[self] . identifier[can_place] ( identifier[UnitTypeId] . identifier[REFINERY] , identifier[vg] . identifier[position] ) keyword[and] identifier[self] . identifier[can_afford] ( identifier[UnitTypeId] . identifier[REFINERY] ): identifier[ws] = identifier[self] . identifier[workers] . identifier[gathering] keyword[if] identifier[ws] . identifier[exists] : identifier[w] = identifier[ws] . identifier[closest_to] ( identifier[vg] ) identifier[self] . identifier[combinedActions] . identifier[append] ( identifier[w] . identifier[build] ( identifier[UnitTypeId] . 
identifier[REFINERY] , identifier[vg] )) keyword[if] identifier[self] . identifier[can_afford] ( identifier[UnitTypeId] . identifier[SCV] ) keyword[and] identifier[self] . identifier[supply_left] > literal[int] keyword[and] identifier[self] . identifier[units] ( identifier[UnitTypeId] . identifier[SCV] ). identifier[amount] < literal[int] keyword[and] ( identifier[self] . identifier[units] ( identifier[UnitTypeId] . identifier[BARRACKS] ). identifier[ready] . identifier[amount] < literal[int] keyword[and] identifier[self] . identifier[units] ( identifier[UnitTypeId] . identifier[COMMANDCENTER] ). identifier[idle] . identifier[exists] keyword[or] identifier[self] . identifier[units] ( identifier[UnitTypeId] . identifier[ORBITALCOMMAND] ). identifier[idle] . identifier[exists] ): keyword[for] identifier[th] keyword[in] identifier[self] . identifier[townhalls] . identifier[idle] : identifier[self] . identifier[combinedActions] . identifier[append] ( identifier[th] . identifier[train] ( identifier[UnitTypeId] . identifier[SCV] )) keyword[if] identifier[self] . identifier[can_afford] ( identifier[UnitTypeId] . identifier[REAPER] ) keyword[and] identifier[self] . identifier[supply_left] > literal[int] : keyword[for] identifier[rax] keyword[in] identifier[self] . identifier[units] ( identifier[UnitTypeId] . identifier[BARRACKS] ). identifier[idle] : identifier[self] . identifier[combinedActions] . identifier[append] ( identifier[rax] . identifier[train] ( identifier[UnitTypeId] . identifier[REAPER] )) keyword[if] identifier[iteration] % literal[int] == literal[int] : keyword[await] identifier[self] . identifier[distribute_workers] () keyword[for] identifier[r] keyword[in] identifier[self] . identifier[units] ( identifier[UnitTypeId] . identifier[REAPER] ): identifier[enemyThreatsClose] = identifier[self] . identifier[known_enemy_units] . identifier[filter] ( keyword[lambda] identifier[x] : identifier[x] . identifier[can_attack_ground] ). 
identifier[closer_than] ( literal[int] , identifier[r] ) keyword[if] identifier[r] . identifier[health_percentage] < literal[int] / literal[int] keyword[and] identifier[enemyThreatsClose] . identifier[exists] : identifier[retreatPoints] = identifier[self] . identifier[neighbors8] ( identifier[r] . identifier[position] , identifier[distance] = literal[int] )| identifier[self] . identifier[neighbors8] ( identifier[r] . identifier[position] , identifier[distance] = literal[int] ) identifier[retreatPoints] ={ identifier[x] keyword[for] identifier[x] keyword[in] identifier[retreatPoints] keyword[if] identifier[self] . identifier[inPathingGrid] ( identifier[x] )} keyword[if] identifier[retreatPoints] : identifier[closestEnemy] = identifier[enemyThreatsClose] . identifier[closest_to] ( identifier[r] ) identifier[retreatPoint] = identifier[closestEnemy] . identifier[position] . identifier[furthest] ( identifier[retreatPoints] ) identifier[self] . identifier[combinedActions] . identifier[append] ( identifier[r] . identifier[move] ( identifier[retreatPoint] )) keyword[continue] identifier[enemyGroundUnits] = identifier[self] . identifier[known_enemy_units] . identifier[not_flying] . identifier[closer_than] ( literal[int] , identifier[r] ) keyword[if] identifier[r] . identifier[weapon_cooldown] == literal[int] keyword[and] identifier[enemyGroundUnits] . identifier[exists] : identifier[enemyGroundUnits] = identifier[enemyGroundUnits] . identifier[sorted] ( keyword[lambda] identifier[x] : identifier[x] . identifier[distance_to] ( identifier[r] )) identifier[closestEnemy] = identifier[enemyGroundUnits] [ literal[int] ] identifier[self] . identifier[combinedActions] . identifier[append] ( identifier[r] . identifier[attack] ( identifier[closestEnemy] )) keyword[continue] identifier[reaperGrenadeRange] = identifier[self] . identifier[_game_data] . identifier[abilities] [ identifier[AbilityId] . identifier[KD8CHARGE_KD8CHARGE] . identifier[value] ]. identifier[_proto] . 
identifier[cast_range] identifier[enemyGroundUnitsInGrenadeRange] = identifier[self] . identifier[known_enemy_units] . identifier[not_structure] . identifier[not_flying] . identifier[exclude_type] ([ identifier[UnitTypeId] . identifier[LARVA] , identifier[UnitTypeId] . identifier[EGG] ]). identifier[closer_than] ( identifier[reaperGrenadeRange] , identifier[r] ) keyword[if] identifier[enemyGroundUnitsInGrenadeRange] . identifier[exists] keyword[and] ( identifier[r] . identifier[is_attacking] keyword[or] identifier[r] . identifier[is_moving] ): identifier[abilities] =( keyword[await] identifier[self] . identifier[get_available_abilities] ( identifier[r] )) identifier[enemyGroundUnitsInGrenadeRange] = identifier[enemyGroundUnitsInGrenadeRange] . identifier[sorted] ( keyword[lambda] identifier[x] : identifier[x] . identifier[distance_to] ( identifier[r] ), identifier[reverse] = keyword[True] ) identifier[furthestEnemy] = keyword[None] keyword[for] identifier[enemy] keyword[in] identifier[enemyGroundUnitsInGrenadeRange] : keyword[if] keyword[await] identifier[self] . identifier[can_cast] ( identifier[r] , identifier[AbilityId] . identifier[KD8CHARGE_KD8CHARGE] , identifier[enemy] , identifier[cached_abilities_of_unit] = identifier[abilities] ): identifier[furthestEnemy] = identifier[enemy] keyword[break] keyword[if] identifier[furthestEnemy] : identifier[self] . identifier[combinedActions] . identifier[append] ( identifier[r] ( identifier[AbilityId] . identifier[KD8CHARGE_KD8CHARGE] , identifier[furthestEnemy] )) keyword[continue] identifier[enemyThreatsVeryClose] = identifier[self] . identifier[known_enemy_units] . identifier[filter] ( keyword[lambda] identifier[x] : identifier[x] . identifier[can_attack_ground] ). identifier[closer_than] ( literal[int] , identifier[r] ) keyword[if] identifier[r] . identifier[weapon_cooldown] != literal[int] keyword[and] identifier[enemyThreatsVeryClose] . identifier[exists] : identifier[retreatPoints] = identifier[self] . 
identifier[neighbors8] ( identifier[r] . identifier[position] , identifier[distance] = literal[int] )| identifier[self] . identifier[neighbors8] ( identifier[r] . identifier[position] , identifier[distance] = literal[int] ) identifier[retreatPoints] ={ identifier[x] keyword[for] identifier[x] keyword[in] identifier[retreatPoints] keyword[if] identifier[self] . identifier[inPathingGrid] ( identifier[x] )} keyword[if] identifier[retreatPoints] : identifier[closestEnemy] = identifier[enemyThreatsVeryClose] . identifier[closest_to] ( identifier[r] ) identifier[retreatPoint] = identifier[max] ( identifier[retreatPoints] , identifier[key] = keyword[lambda] identifier[x] : identifier[x] . identifier[distance_to] ( identifier[closestEnemy] )- identifier[x] . identifier[distance_to] ( identifier[r] )) identifier[self] . identifier[combinedActions] . identifier[append] ( identifier[r] . identifier[move] ( identifier[retreatPoint] )) keyword[continue] identifier[allEnemyGroundUnits] = identifier[self] . identifier[known_enemy_units] . identifier[not_flying] keyword[if] identifier[allEnemyGroundUnits] . identifier[exists] : identifier[closestEnemy] = identifier[allEnemyGroundUnits] . identifier[closest_to] ( identifier[r] ) identifier[self] . identifier[combinedActions] . identifier[append] ( identifier[r] . identifier[move] ( identifier[closestEnemy] )) keyword[continue] identifier[self] . identifier[combinedActions] . identifier[append] ( identifier[r] . identifier[move] ( identifier[random] . identifier[choice] ( identifier[self] . identifier[enemy_start_locations] ))) keyword[if] identifier[self] . identifier[townhalls] . identifier[exists] : keyword[for] identifier[w] keyword[in] identifier[self] . identifier[workers] . identifier[idle] : identifier[th] = identifier[self] . identifier[townhalls] . identifier[closest_to] ( identifier[w] ) identifier[mfs] = identifier[self] . identifier[state] . identifier[mineral_field] . 
identifier[closer_than] ( literal[int] , identifier[th] ) keyword[if] identifier[mfs] : identifier[mf] = identifier[mfs] . identifier[closest_to] ( identifier[w] ) identifier[self] . identifier[combinedActions] . identifier[append] ( identifier[w] . identifier[gather] ( identifier[mf] )) keyword[for] identifier[oc] keyword[in] identifier[self] . identifier[units] ( identifier[UnitTypeId] . identifier[ORBITALCOMMAND] ). identifier[filter] ( keyword[lambda] identifier[x] : identifier[x] . identifier[energy] >= literal[int] ): identifier[mfs] = identifier[self] . identifier[state] . identifier[mineral_field] . identifier[closer_than] ( literal[int] , identifier[oc] ) keyword[if] identifier[mfs] : identifier[mf] = identifier[max] ( identifier[mfs] , identifier[key] = keyword[lambda] identifier[x] : identifier[x] . identifier[mineral_contents] ) identifier[self] . identifier[combinedActions] . identifier[append] ( identifier[oc] ( identifier[AbilityId] . identifier[CALLDOWNMULE_CALLDOWNMULE] , identifier[mf] )) keyword[await] identifier[self] . identifier[do_actions] ( identifier[self] . identifier[combinedActions] )
async def on_step(self, iteration): self.combinedActions = [] '\n - depots when low on remaining supply\n - townhalls contains commandcenter and orbitalcommand\n - self.units(TYPE).not_ready.amount selects all units of that type, filters incomplete units, and then counts the amount\n - self.already_pending(TYPE) counts how many units are queued - but in this bot below you will find a slightly different already_pending function which only counts units queued (but not in construction)\n ' if self.supply_left < 5 and self.townhalls.exists and (self.supply_used >= 14) and self.can_afford(UnitTypeId.SUPPLYDEPOT) and (self.units(UnitTypeId.SUPPLYDEPOT).not_ready.amount + self.already_pending(UnitTypeId.SUPPLYDEPOT) < 1): ws = self.workers.gathering if ws: # if workers found w = ws.furthest_to(ws.center) loc = await self.find_placement(UnitTypeId.SUPPLYDEPOT, w.position, placement_step=3) if loc: # if a placement location was found # build exactly on that location self.combinedActions.append(w.build(UnitTypeId.SUPPLYDEPOT, loc)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # lower all depots when finished for depot in self.units(UnitTypeId.SUPPLYDEPOT).ready: self.combinedActions.append(depot(AbilityId.MORPH_SUPPLYDEPOT_LOWER)) # depends on [control=['for'], data=['depot']] # morph commandcenter to orbitalcommand if self.units(UnitTypeId.BARRACKS).ready.exists and self.can_afford(UnitTypeId.ORBITALCOMMAND): # check if orbital is affordable for cc in self.units(UnitTypeId.COMMANDCENTER).idle: # .idle filters idle command centers self.combinedActions.append(cc(AbilityId.UPGRADETOORBITAL_ORBITALCOMMAND)) # depends on [control=['for'], data=['cc']] # depends on [control=['if'], data=[]] # expand if we can afford and have less than 2 bases if 1 <= self.townhalls.amount < 2 and self.already_pending(UnitTypeId.COMMANDCENTER) == 0 and self.can_afford(UnitTypeId.COMMANDCENTER): # get_next_expansion returns the 
center of the mineral fields of the next nearby expansion next_expo = await self.get_next_expansion() # from the center of mineral fields, we need to find a valid place to place the command center location = await self.find_placement(UnitTypeId.COMMANDCENTER, next_expo, placement_step=1) if location: # now we "select" (or choose) the nearest worker to that found location w = self.select_build_worker(location) if w and self.can_afford(UnitTypeId.COMMANDCENTER): # the worker will be commanded to build the command center error = await self.do(w.build(UnitTypeId.COMMANDCENTER, location)) if error: print(error) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # make up to 4 barracks if we can afford them # check if we have a supply depot (tech requirement) before trying to make barracks if self.units.of_type([UnitTypeId.SUPPLYDEPOT, UnitTypeId.SUPPLYDEPOTLOWERED, UnitTypeId.SUPPLYDEPOTDROP]).ready.exists and self.units(UnitTypeId.BARRACKS).amount + self.already_pending(UnitTypeId.BARRACKS) < 4 and self.can_afford(UnitTypeId.BARRACKS): ws = self.workers.gathering if ws and self.townhalls.exists: # need to check if townhalls.amount > 0 because placement is based on townhall location w = ws.furthest_to(ws.center) # I chose placement_step 4 here so there will be gaps between barracks hopefully loc = await self.find_placement(UnitTypeId.BARRACKS, self.townhalls.random.position, placement_step=4) if loc: self.combinedActions.append(w.build(UnitTypeId.BARRACKS, loc)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # build refineries (on nearby vespene) when at least one barracks is in construction if self.units(UnitTypeId.BARRACKS).amount > 0 and self.already_pending(UnitTypeId.REFINERY) < 1: for th in self.townhalls: vgs = self.state.vespene_geyser.closer_than(10, th) for vg in vgs: if await 
self.can_place(UnitTypeId.REFINERY, vg.position) and self.can_afford(UnitTypeId.REFINERY): ws = self.workers.gathering if ws.exists: # same condition as above w = ws.closest_to(vg) # caution: the target for the refinery has to be the vespene geyser, not its position! self.combinedActions.append(w.build(UnitTypeId.REFINERY, vg)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['vg']] # depends on [control=['for'], data=['th']] # depends on [control=['if'], data=[]] # make scvs until 18, usually you only need 1:1 mineral:gas ratio for reapers, but if you don't lose any then you will need additional depots (mule income should take care of that) # stop scv production when barracks is complete but we still have a command cender (priotize morphing to orbital command) if self.can_afford(UnitTypeId.SCV) and self.supply_left > 0 and (self.units(UnitTypeId.SCV).amount < 18) and (self.units(UnitTypeId.BARRACKS).ready.amount < 1 and self.units(UnitTypeId.COMMANDCENTER).idle.exists or self.units(UnitTypeId.ORBITALCOMMAND).idle.exists): for th in self.townhalls.idle: self.combinedActions.append(th.train(UnitTypeId.SCV)) # depends on [control=['for'], data=['th']] # depends on [control=['if'], data=[]] # make reapers if we can afford them and we have supply remaining if self.can_afford(UnitTypeId.REAPER) and self.supply_left > 0: # loop through all idle barracks for rax in self.units(UnitTypeId.BARRACKS).idle: self.combinedActions.append(rax.train(UnitTypeId.REAPER)) # depends on [control=['for'], data=['rax']] # depends on [control=['if'], data=[]] # send workers to mine from gas if iteration % 25 == 0: await self.distribute_workers() # depends on [control=['if'], data=[]] # reaper micro for r in self.units(UnitTypeId.REAPER): # move to range 15 of closest unit if reaper is below 20 hp and not regenerating enemyThreatsClose = self.known_enemy_units.filter(lambda x: x.can_attack_ground).closer_than(15, r) # threats 
that can attack the reaper if r.health_percentage < 2 / 5 and enemyThreatsClose.exists: retreatPoints = self.neighbors8(r.position, distance=2) | self.neighbors8(r.position, distance=4) # filter points that are pathable retreatPoints = {x for x in retreatPoints if self.inPathingGrid(x)} if retreatPoints: closestEnemy = enemyThreatsClose.closest_to(r) retreatPoint = closestEnemy.position.furthest(retreatPoints) self.combinedActions.append(r.move(retreatPoint)) continue # continue for loop, dont execute any of the following # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # reaper is ready to attack, shoot nearest ground unit enemyGroundUnits = self.known_enemy_units.not_flying.closer_than(5, r) # hardcoded attackrange of 5 if r.weapon_cooldown == 0 and enemyGroundUnits.exists: enemyGroundUnits = enemyGroundUnits.sorted(lambda x: x.distance_to(r)) closestEnemy = enemyGroundUnits[0] self.combinedActions.append(r.attack(closestEnemy)) continue # continue for loop, dont execute any of the following # depends on [control=['if'], data=[]] # attack is on cooldown, check if grenade is on cooldown, if not then throw it to furthest enemy in range 5 reaperGrenadeRange = self._game_data.abilities[AbilityId.KD8CHARGE_KD8CHARGE.value]._proto.cast_range enemyGroundUnitsInGrenadeRange = self.known_enemy_units.not_structure.not_flying.exclude_type([UnitTypeId.LARVA, UnitTypeId.EGG]).closer_than(reaperGrenadeRange, r) if enemyGroundUnitsInGrenadeRange.exists and (r.is_attacking or r.is_moving): # if AbilityId.KD8CHARGE_KD8CHARGE in abilities, we check that to see if the reaper grenade is off cooldown abilities = await self.get_available_abilities(r) enemyGroundUnitsInGrenadeRange = enemyGroundUnitsInGrenadeRange.sorted(lambda x: x.distance_to(r), reverse=True) furthestEnemy = None for enemy in enemyGroundUnitsInGrenadeRange: if await self.can_cast(r, AbilityId.KD8CHARGE_KD8CHARGE, enemy, cached_abilities_of_unit=abilities): furthestEnemy = enemy break # 
depends on [control=['if'], data=[]] # depends on [control=['for'], data=['enemy']] if furthestEnemy: self.combinedActions.append(r(AbilityId.KD8CHARGE_KD8CHARGE, furthestEnemy)) continue # continue for loop, don't execute any of the following # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # move towards to max unit range if enemy is closer than 4 enemyThreatsVeryClose = self.known_enemy_units.filter(lambda x: x.can_attack_ground).closer_than(4.5, r) # hardcoded attackrange minus 0.5 # threats that can attack the reaper if r.weapon_cooldown != 0 and enemyThreatsVeryClose.exists: retreatPoints = self.neighbors8(r.position, distance=2) | self.neighbors8(r.position, distance=4) # filter points that are pathable by a reaper retreatPoints = {x for x in retreatPoints if self.inPathingGrid(x)} if retreatPoints: closestEnemy = enemyThreatsVeryClose.closest_to(r) retreatPoint = max(retreatPoints, key=lambda x: x.distance_to(closestEnemy) - x.distance_to(r)) # retreatPoint = closestEnemy.position.furthest(retreatPoints) self.combinedActions.append(r.move(retreatPoint)) continue # continue for loop, don't execute any of the following # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # move to nearest enemy ground unit/building because no enemy unit is closer than 5 allEnemyGroundUnits = self.known_enemy_units.not_flying if allEnemyGroundUnits.exists: closestEnemy = allEnemyGroundUnits.closest_to(r) self.combinedActions.append(r.move(closestEnemy)) continue # continue for loop, don't execute any of the following # depends on [control=['if'], data=[]] # move to random enemy start location if no enemy buildings have been seen self.combinedActions.append(r.move(random.choice(self.enemy_start_locations))) # depends on [control=['for'], data=['r']] # manage idle scvs, would be taken care by distribute workers aswell if self.townhalls.exists: for w in self.workers.idle: th = self.townhalls.closest_to(w) mfs = 
self.state.mineral_field.closer_than(10, th) if mfs: mf = mfs.closest_to(w) self.combinedActions.append(w.gather(mf)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['w']] # depends on [control=['if'], data=[]] # manage orbital energy and drop mules for oc in self.units(UnitTypeId.ORBITALCOMMAND).filter(lambda x: x.energy >= 50): mfs = self.state.mineral_field.closer_than(10, oc) if mfs: mf = max(mfs, key=lambda x: x.mineral_contents) self.combinedActions.append(oc(AbilityId.CALLDOWNMULE_CALLDOWNMULE, mf)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['oc']] # when running out of mineral fields near command center, fly to next base with minerals # execuite actions await self.do_actions(self.combinedActions)
def move(self, d_xyz):
    """
    Translate the Polyhedron in x, y and z coordinates.

    :param d_xyz: displacement in x, y(, and z).
    :type d_xyz: tuple (len=2 or 3)
    :returns: ``pyny.Polyhedron``
    """
    # Wrap this polyhedron in a throwaway Place/Space so the existing
    # Space.move machinery performs the translation for us.
    # NOTE(review): the dummy surface repeats the vertex [0, 1] — looks
    # like a typo for a unit square ([1, 0]); confirm Place accepts it.
    dummy_surface = np.array([[0, 0], [0, 1], [1, 1], [0, 1]])
    host_space = Space(Place(dummy_surface, polyhedra=self))
    moved = host_space.move(d_xyz, inplace=False)
    return moved[0].polyhedra[0]
def function[move, parameter[self, d_xyz]]: constant[ Translate the Polyhedron in x, y and z coordinates. :param d_xyz: displacement in x, y(, and z). :type d_xyz: tuple (len=2 or 3) :returns: ``pyny.Polyhedron`` ] variable[polygon] assign[=] call[name[np].array, parameter[list[[<ast.List object at 0x7da1b24ac460>, <ast.List object at 0x7da1b24afe80>, <ast.List object at 0x7da1b24ac2e0>, <ast.List object at 0x7da1b24ae140>]]]] variable[space] assign[=] call[name[Space], parameter[call[name[Place], parameter[name[polygon]]]]] return[call[call[call[name[space].move, parameter[name[d_xyz]]]][constant[0]].polyhedra][constant[0]]]
keyword[def] identifier[move] ( identifier[self] , identifier[d_xyz] ): literal[string] identifier[polygon] = identifier[np] . identifier[array] ([[ literal[int] , literal[int] ],[ literal[int] , literal[int] ],[ literal[int] , literal[int] ],[ literal[int] , literal[int] ]]) identifier[space] = identifier[Space] ( identifier[Place] ( identifier[polygon] , identifier[polyhedra] = identifier[self] )) keyword[return] identifier[space] . identifier[move] ( identifier[d_xyz] , identifier[inplace] = keyword[False] )[ literal[int] ]. identifier[polyhedra] [ literal[int] ]
def move(self, d_xyz): """ Translate the Polyhedron in x, y and z coordinates. :param d_xyz: displacement in x, y(, and z). :type d_xyz: tuple (len=2 or 3) :returns: ``pyny.Polyhedron`` """ polygon = np.array([[0, 0], [0, 1], [1, 1], [0, 1]]) space = Space(Place(polygon, polyhedra=self)) return space.move(d_xyz, inplace=False)[0].polyhedra[0]
def _run_down(self, path, migration, pretend=False):
    """
    Run "down" a migration instance.
    """
    name = migration["migration"]
    instance = self._resolve(path, name)

    # Dry run: only report what would happen, execute nothing.
    if pretend:
        return self._pretend_to_run(instance, "down")

    if instance.transactional:
        # Roll back inside a DB transaction when the migration supports it.
        with instance.db.transaction():
            instance.down()
    else:
        instance.down()

    # Remove the record so this migration can be run "up" again later.
    self._repository.delete(migration)

    self._note(
        decode("[<info>OK</>] <info>Rolled back</info> ")
        + "<fg=cyan>%s</>" % name
    )
def function[_run_down, parameter[self, path, migration, pretend]]: constant[ Run "down" a migration instance. ] variable[migration_file] assign[=] call[name[migration]][constant[migration]] variable[instance] assign[=] call[name[self]._resolve, parameter[name[path], name[migration_file]]] if name[pretend] begin[:] return[call[name[self]._pretend_to_run, parameter[name[instance], constant[down]]]] if name[instance].transactional begin[:] with call[name[instance].db.transaction, parameter[]] begin[:] call[name[instance].down, parameter[]] call[name[self]._repository.delete, parameter[name[migration]]] call[name[self]._note, parameter[binary_operation[call[name[decode], parameter[constant[[<info>OK</>] <info>Rolled back</info> ]]] + binary_operation[constant[<fg=cyan>%s</>] <ast.Mod object at 0x7da2590d6920> name[migration_file]]]]]
keyword[def] identifier[_run_down] ( identifier[self] , identifier[path] , identifier[migration] , identifier[pretend] = keyword[False] ): literal[string] identifier[migration_file] = identifier[migration] [ literal[string] ] identifier[instance] = identifier[self] . identifier[_resolve] ( identifier[path] , identifier[migration_file] ) keyword[if] identifier[pretend] : keyword[return] identifier[self] . identifier[_pretend_to_run] ( identifier[instance] , literal[string] ) keyword[if] identifier[instance] . identifier[transactional] : keyword[with] identifier[instance] . identifier[db] . identifier[transaction] (): identifier[instance] . identifier[down] () keyword[else] : identifier[instance] . identifier[down] () identifier[self] . identifier[_repository] . identifier[delete] ( identifier[migration] ) identifier[self] . identifier[_note] ( identifier[decode] ( literal[string] ) + literal[string] % identifier[migration_file] )
def _run_down(self, path, migration, pretend=False): """ Run "down" a migration instance. """ migration_file = migration['migration'] instance = self._resolve(path, migration_file) if pretend: return self._pretend_to_run(instance, 'down') # depends on [control=['if'], data=[]] if instance.transactional: with instance.db.transaction(): instance.down() # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]] else: instance.down() self._repository.delete(migration) self._note(decode('[<info>OK</>] <info>Rolled back</info> ') + '<fg=cyan>%s</>' % migration_file)
def start(self, players):
    """
    Start the game.
    The value of option 'pregame' determines whether the pregame will occur or not.

    - Resets the board
    - Sets the players
    - Sets the game state to the appropriate first turn of the game
    - Finds the robber on the board, sets the robber_tile appropriately
    - Logs the catanlog header

    :param players: players to start the game with
    """
    # Local import — presumably avoids a circular import with boardbuilder;
    # TODO confirm.
    from .boardbuilder import Opt
    self.reset()
    # Debug boards ignore the caller-supplied roster and use canned players.
    if self.board.opts.get('players') == Opt.debug:
        players = Game.get_debug_players()
    self.set_players(players)
    # Pregame defaults to on when the option is unset.
    # NOTE(review): any value other than None/'on'/'off' leaves the game
    # state untouched — verify that is intended.
    if self.options.get('pregame') is None or self.options.get('pregame') == 'on':
        logging.debug('Entering pregame, game options={}'.format(self.options))
        self.set_state(catan.states.GameStatePreGamePlacingPiece(self, catan.pieces.PieceType.settlement))
    elif self.options.get('pregame') == 'off':
        logging.debug('Skipping pregame, game options={}'.format(self.options))
        self.set_state(catan.states.GameStateBeginTurn(self))
    # Collect terrain/number chits in board tile order for the catanlog header.
    terrain = list()
    numbers = list()
    for tile in self.board.tiles:
        terrain.append(tile.terrain)
        numbers.append(tile.number)
    # Locate the robber piece and remember which tile it starts on.
    for (_, coord), piece in self.board.pieces.items():
        if piece.type == catan.pieces.PieceType.robber:
            self.robber_tile = hexgrid.tile_id_from_coord(coord)
            logging.debug('Found robber at coord={}, set robber_tile={}'.format(coord, self.robber_tile))
    self.catanlog.log_game_start(self.players, terrain, numbers, self.board.ports)
    self.notify_observers()
keyword[def] identifier[start] ( identifier[self] , identifier[players] ): literal[string] keyword[from] . identifier[boardbuilder] keyword[import] identifier[Opt] identifier[self] . identifier[reset] () keyword[if] identifier[self] . identifier[board] . identifier[opts] . identifier[get] ( literal[string] )== identifier[Opt] . identifier[debug] : identifier[players] = identifier[Game] . identifier[get_debug_players] () identifier[self] . identifier[set_players] ( identifier[players] ) keyword[if] identifier[self] . identifier[options] . identifier[get] ( literal[string] ) keyword[is] keyword[None] keyword[or] identifier[self] . identifier[options] . identifier[get] ( literal[string] )== literal[string] : identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[self] . identifier[options] )) identifier[self] . identifier[set_state] ( identifier[catan] . identifier[states] . identifier[GameStatePreGamePlacingPiece] ( identifier[self] , identifier[catan] . identifier[pieces] . identifier[PieceType] . identifier[settlement] )) keyword[elif] identifier[self] . identifier[options] . identifier[get] ( literal[string] )== literal[string] : identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[self] . identifier[options] )) identifier[self] . identifier[set_state] ( identifier[catan] . identifier[states] . identifier[GameStateBeginTurn] ( identifier[self] )) identifier[terrain] = identifier[list] () identifier[numbers] = identifier[list] () keyword[for] identifier[tile] keyword[in] identifier[self] . identifier[board] . identifier[tiles] : identifier[terrain] . identifier[append] ( identifier[tile] . identifier[terrain] ) identifier[numbers] . identifier[append] ( identifier[tile] . identifier[number] ) keyword[for] ( identifier[_] , identifier[coord] ), identifier[piece] keyword[in] identifier[self] . identifier[board] . identifier[pieces] . identifier[items] (): keyword[if] identifier[piece] . 
identifier[type] == identifier[catan] . identifier[pieces] . identifier[PieceType] . identifier[robber] : identifier[self] . identifier[robber_tile] = identifier[hexgrid] . identifier[tile_id_from_coord] ( identifier[coord] ) identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[coord] , identifier[self] . identifier[robber_tile] )) identifier[self] . identifier[catanlog] . identifier[log_game_start] ( identifier[self] . identifier[players] , identifier[terrain] , identifier[numbers] , identifier[self] . identifier[board] . identifier[ports] ) identifier[self] . identifier[notify_observers] ()
def start(self, players): """ Start the game. The value of option 'pregame' determines whether the pregame will occur or not. - Resets the board - Sets the players - Sets the game state to the appropriate first turn of the game - Finds the robber on the board, sets the robber_tile appropriately - Logs the catanlog header :param players: players to start the game with """ from .boardbuilder import Opt self.reset() if self.board.opts.get('players') == Opt.debug: players = Game.get_debug_players() # depends on [control=['if'], data=[]] self.set_players(players) if self.options.get('pregame') is None or self.options.get('pregame') == 'on': logging.debug('Entering pregame, game options={}'.format(self.options)) self.set_state(catan.states.GameStatePreGamePlacingPiece(self, catan.pieces.PieceType.settlement)) # depends on [control=['if'], data=[]] elif self.options.get('pregame') == 'off': logging.debug('Skipping pregame, game options={}'.format(self.options)) self.set_state(catan.states.GameStateBeginTurn(self)) # depends on [control=['if'], data=[]] terrain = list() numbers = list() for tile in self.board.tiles: terrain.append(tile.terrain) numbers.append(tile.number) # depends on [control=['for'], data=['tile']] for ((_, coord), piece) in self.board.pieces.items(): if piece.type == catan.pieces.PieceType.robber: self.robber_tile = hexgrid.tile_id_from_coord(coord) logging.debug('Found robber at coord={}, set robber_tile={}'.format(coord, self.robber_tile)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] self.catanlog.log_game_start(self.players, terrain, numbers, self.board.ports) self.notify_observers()
def safe_execute_script(driver, script):
    """
    When executing a script that contains a jQuery command,
    it's important that the jQuery library has been loaded first.
    This method will load jQuery if it wasn't already loaded.
    """
    try:
        driver.execute_script(script)
        return
    except Exception:
        # Most likely cause: "jQuery is not defined" — load it and retry.
        pass
    activate_jquery(driver)  # It's a good thing we can define it here
    driver.execute_script(script)
def function[safe_execute_script, parameter[driver, script]]: constant[ When executing a script that contains a jQuery command, it's important that the jQuery library has been loaded first. This method will load jQuery if it wasn't already loaded. ] <ast.Try object at 0x7da1b1bbab60>
keyword[def] identifier[safe_execute_script] ( identifier[driver] , identifier[script] ): literal[string] keyword[try] : identifier[driver] . identifier[execute_script] ( identifier[script] ) keyword[except] identifier[Exception] : identifier[activate_jquery] ( identifier[driver] ) identifier[driver] . identifier[execute_script] ( identifier[script] )
def safe_execute_script(driver, script): """ When executing a script that contains a jQuery command, it's important that the jQuery library has been loaded first. This method will load jQuery if it wasn't already loaded. """ try: driver.execute_script(script) # depends on [control=['try'], data=[]] except Exception: # The likely reason this fails is because: "jQuery is not defined" activate_jquery(driver) # It's a good thing we can define it here driver.execute_script(script) # depends on [control=['except'], data=[]]
def is_merged(self):
    """Checks to see if the pull request was merged.

    :returns: bool
    """
    # The merge endpoint answers 204 when merged, 404 when not.
    merge_url = self._build_url('merge', base_url=self._api)
    response = self._get(merge_url)
    return self._boolean(response, 204, 404)
def function[is_merged, parameter[self]]: constant[Checks to see if the pull request was merged. :returns: bool ] variable[url] assign[=] call[name[self]._build_url, parameter[constant[merge]]] return[call[name[self]._boolean, parameter[call[name[self]._get, parameter[name[url]]], constant[204], constant[404]]]]
keyword[def] identifier[is_merged] ( identifier[self] ): literal[string] identifier[url] = identifier[self] . identifier[_build_url] ( literal[string] , identifier[base_url] = identifier[self] . identifier[_api] ) keyword[return] identifier[self] . identifier[_boolean] ( identifier[self] . identifier[_get] ( identifier[url] ), literal[int] , literal[int] )
def is_merged(self): """Checks to see if the pull request was merged. :returns: bool """ url = self._build_url('merge', base_url=self._api) return self._boolean(self._get(url), 204, 404)
def _make_stream_reader(cls, stream):
    """
    Return a |StreamReader| instance with wrapping *stream* and having
    "endian-ness" determined by the 'MM' or 'II' indicator in the TIFF
    stream header.
    """
    # Sniff the byte-order marker first, then hand both to the reader.
    byte_order = cls._detect_endian(stream)
    return StreamReader(stream, byte_order)
def function[_make_stream_reader, parameter[cls, stream]]: constant[ Return a |StreamReader| instance with wrapping *stream* and having "endian-ness" determined by the 'MM' or 'II' indicator in the TIFF stream header. ] variable[endian] assign[=] call[name[cls]._detect_endian, parameter[name[stream]]] return[call[name[StreamReader], parameter[name[stream], name[endian]]]]
keyword[def] identifier[_make_stream_reader] ( identifier[cls] , identifier[stream] ): literal[string] identifier[endian] = identifier[cls] . identifier[_detect_endian] ( identifier[stream] ) keyword[return] identifier[StreamReader] ( identifier[stream] , identifier[endian] )
def _make_stream_reader(cls, stream): """ Return a |StreamReader| instance with wrapping *stream* and having "endian-ness" determined by the 'MM' or 'II' indicator in the TIFF stream header. """ endian = cls._detect_endian(stream) return StreamReader(stream, endian)
def absent(name, auth=None, **kwargs):
    '''
    Ensure a security group does not exist

    name
        Name of the security group
    '''
    ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}

    kwargs = __utils__['args.clean_kwargs'](**kwargs)
    __salt__['neutronng.setup_clouds'](auth)

    # Scope the lookup to the named project so same-named groups in other
    # projects are left alone.
    kwargs['project_id'] = __salt__['keystoneng.project_get'](
        name=kwargs['project_name'])
    secgroup = __salt__['neutronng.security_group_get'](
        name=name, filters={'project_id': kwargs['project_id']})

    if not secgroup:
        # Already absent; nothing to change.
        return ret

    if __opts__['test'] is True:
        # test=True: report the pending deletion without performing it.
        ret['result'] = None
        ret['changes'] = {'id': secgroup.id}
        ret['comment'] = 'Security group will be deleted.'
        return ret

    __salt__['neutronng.security_group_delete'](name=secgroup)
    ret['changes']['id'] = name
    ret['comment'] = 'Deleted security group'
    return ret
def function[absent, parameter[name, auth]]: constant[ Ensure a security group does not exist name Name of the security group ] variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b215c490>, <ast.Constant object at 0x7da1b215f070>, <ast.Constant object at 0x7da1b215dde0>, <ast.Constant object at 0x7da1b215ee60>], [<ast.Name object at 0x7da1b215f010>, <ast.Dict object at 0x7da1b215ca00>, <ast.Constant object at 0x7da1b215e0b0>, <ast.Constant object at 0x7da1b215f760>]] variable[kwargs] assign[=] call[call[name[__utils__]][constant[args.clean_kwargs]], parameter[]] call[call[name[__salt__]][constant[neutronng.setup_clouds]], parameter[name[auth]]] call[name[kwargs]][constant[project_id]] assign[=] call[call[name[__salt__]][constant[keystoneng.project_get]], parameter[]] variable[secgroup] assign[=] call[call[name[__salt__]][constant[neutronng.security_group_get]], parameter[]] if name[secgroup] begin[:] if compare[call[name[__opts__]][constant[test]] is constant[True]] begin[:] call[name[ret]][constant[result]] assign[=] constant[None] call[name[ret]][constant[changes]] assign[=] dictionary[[<ast.Constant object at 0x7da1b215e7a0>], [<ast.Attribute object at 0x7da1b215f3a0>]] call[name[ret]][constant[comment]] assign[=] constant[Security group will be deleted.] return[name[ret]] call[call[name[__salt__]][constant[neutronng.security_group_delete]], parameter[]] call[call[name[ret]][constant[changes]]][constant[id]] assign[=] name[name] call[name[ret]][constant[comment]] assign[=] constant[Deleted security group] return[name[ret]]
keyword[def] identifier[absent] ( identifier[name] , identifier[auth] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[ret] ={ literal[string] : identifier[name] , literal[string] :{}, literal[string] : keyword[True] , literal[string] : literal[string] } identifier[kwargs] = identifier[__utils__] [ literal[string] ](** identifier[kwargs] ) identifier[__salt__] [ literal[string] ]( identifier[auth] ) identifier[kwargs] [ literal[string] ]= identifier[__salt__] [ literal[string] ]( identifier[name] = identifier[kwargs] [ literal[string] ]) identifier[secgroup] = identifier[__salt__] [ literal[string] ]( identifier[name] = identifier[name] , identifier[filters] ={ literal[string] : identifier[kwargs] [ literal[string] ]} ) keyword[if] identifier[secgroup] : keyword[if] identifier[__opts__] [ literal[string] ] keyword[is] keyword[True] : identifier[ret] [ literal[string] ]= keyword[None] identifier[ret] [ literal[string] ]={ literal[string] : identifier[secgroup] . identifier[id] } identifier[ret] [ literal[string] ]= literal[string] keyword[return] identifier[ret] identifier[__salt__] [ literal[string] ]( identifier[name] = identifier[secgroup] ) identifier[ret] [ literal[string] ][ literal[string] ]= identifier[name] identifier[ret] [ literal[string] ]= literal[string] keyword[return] identifier[ret]
def absent(name, auth=None, **kwargs): """ Ensure a security group does not exist name Name of the security group """ ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''} kwargs = __utils__['args.clean_kwargs'](**kwargs) __salt__['neutronng.setup_clouds'](auth) kwargs['project_id'] = __salt__['keystoneng.project_get'](name=kwargs['project_name']) secgroup = __salt__['neutronng.security_group_get'](name=name, filters={'project_id': kwargs['project_id']}) if secgroup: if __opts__['test'] is True: ret['result'] = None ret['changes'] = {'id': secgroup.id} ret['comment'] = 'Security group will be deleted.' return ret # depends on [control=['if'], data=[]] __salt__['neutronng.security_group_delete'](name=secgroup) ret['changes']['id'] = name ret['comment'] = 'Deleted security group' # depends on [control=['if'], data=[]] return ret
def _start_tracer(self, origin): """ Start a new Tracer object, and store it in self.tracers. """ tracer = self._tracer_class(log=self.log) tracer.data = self.data fn = tracer.start(origin) self.tracers.append(tracer) return fn
def function[_start_tracer, parameter[self, origin]]: constant[ Start a new Tracer object, and store it in self.tracers. ] variable[tracer] assign[=] call[name[self]._tracer_class, parameter[]] name[tracer].data assign[=] name[self].data variable[fn] assign[=] call[name[tracer].start, parameter[name[origin]]] call[name[self].tracers.append, parameter[name[tracer]]] return[name[fn]]
keyword[def] identifier[_start_tracer] ( identifier[self] , identifier[origin] ): literal[string] identifier[tracer] = identifier[self] . identifier[_tracer_class] ( identifier[log] = identifier[self] . identifier[log] ) identifier[tracer] . identifier[data] = identifier[self] . identifier[data] identifier[fn] = identifier[tracer] . identifier[start] ( identifier[origin] ) identifier[self] . identifier[tracers] . identifier[append] ( identifier[tracer] ) keyword[return] identifier[fn]
def _start_tracer(self, origin): """ Start a new Tracer object, and store it in self.tracers. """ tracer = self._tracer_class(log=self.log) tracer.data = self.data fn = tracer.start(origin) self.tracers.append(tracer) return fn
def _strip_line_sep(self, s): """Strip trailing line separators from s, but no other whitespaces.""" if s[-2:] == b'\r\n': return s[:-2] elif s[-1:] == b'\n': return s[:-1] else: return s
def function[_strip_line_sep, parameter[self, s]]: constant[Strip trailing line separators from s, but no other whitespaces.] if compare[call[name[s]][<ast.Slice object at 0x7da1b1b0fca0>] equal[==] constant[b'\r\n']] begin[:] return[call[name[s]][<ast.Slice object at 0x7da1b1b0e980>]]
keyword[def] identifier[_strip_line_sep] ( identifier[self] , identifier[s] ): literal[string] keyword[if] identifier[s] [- literal[int] :]== literal[string] : keyword[return] identifier[s] [:- literal[int] ] keyword[elif] identifier[s] [- literal[int] :]== literal[string] : keyword[return] identifier[s] [:- literal[int] ] keyword[else] : keyword[return] identifier[s]
def _strip_line_sep(self, s): """Strip trailing line separators from s, but no other whitespaces.""" if s[-2:] == b'\r\n': return s[:-2] # depends on [control=['if'], data=[]] elif s[-1:] == b'\n': return s[:-1] # depends on [control=['if'], data=[]] else: return s
def git_tag_to_semver(git_tag: str) -> SemVer: """ :git_tag: A string representation of a Git tag. Searches a Git tag's string representation for a SemVer, and returns that as a SemVer object. """ pattern = re.compile(r'[0-9]+\.[0-9]+\.[0-9]+$') match = pattern.search(git_tag) if match: version = match.group(0) else: raise InvalidTagFormatException('Tag passed contains no SemVer.') return SemVer.from_str(version)
def function[git_tag_to_semver, parameter[git_tag]]: constant[ :git_tag: A string representation of a Git tag. Searches a Git tag's string representation for a SemVer, and returns that as a SemVer object. ] variable[pattern] assign[=] call[name[re].compile, parameter[constant[[0-9]+\.[0-9]+\.[0-9]+$]]] variable[match] assign[=] call[name[pattern].search, parameter[name[git_tag]]] if name[match] begin[:] variable[version] assign[=] call[name[match].group, parameter[constant[0]]] return[call[name[SemVer].from_str, parameter[name[version]]]]
keyword[def] identifier[git_tag_to_semver] ( identifier[git_tag] : identifier[str] )-> identifier[SemVer] : literal[string] identifier[pattern] = identifier[re] . identifier[compile] ( literal[string] ) identifier[match] = identifier[pattern] . identifier[search] ( identifier[git_tag] ) keyword[if] identifier[match] : identifier[version] = identifier[match] . identifier[group] ( literal[int] ) keyword[else] : keyword[raise] identifier[InvalidTagFormatException] ( literal[string] ) keyword[return] identifier[SemVer] . identifier[from_str] ( identifier[version] )
def git_tag_to_semver(git_tag: str) -> SemVer: """ :git_tag: A string representation of a Git tag. Searches a Git tag's string representation for a SemVer, and returns that as a SemVer object. """ pattern = re.compile('[0-9]+\\.[0-9]+\\.[0-9]+$') match = pattern.search(git_tag) if match: version = match.group(0) # depends on [control=['if'], data=[]] else: raise InvalidTagFormatException('Tag passed contains no SemVer.') return SemVer.from_str(version)
def _input_valid(input_, operation, message, output_condition_uri=None): """Validates a single Input against a single Output. Note: In case of a `CREATE` Transaction, this method does not validate against `output_condition_uri`. Args: input_ (:class:`~bigchaindb.common.transaction. Input`) The Input to be signed. operation (str): The type of Transaction. message (str): The fulfillment message. output_condition_uri (str, optional): An Output to check the Input against. Returns: bool: If the Input is valid. """ ccffill = input_.fulfillment try: parsed_ffill = Fulfillment.from_uri(ccffill.serialize_uri()) except (TypeError, ValueError, ParsingError, ASN1DecodeError, ASN1EncodeError): return False if operation == Transaction.CREATE: # NOTE: In the case of a `CREATE` transaction, the # output is always valid. output_valid = True else: output_valid = output_condition_uri == ccffill.condition_uri message = sha3_256(message.encode()) if input_.fulfills: message.update('{}{}'.format( input_.fulfills.txid, input_.fulfills.output).encode()) # NOTE: We pass a timestamp to `.validate`, as in case of a timeout # condition we'll have to validate against it # cryptoconditions makes no assumptions of the encoding of the # message to sign or verify. It only accepts bytestrings ffill_valid = parsed_ffill.validate(message=message.digest()) return output_valid and ffill_valid
def function[_input_valid, parameter[input_, operation, message, output_condition_uri]]: constant[Validates a single Input against a single Output. Note: In case of a `CREATE` Transaction, this method does not validate against `output_condition_uri`. Args: input_ (:class:`~bigchaindb.common.transaction. Input`) The Input to be signed. operation (str): The type of Transaction. message (str): The fulfillment message. output_condition_uri (str, optional): An Output to check the Input against. Returns: bool: If the Input is valid. ] variable[ccffill] assign[=] name[input_].fulfillment <ast.Try object at 0x7da20c76fc10> if compare[name[operation] equal[==] name[Transaction].CREATE] begin[:] variable[output_valid] assign[=] constant[True] variable[message] assign[=] call[name[sha3_256], parameter[call[name[message].encode, parameter[]]]] if name[input_].fulfills begin[:] call[name[message].update, parameter[call[call[constant[{}{}].format, parameter[name[input_].fulfills.txid, name[input_].fulfills.output]].encode, parameter[]]]] variable[ffill_valid] assign[=] call[name[parsed_ffill].validate, parameter[]] return[<ast.BoolOp object at 0x7da20c76c400>]
keyword[def] identifier[_input_valid] ( identifier[input_] , identifier[operation] , identifier[message] , identifier[output_condition_uri] = keyword[None] ): literal[string] identifier[ccffill] = identifier[input_] . identifier[fulfillment] keyword[try] : identifier[parsed_ffill] = identifier[Fulfillment] . identifier[from_uri] ( identifier[ccffill] . identifier[serialize_uri] ()) keyword[except] ( identifier[TypeError] , identifier[ValueError] , identifier[ParsingError] , identifier[ASN1DecodeError] , identifier[ASN1EncodeError] ): keyword[return] keyword[False] keyword[if] identifier[operation] == identifier[Transaction] . identifier[CREATE] : identifier[output_valid] = keyword[True] keyword[else] : identifier[output_valid] = identifier[output_condition_uri] == identifier[ccffill] . identifier[condition_uri] identifier[message] = identifier[sha3_256] ( identifier[message] . identifier[encode] ()) keyword[if] identifier[input_] . identifier[fulfills] : identifier[message] . identifier[update] ( literal[string] . identifier[format] ( identifier[input_] . identifier[fulfills] . identifier[txid] , identifier[input_] . identifier[fulfills] . identifier[output] ). identifier[encode] ()) identifier[ffill_valid] = identifier[parsed_ffill] . identifier[validate] ( identifier[message] = identifier[message] . identifier[digest] ()) keyword[return] identifier[output_valid] keyword[and] identifier[ffill_valid]
def _input_valid(input_, operation, message, output_condition_uri=None): """Validates a single Input against a single Output. Note: In case of a `CREATE` Transaction, this method does not validate against `output_condition_uri`. Args: input_ (:class:`~bigchaindb.common.transaction. Input`) The Input to be signed. operation (str): The type of Transaction. message (str): The fulfillment message. output_condition_uri (str, optional): An Output to check the Input against. Returns: bool: If the Input is valid. """ ccffill = input_.fulfillment try: parsed_ffill = Fulfillment.from_uri(ccffill.serialize_uri()) # depends on [control=['try'], data=[]] except (TypeError, ValueError, ParsingError, ASN1DecodeError, ASN1EncodeError): return False # depends on [control=['except'], data=[]] if operation == Transaction.CREATE: # NOTE: In the case of a `CREATE` transaction, the # output is always valid. output_valid = True # depends on [control=['if'], data=[]] else: output_valid = output_condition_uri == ccffill.condition_uri message = sha3_256(message.encode()) if input_.fulfills: message.update('{}{}'.format(input_.fulfills.txid, input_.fulfills.output).encode()) # depends on [control=['if'], data=[]] # NOTE: We pass a timestamp to `.validate`, as in case of a timeout # condition we'll have to validate against it # cryptoconditions makes no assumptions of the encoding of the # message to sign or verify. It only accepts bytestrings ffill_valid = parsed_ffill.validate(message=message.digest()) return output_valid and ffill_valid
def add_provider(self, share, provider, readonly=False): """Add a provider to the provider_map routing table.""" # Make sure share starts with, or is '/' share = "/" + share.strip("/") assert share not in self.provider_map if compat.is_basestring(provider): # Syntax: # <mount_path>: <folder_path> # We allow a simple string as 'provider'. In this case we interpret # it as a file system root folder that is published. provider = FilesystemProvider(provider, readonly) elif type(provider) in (dict,): if "provider" in provider: # Syntax: # <mount_path>: {"provider": <class_path>, "args": <pos_args>, "kwargs": <named_args} prov_class = dynamic_import_class(provider["provider"]) provider = prov_class( *provider.get("args", []), **provider.get("kwargs", {}) ) else: # Syntax: # <mount_path>: {"root": <path>, "redaonly": <bool>} provider = FilesystemProvider( provider["root"], bool(provider.get("readonly", False)) ) elif type(provider) in (list, tuple): raise ValueError( "Provider {}: tuple/list syntax is no longer supported".format(provider) ) # provider = FilesystemProvider(provider[0], provider[1]) if not isinstance(provider, DAVProvider): raise ValueError("Invalid provider {}".format(provider)) provider.set_share_path(share) if self.mount_path: provider.set_mount_path(self.mount_path) # TODO: someday we may want to configure different lock/prop # managers per provider provider.set_lock_manager(self.lock_manager) provider.set_prop_manager(self.prop_manager) self.provider_map[share] = provider # self.provider_map[share] = {"provider": provider, "allow_anonymous": False} # Store the list of share paths, ordered by length, so route lookups # will return the most specific match self.sorted_share_list = [s.lower() for s in self.provider_map.keys()] self.sorted_share_list = sorted(self.sorted_share_list, key=len, reverse=True) return provider
def function[add_provider, parameter[self, share, provider, readonly]]: constant[Add a provider to the provider_map routing table.] variable[share] assign[=] binary_operation[constant[/] + call[name[share].strip, parameter[constant[/]]]] assert[compare[name[share] <ast.NotIn object at 0x7da2590d7190> name[self].provider_map]] if call[name[compat].is_basestring, parameter[name[provider]]] begin[:] variable[provider] assign[=] call[name[FilesystemProvider], parameter[name[provider], name[readonly]]] if <ast.UnaryOp object at 0x7da1b0191930> begin[:] <ast.Raise object at 0x7da1b0192fe0> call[name[provider].set_share_path, parameter[name[share]]] if name[self].mount_path begin[:] call[name[provider].set_mount_path, parameter[name[self].mount_path]] call[name[provider].set_lock_manager, parameter[name[self].lock_manager]] call[name[provider].set_prop_manager, parameter[name[self].prop_manager]] call[name[self].provider_map][name[share]] assign[=] name[provider] name[self].sorted_share_list assign[=] <ast.ListComp object at 0x7da1b0192020> name[self].sorted_share_list assign[=] call[name[sorted], parameter[name[self].sorted_share_list]] return[name[provider]]
keyword[def] identifier[add_provider] ( identifier[self] , identifier[share] , identifier[provider] , identifier[readonly] = keyword[False] ): literal[string] identifier[share] = literal[string] + identifier[share] . identifier[strip] ( literal[string] ) keyword[assert] identifier[share] keyword[not] keyword[in] identifier[self] . identifier[provider_map] keyword[if] identifier[compat] . identifier[is_basestring] ( identifier[provider] ): identifier[provider] = identifier[FilesystemProvider] ( identifier[provider] , identifier[readonly] ) keyword[elif] identifier[type] ( identifier[provider] ) keyword[in] ( identifier[dict] ,): keyword[if] literal[string] keyword[in] identifier[provider] : identifier[prov_class] = identifier[dynamic_import_class] ( identifier[provider] [ literal[string] ]) identifier[provider] = identifier[prov_class] ( * identifier[provider] . identifier[get] ( literal[string] ,[]),** identifier[provider] . identifier[get] ( literal[string] ,{}) ) keyword[else] : identifier[provider] = identifier[FilesystemProvider] ( identifier[provider] [ literal[string] ], identifier[bool] ( identifier[provider] . identifier[get] ( literal[string] , keyword[False] )) ) keyword[elif] identifier[type] ( identifier[provider] ) keyword[in] ( identifier[list] , identifier[tuple] ): keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[provider] ) ) keyword[if] keyword[not] identifier[isinstance] ( identifier[provider] , identifier[DAVProvider] ): keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[provider] )) identifier[provider] . identifier[set_share_path] ( identifier[share] ) keyword[if] identifier[self] . identifier[mount_path] : identifier[provider] . identifier[set_mount_path] ( identifier[self] . identifier[mount_path] ) identifier[provider] . identifier[set_lock_manager] ( identifier[self] . identifier[lock_manager] ) identifier[provider] . 
identifier[set_prop_manager] ( identifier[self] . identifier[prop_manager] ) identifier[self] . identifier[provider_map] [ identifier[share] ]= identifier[provider] identifier[self] . identifier[sorted_share_list] =[ identifier[s] . identifier[lower] () keyword[for] identifier[s] keyword[in] identifier[self] . identifier[provider_map] . identifier[keys] ()] identifier[self] . identifier[sorted_share_list] = identifier[sorted] ( identifier[self] . identifier[sorted_share_list] , identifier[key] = identifier[len] , identifier[reverse] = keyword[True] ) keyword[return] identifier[provider]
def add_provider(self, share, provider, readonly=False): """Add a provider to the provider_map routing table.""" # Make sure share starts with, or is '/' share = '/' + share.strip('/') assert share not in self.provider_map if compat.is_basestring(provider): # Syntax: # <mount_path>: <folder_path> # We allow a simple string as 'provider'. In this case we interpret # it as a file system root folder that is published. provider = FilesystemProvider(provider, readonly) # depends on [control=['if'], data=[]] elif type(provider) in (dict,): if 'provider' in provider: # Syntax: # <mount_path>: {"provider": <class_path>, "args": <pos_args>, "kwargs": <named_args} prov_class = dynamic_import_class(provider['provider']) provider = prov_class(*provider.get('args', []), **provider.get('kwargs', {})) # depends on [control=['if'], data=['provider']] else: # Syntax: # <mount_path>: {"root": <path>, "redaonly": <bool>} provider = FilesystemProvider(provider['root'], bool(provider.get('readonly', False))) # depends on [control=['if'], data=[]] elif type(provider) in (list, tuple): raise ValueError('Provider {}: tuple/list syntax is no longer supported'.format(provider)) # depends on [control=['if'], data=[]] # provider = FilesystemProvider(provider[0], provider[1]) if not isinstance(provider, DAVProvider): raise ValueError('Invalid provider {}'.format(provider)) # depends on [control=['if'], data=[]] provider.set_share_path(share) if self.mount_path: provider.set_mount_path(self.mount_path) # depends on [control=['if'], data=[]] # TODO: someday we may want to configure different lock/prop # managers per provider provider.set_lock_manager(self.lock_manager) provider.set_prop_manager(self.prop_manager) self.provider_map[share] = provider # self.provider_map[share] = {"provider": provider, "allow_anonymous": False} # Store the list of share paths, ordered by length, so route lookups # will return the most specific match self.sorted_share_list = [s.lower() for s in 
self.provider_map.keys()] self.sorted_share_list = sorted(self.sorted_share_list, key=len, reverse=True) return provider
def handle_import(self, options): """ Gets posts from Blogger. """ blog_id = options.get("blog_id") if blog_id is None: raise CommandError("Usage is import_blogger %s" % self.args) try: from gdata import service except ImportError: raise CommandError("Could not import the gdata library.") blogger = service.GDataService() blogger.service = "blogger" blogger.server = "www.blogger.com" start_index = 1 processed_posts = [] new_posts = 1 while new_posts: new_posts = 0 query = service.Query() query.feed = "/feeds/%s/posts/full" % blog_id query.max_results = 500 query.start_index = start_index try: feed = blogger.Get(query.ToUri()) except service.RequestError as err: message = "There was a service error. The response was: " \ "%(status)s %(reason)s - %(body)s" % err.message raise CommandError(message, blogger.server + query.feed, err.message["status"]) for (i, entry) in enumerate(feed.entry): # this basically gets the unique post ID from the URL to itself # and pulls the ID off the end. post_id = entry.GetSelfLink().href.split("/")[-1] # Skip duplicate posts. Important for the last query. 
if post_id in processed_posts: continue title = entry.title.text content = entry.content.text # this strips off the time zone info off the end as we want UTC clean_date = entry.published.text[:re.search(r"\.\d{3}", entry.published.text).end()] published_date = self.parse_datetime(clean_date) # TODO - issues with content not generating correct <P> tags tags = [tag.term for tag in entry.category] post = self.add_post(title=title, content=content, pub_date=published_date, tags=tags) # get the comments from the post feed and then add them to # the post details comment_url = "/feeds/%s/%s/comments/full?max-results=1000" comments = blogger.Get(comment_url % (blog_id, post_id)) for comment in comments.entry: email = comment.author[0].email.text author_name = comment.author[0].name.text # Strip off the time zone info off the end as we want UTC clean_date = comment.published.text[:re.search(r"\.\d{3}", comment.published.text).end()] comment_date = self.parse_datetime(clean_date) website = "" if comment.author[0].uri: website = comment.author[0].uri.text body = comment.content.text # add the comment as a dict to the end of the comments list self.add_comment(post=post, name=author_name, email=email, body=body, website=website, pub_date=comment_date) processed_posts.append(post_id) new_posts += 1 start_index += 500
def function[handle_import, parameter[self, options]]: constant[ Gets posts from Blogger. ] variable[blog_id] assign[=] call[name[options].get, parameter[constant[blog_id]]] if compare[name[blog_id] is constant[None]] begin[:] <ast.Raise object at 0x7da20e9542e0> <ast.Try object at 0x7da20e9560e0> variable[blogger] assign[=] call[name[service].GDataService, parameter[]] name[blogger].service assign[=] constant[blogger] name[blogger].server assign[=] constant[www.blogger.com] variable[start_index] assign[=] constant[1] variable[processed_posts] assign[=] list[[]] variable[new_posts] assign[=] constant[1] while name[new_posts] begin[:] variable[new_posts] assign[=] constant[0] variable[query] assign[=] call[name[service].Query, parameter[]] name[query].feed assign[=] binary_operation[constant[/feeds/%s/posts/full] <ast.Mod object at 0x7da2590d6920> name[blog_id]] name[query].max_results assign[=] constant[500] name[query].start_index assign[=] name[start_index] <ast.Try object at 0x7da20e9540a0> for taget[tuple[[<ast.Name object at 0x7da20e955660>, <ast.Name object at 0x7da20e955780>]]] in starred[call[name[enumerate], parameter[name[feed].entry]]] begin[:] variable[post_id] assign[=] call[call[call[name[entry].GetSelfLink, parameter[]].href.split, parameter[constant[/]]]][<ast.UnaryOp object at 0x7da1b157b5e0>] if compare[name[post_id] in name[processed_posts]] begin[:] continue variable[title] assign[=] name[entry].title.text variable[content] assign[=] name[entry].content.text variable[clean_date] assign[=] call[name[entry].published.text][<ast.Slice object at 0x7da20cabc2b0>] variable[published_date] assign[=] call[name[self].parse_datetime, parameter[name[clean_date]]] variable[tags] assign[=] <ast.ListComp object at 0x7da18f812980> variable[post] assign[=] call[name[self].add_post, parameter[]] variable[comment_url] assign[=] constant[/feeds/%s/%s/comments/full?max-results=1000] variable[comments] assign[=] call[name[blogger].Get, 
parameter[binary_operation[name[comment_url] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1579bd0>, <ast.Name object at 0x7da1b15784c0>]]]]] for taget[name[comment]] in starred[name[comments].entry] begin[:] variable[email] assign[=] call[name[comment].author][constant[0]].email.text variable[author_name] assign[=] call[name[comment].author][constant[0]].name.text variable[clean_date] assign[=] call[name[comment].published.text][<ast.Slice object at 0x7da1b157a800>] variable[comment_date] assign[=] call[name[self].parse_datetime, parameter[name[clean_date]]] variable[website] assign[=] constant[] if call[name[comment].author][constant[0]].uri begin[:] variable[website] assign[=] call[name[comment].author][constant[0]].uri.text variable[body] assign[=] name[comment].content.text call[name[self].add_comment, parameter[]] call[name[processed_posts].append, parameter[name[post_id]]] <ast.AugAssign object at 0x7da1b1579240> <ast.AugAssign object at 0x7da1b1578df0>
keyword[def] identifier[handle_import] ( identifier[self] , identifier[options] ): literal[string] identifier[blog_id] = identifier[options] . identifier[get] ( literal[string] ) keyword[if] identifier[blog_id] keyword[is] keyword[None] : keyword[raise] identifier[CommandError] ( literal[string] % identifier[self] . identifier[args] ) keyword[try] : keyword[from] identifier[gdata] keyword[import] identifier[service] keyword[except] identifier[ImportError] : keyword[raise] identifier[CommandError] ( literal[string] ) identifier[blogger] = identifier[service] . identifier[GDataService] () identifier[blogger] . identifier[service] = literal[string] identifier[blogger] . identifier[server] = literal[string] identifier[start_index] = literal[int] identifier[processed_posts] =[] identifier[new_posts] = literal[int] keyword[while] identifier[new_posts] : identifier[new_posts] = literal[int] identifier[query] = identifier[service] . identifier[Query] () identifier[query] . identifier[feed] = literal[string] % identifier[blog_id] identifier[query] . identifier[max_results] = literal[int] identifier[query] . identifier[start_index] = identifier[start_index] keyword[try] : identifier[feed] = identifier[blogger] . identifier[Get] ( identifier[query] . identifier[ToUri] ()) keyword[except] identifier[service] . identifier[RequestError] keyword[as] identifier[err] : identifier[message] = literal[string] literal[string] % identifier[err] . identifier[message] keyword[raise] identifier[CommandError] ( identifier[message] , identifier[blogger] . identifier[server] + identifier[query] . identifier[feed] , identifier[err] . identifier[message] [ literal[string] ]) keyword[for] ( identifier[i] , identifier[entry] ) keyword[in] identifier[enumerate] ( identifier[feed] . identifier[entry] ): identifier[post_id] = identifier[entry] . identifier[GetSelfLink] (). identifier[href] . 
identifier[split] ( literal[string] )[- literal[int] ] keyword[if] identifier[post_id] keyword[in] identifier[processed_posts] : keyword[continue] identifier[title] = identifier[entry] . identifier[title] . identifier[text] identifier[content] = identifier[entry] . identifier[content] . identifier[text] identifier[clean_date] = identifier[entry] . identifier[published] . identifier[text] [: identifier[re] . identifier[search] ( literal[string] , identifier[entry] . identifier[published] . identifier[text] ). identifier[end] ()] identifier[published_date] = identifier[self] . identifier[parse_datetime] ( identifier[clean_date] ) identifier[tags] =[ identifier[tag] . identifier[term] keyword[for] identifier[tag] keyword[in] identifier[entry] . identifier[category] ] identifier[post] = identifier[self] . identifier[add_post] ( identifier[title] = identifier[title] , identifier[content] = identifier[content] , identifier[pub_date] = identifier[published_date] , identifier[tags] = identifier[tags] ) identifier[comment_url] = literal[string] identifier[comments] = identifier[blogger] . identifier[Get] ( identifier[comment_url] %( identifier[blog_id] , identifier[post_id] )) keyword[for] identifier[comment] keyword[in] identifier[comments] . identifier[entry] : identifier[email] = identifier[comment] . identifier[author] [ literal[int] ]. identifier[email] . identifier[text] identifier[author_name] = identifier[comment] . identifier[author] [ literal[int] ]. identifier[name] . identifier[text] identifier[clean_date] = identifier[comment] . identifier[published] . identifier[text] [: identifier[re] . identifier[search] ( literal[string] , identifier[comment] . identifier[published] . identifier[text] ). identifier[end] ()] identifier[comment_date] = identifier[self] . identifier[parse_datetime] ( identifier[clean_date] ) identifier[website] = literal[string] keyword[if] identifier[comment] . identifier[author] [ literal[int] ]. 
identifier[uri] : identifier[website] = identifier[comment] . identifier[author] [ literal[int] ]. identifier[uri] . identifier[text] identifier[body] = identifier[comment] . identifier[content] . identifier[text] identifier[self] . identifier[add_comment] ( identifier[post] = identifier[post] , identifier[name] = identifier[author_name] , identifier[email] = identifier[email] , identifier[body] = identifier[body] , identifier[website] = identifier[website] , identifier[pub_date] = identifier[comment_date] ) identifier[processed_posts] . identifier[append] ( identifier[post_id] ) identifier[new_posts] += literal[int] identifier[start_index] += literal[int]
def handle_import(self, options): """ Gets posts from Blogger. """ blog_id = options.get('blog_id') if blog_id is None: raise CommandError('Usage is import_blogger %s' % self.args) # depends on [control=['if'], data=[]] try: from gdata import service # depends on [control=['try'], data=[]] except ImportError: raise CommandError('Could not import the gdata library.') # depends on [control=['except'], data=[]] blogger = service.GDataService() blogger.service = 'blogger' blogger.server = 'www.blogger.com' start_index = 1 processed_posts = [] new_posts = 1 while new_posts: new_posts = 0 query = service.Query() query.feed = '/feeds/%s/posts/full' % blog_id query.max_results = 500 query.start_index = start_index try: feed = blogger.Get(query.ToUri()) # depends on [control=['try'], data=[]] except service.RequestError as err: message = 'There was a service error. The response was: %(status)s %(reason)s - %(body)s' % err.message raise CommandError(message, blogger.server + query.feed, err.message['status']) # depends on [control=['except'], data=['err']] for (i, entry) in enumerate(feed.entry): # this basically gets the unique post ID from the URL to itself # and pulls the ID off the end. post_id = entry.GetSelfLink().href.split('/')[-1] # Skip duplicate posts. Important for the last query. 
if post_id in processed_posts: continue # depends on [control=['if'], data=[]] title = entry.title.text content = entry.content.text # this strips off the time zone info off the end as we want UTC clean_date = entry.published.text[:re.search('\\.\\d{3}', entry.published.text).end()] published_date = self.parse_datetime(clean_date) # TODO - issues with content not generating correct <P> tags tags = [tag.term for tag in entry.category] post = self.add_post(title=title, content=content, pub_date=published_date, tags=tags) # get the comments from the post feed and then add them to # the post details comment_url = '/feeds/%s/%s/comments/full?max-results=1000' comments = blogger.Get(comment_url % (blog_id, post_id)) for comment in comments.entry: email = comment.author[0].email.text author_name = comment.author[0].name.text # Strip off the time zone info off the end as we want UTC clean_date = comment.published.text[:re.search('\\.\\d{3}', comment.published.text).end()] comment_date = self.parse_datetime(clean_date) website = '' if comment.author[0].uri: website = comment.author[0].uri.text # depends on [control=['if'], data=[]] body = comment.content.text # add the comment as a dict to the end of the comments list self.add_comment(post=post, name=author_name, email=email, body=body, website=website, pub_date=comment_date) # depends on [control=['for'], data=['comment']] processed_posts.append(post_id) new_posts += 1 # depends on [control=['for'], data=[]] start_index += 500 # depends on [control=['while'], data=[]]
def to_inference_data(self): """Convert all available data to an InferenceData object. Note that if groups can not be created (i.e., there is no `trace`, so the `posterior` and `sample_stats` can not be extracted), then the InferenceData will not have those groups. """ return InferenceData( **{ "posterior": self.posterior_to_xarray(), "sample_stats": self.sample_stats_to_xarray(), "posterior_predictive": self.posterior_predictive_to_xarray(), "observed_data": self.observed_data_to_xarray(), } )
def function[to_inference_data, parameter[self]]: constant[Convert all available data to an InferenceData object. Note that if groups can not be created (i.e., there is no `trace`, so the `posterior` and `sample_stats` can not be extracted), then the InferenceData will not have those groups. ] return[call[name[InferenceData], parameter[]]]
keyword[def] identifier[to_inference_data] ( identifier[self] ): literal[string] keyword[return] identifier[InferenceData] ( **{ literal[string] : identifier[self] . identifier[posterior_to_xarray] (), literal[string] : identifier[self] . identifier[sample_stats_to_xarray] (), literal[string] : identifier[self] . identifier[posterior_predictive_to_xarray] (), literal[string] : identifier[self] . identifier[observed_data_to_xarray] (), } )
def to_inference_data(self): """Convert all available data to an InferenceData object. Note that if groups can not be created (i.e., there is no `trace`, so the `posterior` and `sample_stats` can not be extracted), then the InferenceData will not have those groups. """ return InferenceData(**{'posterior': self.posterior_to_xarray(), 'sample_stats': self.sample_stats_to_xarray(), 'posterior_predictive': self.posterior_predictive_to_xarray(), 'observed_data': self.observed_data_to_xarray()})
def deprecated(replacement=None): """A decorator which can be used to mark functions as deprecated.""" def outer(fun): msg = "psutil.%s is deprecated" % fun.__name__ if replacement is not None: msg += "; use %s instead" % replacement if fun.__doc__ is None: fun.__doc__ = msg @wraps(fun) def inner(*args, **kwargs): warnings.warn(msg, category=DeprecationWarning, stacklevel=2) return fun(*args, **kwargs) return inner return outer
def function[deprecated, parameter[replacement]]: constant[A decorator which can be used to mark functions as deprecated.] def function[outer, parameter[fun]]: variable[msg] assign[=] binary_operation[constant[psutil.%s is deprecated] <ast.Mod object at 0x7da2590d6920> name[fun].__name__] if compare[name[replacement] is_not constant[None]] begin[:] <ast.AugAssign object at 0x7da18f813c40> if compare[name[fun].__doc__ is constant[None]] begin[:] name[fun].__doc__ assign[=] name[msg] def function[inner, parameter[]]: call[name[warnings].warn, parameter[name[msg]]] return[call[name[fun], parameter[<ast.Starred object at 0x7da18f8135e0>]]] return[name[inner]] return[name[outer]]
keyword[def] identifier[deprecated] ( identifier[replacement] = keyword[None] ): literal[string] keyword[def] identifier[outer] ( identifier[fun] ): identifier[msg] = literal[string] % identifier[fun] . identifier[__name__] keyword[if] identifier[replacement] keyword[is] keyword[not] keyword[None] : identifier[msg] += literal[string] % identifier[replacement] keyword[if] identifier[fun] . identifier[__doc__] keyword[is] keyword[None] : identifier[fun] . identifier[__doc__] = identifier[msg] @ identifier[wraps] ( identifier[fun] ) keyword[def] identifier[inner] (* identifier[args] ,** identifier[kwargs] ): identifier[warnings] . identifier[warn] ( identifier[msg] , identifier[category] = identifier[DeprecationWarning] , identifier[stacklevel] = literal[int] ) keyword[return] identifier[fun] (* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[inner] keyword[return] identifier[outer]
def deprecated(replacement=None): """A decorator which can be used to mark functions as deprecated.""" def outer(fun): msg = 'psutil.%s is deprecated' % fun.__name__ if replacement is not None: msg += '; use %s instead' % replacement # depends on [control=['if'], data=['replacement']] if fun.__doc__ is None: fun.__doc__ = msg # depends on [control=['if'], data=[]] @wraps(fun) def inner(*args, **kwargs): warnings.warn(msg, category=DeprecationWarning, stacklevel=2) return fun(*args, **kwargs) return inner return outer
def plot_polarbar(scores, labels=None, labels_size=15, colors="default", distribution_means=None, distribution_sds=None, treshold=1.28, fig_size=(15, 15)): """ Polar bar chart. Parameters ---------- scores : list or dict Scores to plot. labels : list List of labels to be used for ticks. labels_size : int Label's size. colors : list or str List of colors or "default". distribution_means : int or list List of means to add a range ribbon. distribution_sds : int or list List of SDs to add a range ribbon. treshold : float Limits of the range ribbon (in terms of standart deviation from mean). fig_size : tuple Figure size. Returns ---------- plot : matplotlig figure The figure. Example ---------- >>> import neurokit as nk >>> fig = nk.plot_polarbar(scores=[1, 2, 3, 4, 5], labels=["A", "B", "C", "D", "E"], distribution_means=3, distribution_sds=1) >>> fig.show() Notes ---------- *Authors* - `Dominique Makowski <https://dominiquemakowski.github.io/>`_ *Dependencies* - matplotlib - numpy """ # Sanity check if isinstance(scores, dict): if labels is None: labels = list(scores.keys()) try: scores = [scores[key] for key in labels] except KeyError: print("NeuroKit Error: plot_polarbar(): labels and scores keys not matching. 
Recheck them.") # Parameters if colors == "default": if len(scores) < 9: colors = ["#f44336", "#9C27B0", "#3F51B5","#03A9F4", "#009688", "#8BC34A", "#FFEB3B", "#FF9800", "#795548"] else: colors = None if labels is None: labels = range(len(scores)) N = len(scores) theta = np.linspace(0.0, -2 * np.pi, N, endpoint=False) width = 2 * np.pi / N # Main plot = plt.figure(figsize=fig_size) layer1 = plot.add_subplot(111, projection="polar") bars1 = layer1.bar(theta+np.pi/len(scores), scores, width=width, bottom=0.0) layer1.yaxis.set_ticks(range(11)) layer1.yaxis.set_ticklabels([]) layer1.xaxis.set_ticks(theta+np.pi/len(scores)) layer1.xaxis.set_ticklabels(labels, fontsize=labels_size) for index, bar in enumerate(bars1): if colors is not None: bar.set_facecolor(colors[index]) bar.set_alpha(1) # Layer 2 if distribution_means is not None and distribution_sds is not None: # Sanity check if isinstance(distribution_means, int): distribution_means = [distribution_means]*N if isinstance(distribution_sds, int): distribution_sds = [distribution_sds]*N # TODO: add convertion if those parameter are dict bottoms, tops = normal_range(np.array(distribution_means), np.array(distribution_sds), treshold=treshold) tops = tops - bottoms layer2 = plot.add_subplot(111, polar=True) bars2 = layer2.bar(theta, tops, width=width, bottom=bottoms, linewidth=0) layer2.xaxis.set_ticks(theta+np.pi/len(scores)) layer2.xaxis.set_ticklabels(labels, fontsize=labels_size) for index, bar in enumerate(bars2): bar.set_facecolor("#607D8B") bar.set_alpha(0.3) return(plot)
def function[plot_polarbar, parameter[scores, labels, labels_size, colors, distribution_means, distribution_sds, treshold, fig_size]]: constant[ Polar bar chart. Parameters ---------- scores : list or dict Scores to plot. labels : list List of labels to be used for ticks. labels_size : int Label's size. colors : list or str List of colors or "default". distribution_means : int or list List of means to add a range ribbon. distribution_sds : int or list List of SDs to add a range ribbon. treshold : float Limits of the range ribbon (in terms of standart deviation from mean). fig_size : tuple Figure size. Returns ---------- plot : matplotlig figure The figure. Example ---------- >>> import neurokit as nk >>> fig = nk.plot_polarbar(scores=[1, 2, 3, 4, 5], labels=["A", "B", "C", "D", "E"], distribution_means=3, distribution_sds=1) >>> fig.show() Notes ---------- *Authors* - `Dominique Makowski <https://dominiquemakowski.github.io/>`_ *Dependencies* - matplotlib - numpy ] if call[name[isinstance], parameter[name[scores], name[dict]]] begin[:] if compare[name[labels] is constant[None]] begin[:] variable[labels] assign[=] call[name[list], parameter[call[name[scores].keys, parameter[]]]] <ast.Try object at 0x7da204962da0> if compare[name[colors] equal[==] constant[default]] begin[:] if compare[call[name[len], parameter[name[scores]]] less[<] constant[9]] begin[:] variable[colors] assign[=] list[[<ast.Constant object at 0x7da204963fd0>, <ast.Constant object at 0x7da204963100>, <ast.Constant object at 0x7da204961d20>, <ast.Constant object at 0x7da204962830>, <ast.Constant object at 0x7da204961c90>, <ast.Constant object at 0x7da204963c40>, <ast.Constant object at 0x7da204962500>, <ast.Constant object at 0x7da204961cf0>, <ast.Constant object at 0x7da2049608e0>]] if compare[name[labels] is constant[None]] begin[:] variable[labels] assign[=] call[name[range], parameter[call[name[len], parameter[name[scores]]]]] variable[N] assign[=] call[name[len], parameter[name[scores]]] 
variable[theta] assign[=] call[name[np].linspace, parameter[constant[0.0], binary_operation[<ast.UnaryOp object at 0x7da204963580> * name[np].pi], name[N]]] variable[width] assign[=] binary_operation[binary_operation[constant[2] * name[np].pi] / name[N]] variable[plot] assign[=] call[name[plt].figure, parameter[]] variable[layer1] assign[=] call[name[plot].add_subplot, parameter[constant[111]]] variable[bars1] assign[=] call[name[layer1].bar, parameter[binary_operation[name[theta] + binary_operation[name[np].pi / call[name[len], parameter[name[scores]]]]], name[scores]]] call[name[layer1].yaxis.set_ticks, parameter[call[name[range], parameter[constant[11]]]]] call[name[layer1].yaxis.set_ticklabels, parameter[list[[]]]] call[name[layer1].xaxis.set_ticks, parameter[binary_operation[name[theta] + binary_operation[name[np].pi / call[name[len], parameter[name[scores]]]]]]] call[name[layer1].xaxis.set_ticklabels, parameter[name[labels]]] for taget[tuple[[<ast.Name object at 0x7da20cabdfc0>, <ast.Name object at 0x7da20cabebf0>]]] in starred[call[name[enumerate], parameter[name[bars1]]]] begin[:] if compare[name[colors] is_not constant[None]] begin[:] call[name[bar].set_facecolor, parameter[call[name[colors]][name[index]]]] call[name[bar].set_alpha, parameter[constant[1]]] if <ast.BoolOp object at 0x7da20cabfeb0> begin[:] if call[name[isinstance], parameter[name[distribution_means], name[int]]] begin[:] variable[distribution_means] assign[=] binary_operation[list[[<ast.Name object at 0x7da20cabdd80>]] * name[N]] if call[name[isinstance], parameter[name[distribution_sds], name[int]]] begin[:] variable[distribution_sds] assign[=] binary_operation[list[[<ast.Name object at 0x7da20cabcb20>]] * name[N]] <ast.Tuple object at 0x7da20cabe410> assign[=] call[name[normal_range], parameter[call[name[np].array, parameter[name[distribution_means]]], call[name[np].array, parameter[name[distribution_sds]]]]] variable[tops] assign[=] binary_operation[name[tops] - name[bottoms]] 
variable[layer2] assign[=] call[name[plot].add_subplot, parameter[constant[111]]] variable[bars2] assign[=] call[name[layer2].bar, parameter[name[theta], name[tops]]] call[name[layer2].xaxis.set_ticks, parameter[binary_operation[name[theta] + binary_operation[name[np].pi / call[name[len], parameter[name[scores]]]]]]] call[name[layer2].xaxis.set_ticklabels, parameter[name[labels]]] for taget[tuple[[<ast.Name object at 0x7da20cabfa30>, <ast.Name object at 0x7da20cabf400>]]] in starred[call[name[enumerate], parameter[name[bars2]]]] begin[:] call[name[bar].set_facecolor, parameter[constant[#607D8B]]] call[name[bar].set_alpha, parameter[constant[0.3]]] return[name[plot]]
keyword[def] identifier[plot_polarbar] ( identifier[scores] , identifier[labels] = keyword[None] , identifier[labels_size] = literal[int] , identifier[colors] = literal[string] , identifier[distribution_means] = keyword[None] , identifier[distribution_sds] = keyword[None] , identifier[treshold] = literal[int] , identifier[fig_size] =( literal[int] , literal[int] )): literal[string] keyword[if] identifier[isinstance] ( identifier[scores] , identifier[dict] ): keyword[if] identifier[labels] keyword[is] keyword[None] : identifier[labels] = identifier[list] ( identifier[scores] . identifier[keys] ()) keyword[try] : identifier[scores] =[ identifier[scores] [ identifier[key] ] keyword[for] identifier[key] keyword[in] identifier[labels] ] keyword[except] identifier[KeyError] : identifier[print] ( literal[string] ) keyword[if] identifier[colors] == literal[string] : keyword[if] identifier[len] ( identifier[scores] )< literal[int] : identifier[colors] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] keyword[else] : identifier[colors] = keyword[None] keyword[if] identifier[labels] keyword[is] keyword[None] : identifier[labels] = identifier[range] ( identifier[len] ( identifier[scores] )) identifier[N] = identifier[len] ( identifier[scores] ) identifier[theta] = identifier[np] . identifier[linspace] ( literal[int] ,- literal[int] * identifier[np] . identifier[pi] , identifier[N] , identifier[endpoint] = keyword[False] ) identifier[width] = literal[int] * identifier[np] . identifier[pi] / identifier[N] identifier[plot] = identifier[plt] . identifier[figure] ( identifier[figsize] = identifier[fig_size] ) identifier[layer1] = identifier[plot] . identifier[add_subplot] ( literal[int] , identifier[projection] = literal[string] ) identifier[bars1] = identifier[layer1] . identifier[bar] ( identifier[theta] + identifier[np] . 
identifier[pi] / identifier[len] ( identifier[scores] ), identifier[scores] , identifier[width] = identifier[width] , identifier[bottom] = literal[int] ) identifier[layer1] . identifier[yaxis] . identifier[set_ticks] ( identifier[range] ( literal[int] )) identifier[layer1] . identifier[yaxis] . identifier[set_ticklabels] ([]) identifier[layer1] . identifier[xaxis] . identifier[set_ticks] ( identifier[theta] + identifier[np] . identifier[pi] / identifier[len] ( identifier[scores] )) identifier[layer1] . identifier[xaxis] . identifier[set_ticklabels] ( identifier[labels] , identifier[fontsize] = identifier[labels_size] ) keyword[for] identifier[index] , identifier[bar] keyword[in] identifier[enumerate] ( identifier[bars1] ): keyword[if] identifier[colors] keyword[is] keyword[not] keyword[None] : identifier[bar] . identifier[set_facecolor] ( identifier[colors] [ identifier[index] ]) identifier[bar] . identifier[set_alpha] ( literal[int] ) keyword[if] identifier[distribution_means] keyword[is] keyword[not] keyword[None] keyword[and] identifier[distribution_sds] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[isinstance] ( identifier[distribution_means] , identifier[int] ): identifier[distribution_means] =[ identifier[distribution_means] ]* identifier[N] keyword[if] identifier[isinstance] ( identifier[distribution_sds] , identifier[int] ): identifier[distribution_sds] =[ identifier[distribution_sds] ]* identifier[N] identifier[bottoms] , identifier[tops] = identifier[normal_range] ( identifier[np] . identifier[array] ( identifier[distribution_means] ), identifier[np] . identifier[array] ( identifier[distribution_sds] ), identifier[treshold] = identifier[treshold] ) identifier[tops] = identifier[tops] - identifier[bottoms] identifier[layer2] = identifier[plot] . identifier[add_subplot] ( literal[int] , identifier[polar] = keyword[True] ) identifier[bars2] = identifier[layer2] . 
identifier[bar] ( identifier[theta] , identifier[tops] , identifier[width] = identifier[width] , identifier[bottom] = identifier[bottoms] , identifier[linewidth] = literal[int] ) identifier[layer2] . identifier[xaxis] . identifier[set_ticks] ( identifier[theta] + identifier[np] . identifier[pi] / identifier[len] ( identifier[scores] )) identifier[layer2] . identifier[xaxis] . identifier[set_ticklabels] ( identifier[labels] , identifier[fontsize] = identifier[labels_size] ) keyword[for] identifier[index] , identifier[bar] keyword[in] identifier[enumerate] ( identifier[bars2] ): identifier[bar] . identifier[set_facecolor] ( literal[string] ) identifier[bar] . identifier[set_alpha] ( literal[int] ) keyword[return] ( identifier[plot] )
def plot_polarbar(scores, labels=None, labels_size=15, colors='default', distribution_means=None, distribution_sds=None, treshold=1.28, fig_size=(15, 15)): """ Polar bar chart. Parameters ---------- scores : list or dict Scores to plot. labels : list List of labels to be used for ticks. labels_size : int Label's size. colors : list or str List of colors or "default". distribution_means : int or list List of means to add a range ribbon. distribution_sds : int or list List of SDs to add a range ribbon. treshold : float Limits of the range ribbon (in terms of standart deviation from mean). fig_size : tuple Figure size. Returns ---------- plot : matplotlig figure The figure. Example ---------- >>> import neurokit as nk >>> fig = nk.plot_polarbar(scores=[1, 2, 3, 4, 5], labels=["A", "B", "C", "D", "E"], distribution_means=3, distribution_sds=1) >>> fig.show() Notes ---------- *Authors* - `Dominique Makowski <https://dominiquemakowski.github.io/>`_ *Dependencies* - matplotlib - numpy """ # Sanity check if isinstance(scores, dict): if labels is None: labels = list(scores.keys()) # depends on [control=['if'], data=['labels']] try: scores = [scores[key] for key in labels] # depends on [control=['try'], data=[]] except KeyError: print('NeuroKit Error: plot_polarbar(): labels and scores keys not matching. 
Recheck them.') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # Parameters if colors == 'default': if len(scores) < 9: colors = ['#f44336', '#9C27B0', '#3F51B5', '#03A9F4', '#009688', '#8BC34A', '#FFEB3B', '#FF9800', '#795548'] # depends on [control=['if'], data=[]] else: colors = None # depends on [control=['if'], data=['colors']] if labels is None: labels = range(len(scores)) # depends on [control=['if'], data=['labels']] N = len(scores) theta = np.linspace(0.0, -2 * np.pi, N, endpoint=False) width = 2 * np.pi / N # Main plot = plt.figure(figsize=fig_size) layer1 = plot.add_subplot(111, projection='polar') bars1 = layer1.bar(theta + np.pi / len(scores), scores, width=width, bottom=0.0) layer1.yaxis.set_ticks(range(11)) layer1.yaxis.set_ticklabels([]) layer1.xaxis.set_ticks(theta + np.pi / len(scores)) layer1.xaxis.set_ticklabels(labels, fontsize=labels_size) for (index, bar) in enumerate(bars1): if colors is not None: bar.set_facecolor(colors[index]) # depends on [control=['if'], data=['colors']] bar.set_alpha(1) # depends on [control=['for'], data=[]] # Layer 2 if distribution_means is not None and distribution_sds is not None: # Sanity check if isinstance(distribution_means, int): distribution_means = [distribution_means] * N # depends on [control=['if'], data=[]] if isinstance(distribution_sds, int): distribution_sds = [distribution_sds] * N # depends on [control=['if'], data=[]] # TODO: add convertion if those parameter are dict (bottoms, tops) = normal_range(np.array(distribution_means), np.array(distribution_sds), treshold=treshold) tops = tops - bottoms layer2 = plot.add_subplot(111, polar=True) bars2 = layer2.bar(theta, tops, width=width, bottom=bottoms, linewidth=0) layer2.xaxis.set_ticks(theta + np.pi / len(scores)) layer2.xaxis.set_ticklabels(labels, fontsize=labels_size) for (index, bar) in enumerate(bars2): bar.set_facecolor('#607D8B') bar.set_alpha(0.3) # depends on [control=['for'], data=[]] # depends on 
[control=['if'], data=[]] return plot
def _remove_features(layer): """Remove features which do not have information for InaSAFE or an invalid geometry. :param layer: The vector layer. :type layer: QgsVectorLayer """ # Get the layer purpose of the layer. layer_purpose = layer.keywords['layer_purpose'] layer_subcategory = layer.keywords.get(layer_purpose) compulsory_field = get_compulsory_fields(layer_purpose, layer_subcategory) inasafe_fields = layer.keywords['inasafe_fields'] # Compulsory fields can be list of field name or single field name. # We need to iterate through all of them field_names = inasafe_fields.get(compulsory_field['key']) if not isinstance(field_names, list): field_names = [field_names] for field_name in field_names: if not field_name: message = 'Keyword %s is missing from %s' % ( compulsory_field['key'], layer_purpose) raise InvalidKeywordsForProcessingAlgorithm(message) index = layer.fields().lookupField(field_name) request = QgsFeatureRequest() request.setSubsetOfAttributes([field_name], layer.fields()) layer.startEditing() i = 0 for feature in layer.getFeatures(request): feat_attr = feature.attributes()[index] if (feat_attr is None or (hasattr(feat_attr, 'isNull') and feat_attr.isNull())): if layer_purpose == 'hazard': # Remove the feature if the hazard is null. layer.deleteFeature(feature.id()) i += 1 elif layer_purpose == 'aggregation': # Put the ID if the value is null. layer.changeAttributeValue( feature.id(), index, str(feature.id())) elif layer_purpose == 'exposure': # Put an empty value, the value mapping will take care of # it in the 'other' group. layer.changeAttributeValue( feature.id(), index, '') # Check if there is en empty geometry. geometry = feature.geometry() if not geometry: layer.deleteFeature(feature.id()) i += 1 continue # Check if the geometry is empty. if geometry.isEmpty(): layer.deleteFeature(feature.id()) i += 1 continue # Check if the geometry is valid. 
if not geometry.isGeosValid(): # polygonize can produce some invalid geometries # For instance a polygon like this, sharing a same point : # _______ # | ___|__ # | |__| | # |________| # layer.deleteFeature(feature.id()) # i += 1 pass # TODO We need to add more tests # like checking if the value is in the value_mapping. layer.commitChanges() if i: LOGGER.critical( 'Features which have been removed from %s : %s' % (layer.keywords['layer_purpose'], i)) else: LOGGER.info( 'No feature has been removed from %s during the vector layer ' 'preparation' % layer.keywords['layer_purpose'])
def function[_remove_features, parameter[layer]]: constant[Remove features which do not have information for InaSAFE or an invalid geometry. :param layer: The vector layer. :type layer: QgsVectorLayer ] variable[layer_purpose] assign[=] call[name[layer].keywords][constant[layer_purpose]] variable[layer_subcategory] assign[=] call[name[layer].keywords.get, parameter[name[layer_purpose]]] variable[compulsory_field] assign[=] call[name[get_compulsory_fields], parameter[name[layer_purpose], name[layer_subcategory]]] variable[inasafe_fields] assign[=] call[name[layer].keywords][constant[inasafe_fields]] variable[field_names] assign[=] call[name[inasafe_fields].get, parameter[call[name[compulsory_field]][constant[key]]]] if <ast.UnaryOp object at 0x7da1b0ca07c0> begin[:] variable[field_names] assign[=] list[[<ast.Name object at 0x7da1b0ca15a0>]] for taget[name[field_name]] in starred[name[field_names]] begin[:] if <ast.UnaryOp object at 0x7da1b0ca06a0> begin[:] variable[message] assign[=] binary_operation[constant[Keyword %s is missing from %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b0ca2d10>, <ast.Name object at 0x7da1b0ca3c70>]]] <ast.Raise object at 0x7da1b0ca0940> variable[index] assign[=] call[call[name[layer].fields, parameter[]].lookupField, parameter[name[field_name]]] variable[request] assign[=] call[name[QgsFeatureRequest], parameter[]] call[name[request].setSubsetOfAttributes, parameter[list[[<ast.Name object at 0x7da1b0ca03d0>]], call[name[layer].fields, parameter[]]]] call[name[layer].startEditing, parameter[]] variable[i] assign[=] constant[0] for taget[name[feature]] in starred[call[name[layer].getFeatures, parameter[name[request]]]] begin[:] variable[feat_attr] assign[=] call[call[name[feature].attributes, parameter[]]][name[index]] if <ast.BoolOp object at 0x7da1b0ca1cc0> begin[:] if compare[name[layer_purpose] equal[==] constant[hazard]] begin[:] call[name[layer].deleteFeature, parameter[call[name[feature].id, 
parameter[]]]] <ast.AugAssign object at 0x7da1b0ca0ee0> variable[geometry] assign[=] call[name[feature].geometry, parameter[]] if <ast.UnaryOp object at 0x7da1b0ca1690> begin[:] call[name[layer].deleteFeature, parameter[call[name[feature].id, parameter[]]]] <ast.AugAssign object at 0x7da1b0ca1d80> continue if call[name[geometry].isEmpty, parameter[]] begin[:] call[name[layer].deleteFeature, parameter[call[name[feature].id, parameter[]]]] <ast.AugAssign object at 0x7da1b0ca26b0> continue if <ast.UnaryOp object at 0x7da1b0ca06d0> begin[:] pass call[name[layer].commitChanges, parameter[]] if name[i] begin[:] call[name[LOGGER].critical, parameter[binary_operation[constant[Features which have been removed from %s : %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b0ca30d0>, <ast.Name object at 0x7da1b0ca37c0>]]]]]
keyword[def] identifier[_remove_features] ( identifier[layer] ): literal[string] identifier[layer_purpose] = identifier[layer] . identifier[keywords] [ literal[string] ] identifier[layer_subcategory] = identifier[layer] . identifier[keywords] . identifier[get] ( identifier[layer_purpose] ) identifier[compulsory_field] = identifier[get_compulsory_fields] ( identifier[layer_purpose] , identifier[layer_subcategory] ) identifier[inasafe_fields] = identifier[layer] . identifier[keywords] [ literal[string] ] identifier[field_names] = identifier[inasafe_fields] . identifier[get] ( identifier[compulsory_field] [ literal[string] ]) keyword[if] keyword[not] identifier[isinstance] ( identifier[field_names] , identifier[list] ): identifier[field_names] =[ identifier[field_names] ] keyword[for] identifier[field_name] keyword[in] identifier[field_names] : keyword[if] keyword[not] identifier[field_name] : identifier[message] = literal[string] %( identifier[compulsory_field] [ literal[string] ], identifier[layer_purpose] ) keyword[raise] identifier[InvalidKeywordsForProcessingAlgorithm] ( identifier[message] ) identifier[index] = identifier[layer] . identifier[fields] (). identifier[lookupField] ( identifier[field_name] ) identifier[request] = identifier[QgsFeatureRequest] () identifier[request] . identifier[setSubsetOfAttributes] ([ identifier[field_name] ], identifier[layer] . identifier[fields] ()) identifier[layer] . identifier[startEditing] () identifier[i] = literal[int] keyword[for] identifier[feature] keyword[in] identifier[layer] . identifier[getFeatures] ( identifier[request] ): identifier[feat_attr] = identifier[feature] . identifier[attributes] ()[ identifier[index] ] keyword[if] ( identifier[feat_attr] keyword[is] keyword[None] keyword[or] ( identifier[hasattr] ( identifier[feat_attr] , literal[string] ) keyword[and] identifier[feat_attr] . identifier[isNull] ())): keyword[if] identifier[layer_purpose] == literal[string] : identifier[layer] . 
identifier[deleteFeature] ( identifier[feature] . identifier[id] ()) identifier[i] += literal[int] keyword[elif] identifier[layer_purpose] == literal[string] : identifier[layer] . identifier[changeAttributeValue] ( identifier[feature] . identifier[id] (), identifier[index] , identifier[str] ( identifier[feature] . identifier[id] ())) keyword[elif] identifier[layer_purpose] == literal[string] : identifier[layer] . identifier[changeAttributeValue] ( identifier[feature] . identifier[id] (), identifier[index] , literal[string] ) identifier[geometry] = identifier[feature] . identifier[geometry] () keyword[if] keyword[not] identifier[geometry] : identifier[layer] . identifier[deleteFeature] ( identifier[feature] . identifier[id] ()) identifier[i] += literal[int] keyword[continue] keyword[if] identifier[geometry] . identifier[isEmpty] (): identifier[layer] . identifier[deleteFeature] ( identifier[feature] . identifier[id] ()) identifier[i] += literal[int] keyword[continue] keyword[if] keyword[not] identifier[geometry] . identifier[isGeosValid] (): keyword[pass] identifier[layer] . identifier[commitChanges] () keyword[if] identifier[i] : identifier[LOGGER] . identifier[critical] ( literal[string] %( identifier[layer] . identifier[keywords] [ literal[string] ], identifier[i] )) keyword[else] : identifier[LOGGER] . identifier[info] ( literal[string] literal[string] % identifier[layer] . identifier[keywords] [ literal[string] ])
def _remove_features(layer): """Remove features which do not have information for InaSAFE or an invalid geometry. :param layer: The vector layer. :type layer: QgsVectorLayer """ # Get the layer purpose of the layer. layer_purpose = layer.keywords['layer_purpose'] layer_subcategory = layer.keywords.get(layer_purpose) compulsory_field = get_compulsory_fields(layer_purpose, layer_subcategory) inasafe_fields = layer.keywords['inasafe_fields'] # Compulsory fields can be list of field name or single field name. # We need to iterate through all of them field_names = inasafe_fields.get(compulsory_field['key']) if not isinstance(field_names, list): field_names = [field_names] # depends on [control=['if'], data=[]] for field_name in field_names: if not field_name: message = 'Keyword %s is missing from %s' % (compulsory_field['key'], layer_purpose) raise InvalidKeywordsForProcessingAlgorithm(message) # depends on [control=['if'], data=[]] index = layer.fields().lookupField(field_name) request = QgsFeatureRequest() request.setSubsetOfAttributes([field_name], layer.fields()) layer.startEditing() i = 0 for feature in layer.getFeatures(request): feat_attr = feature.attributes()[index] if feat_attr is None or (hasattr(feat_attr, 'isNull') and feat_attr.isNull()): if layer_purpose == 'hazard': # Remove the feature if the hazard is null. layer.deleteFeature(feature.id()) i += 1 # depends on [control=['if'], data=[]] elif layer_purpose == 'aggregation': # Put the ID if the value is null. layer.changeAttributeValue(feature.id(), index, str(feature.id())) # depends on [control=['if'], data=[]] elif layer_purpose == 'exposure': # Put an empty value, the value mapping will take care of # it in the 'other' group. layer.changeAttributeValue(feature.id(), index, '') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Check if there is en empty geometry. 
geometry = feature.geometry() if not geometry: layer.deleteFeature(feature.id()) i += 1 continue # depends on [control=['if'], data=[]] # Check if the geometry is empty. if geometry.isEmpty(): layer.deleteFeature(feature.id()) i += 1 continue # depends on [control=['if'], data=[]] # Check if the geometry is valid. if not geometry.isGeosValid(): # polygonize can produce some invalid geometries # For instance a polygon like this, sharing a same point : # _______ # | ___|__ # | |__| | # |________| # layer.deleteFeature(feature.id()) # i += 1 pass # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['feature']] # TODO We need to add more tests # like checking if the value is in the value_mapping. layer.commitChanges() if i: LOGGER.critical('Features which have been removed from %s : %s' % (layer.keywords['layer_purpose'], i)) # depends on [control=['if'], data=[]] else: LOGGER.info('No feature has been removed from %s during the vector layer preparation' % layer.keywords['layer_purpose']) # depends on [control=['for'], data=['field_name']]
def request(self, endpoint, method="GET", data="", raw=False, params=None, retries=None, client=None, headers=None, timeout=None, **kwargs): """ Make request to endpoint with OAuth. Returns dictionary with response data. :param endpoint: endpoint path, or a fully qualified URL if raw=True. :param method: GET (default), POST or DELETE. :param data: data to send in the request body. :param raw: use endpoint as entered without trying to modify it. :param params: dictionary of parameters to send in the query string. :param retries: number of times to retry if a request fails. :param client: OAuth client data, if False do request without OAuth. :param headers: dictionary of HTTP headers. :param timeout: the timeout for a request, in seconds. Example: >>> pump.request('https://e14n.com/api/user/evan/profile', raw=True) {u'displayName': u'Evan Prodromou', u'favorites': {u'totalItems': 7227, u'url': u'https://e14n.com/api/user/evan/favorites'}, u'id': u'acct:evan@e14n.com', u'image': {u'height': 96, u'url': u'https://e14n.com/uploads/evan/2014/9/24/knyf1g_thumb.jpg', u'width': 96}, u'liked': False, u'location': {u'displayName': u'Montreal, Quebec, Canada', u'objectType': u'place'}, u'objectType': u'person', u'preferredUsername': u'evan', u'published': u'2013-02-20T15:34:52Z', u'summary': u'I wanna make it with you. 
http://payb.tc/evanp', u'updated': u'2014-09-24T02:38:32Z', u'url': u'https://e14n.com/evan'} """ retries = self.retries if retries is None else retries timeout = self.timeout if timeout is None else timeout # check client has been setup if client is None: client = self.setup_oauth_client(endpoint) c = client.client fnc = OAuth1Session(c.client_key, client_secret=c.client_secret, resource_owner_key=c.resource_owner_key, resource_owner_secret=c.resource_owner_secret ) elif client is False: fnc = requests params = {} if params is None else params if data and isinstance(data, dict): data = json.dumps(data) if not raw: url = self._build_url(endpoint) else: url = endpoint headers = headers or {"Content-Type": "application/json"} request = { "headers": headers, "params": params, "timeout": timeout, } request.update(kwargs) if method == "POST": fnc = fnc.post request.update({"data": data}) elif method == "PUT": fnc = fnc.put request.update({"data": data}) elif method == "GET": fnc = fnc.get elif method == "DELETE": fnc = fnc.delete for attempt in range(1 + retries): response = self._requester( fnc=fnc, endpoint=endpoint, raw=raw, **request ) if response.status_code == 200: # huray! return response.json() if response.status_code == 400: # can't do much try: try: data = response.json() error = data["error"] except ValueError: error = response.content if not error: raise IndexError # yesss i know. except IndexError: error = "400 - Bad request." raise PyPumpException(error) if response.ok: return response error = "Request Failed to {url} (response: {data} | status: {status})" error = error.format( url=url, data=response.content, status=response.status_code ) raise PyPumpException(error)
def function[request, parameter[self, endpoint, method, data, raw, params, retries, client, headers, timeout]]: constant[ Make request to endpoint with OAuth. Returns dictionary with response data. :param endpoint: endpoint path, or a fully qualified URL if raw=True. :param method: GET (default), POST or DELETE. :param data: data to send in the request body. :param raw: use endpoint as entered without trying to modify it. :param params: dictionary of parameters to send in the query string. :param retries: number of times to retry if a request fails. :param client: OAuth client data, if False do request without OAuth. :param headers: dictionary of HTTP headers. :param timeout: the timeout for a request, in seconds. Example: >>> pump.request('https://e14n.com/api/user/evan/profile', raw=True) {u'displayName': u'Evan Prodromou', u'favorites': {u'totalItems': 7227, u'url': u'https://e14n.com/api/user/evan/favorites'}, u'id': u'acct:evan@e14n.com', u'image': {u'height': 96, u'url': u'https://e14n.com/uploads/evan/2014/9/24/knyf1g_thumb.jpg', u'width': 96}, u'liked': False, u'location': {u'displayName': u'Montreal, Quebec, Canada', u'objectType': u'place'}, u'objectType': u'person', u'preferredUsername': u'evan', u'published': u'2013-02-20T15:34:52Z', u'summary': u'I wanna make it with you. 
http://payb.tc/evanp', u'updated': u'2014-09-24T02:38:32Z', u'url': u'https://e14n.com/evan'} ] variable[retries] assign[=] <ast.IfExp object at 0x7da1b2648b20> variable[timeout] assign[=] <ast.IfExp object at 0x7da1b26495a0> if compare[name[client] is constant[None]] begin[:] variable[client] assign[=] call[name[self].setup_oauth_client, parameter[name[endpoint]]] variable[c] assign[=] name[client].client variable[fnc] assign[=] call[name[OAuth1Session], parameter[name[c].client_key]] variable[params] assign[=] <ast.IfExp object at 0x7da1b28df5b0> if <ast.BoolOp object at 0x7da1b28dc370> begin[:] variable[data] assign[=] call[name[json].dumps, parameter[name[data]]] if <ast.UnaryOp object at 0x7da1b26481c0> begin[:] variable[url] assign[=] call[name[self]._build_url, parameter[name[endpoint]]] variable[headers] assign[=] <ast.BoolOp object at 0x7da1b264a680> variable[request] assign[=] dictionary[[<ast.Constant object at 0x7da1b264a740>, <ast.Constant object at 0x7da1b26493c0>, <ast.Constant object at 0x7da1b26498d0>], [<ast.Name object at 0x7da1b2649060>, <ast.Name object at 0x7da1b2648190>, <ast.Name object at 0x7da1b26489a0>]] call[name[request].update, parameter[name[kwargs]]] if compare[name[method] equal[==] constant[POST]] begin[:] variable[fnc] assign[=] name[fnc].post call[name[request].update, parameter[dictionary[[<ast.Constant object at 0x7da1b264ab30>], [<ast.Name object at 0x7da1b2649510>]]]] for taget[name[attempt]] in starred[call[name[range], parameter[binary_operation[constant[1] + name[retries]]]]] begin[:] variable[response] assign[=] call[name[self]._requester, parameter[]] if compare[name[response].status_code equal[==] constant[200]] begin[:] return[call[name[response].json, parameter[]]] if compare[name[response].status_code equal[==] constant[400]] begin[:] <ast.Try object at 0x7da1b26297e0> <ast.Raise object at 0x7da1b2629090> if name[response].ok begin[:] return[name[response]] variable[error] assign[=] constant[Request Failed to {url} 
(response: {data} | status: {status})] variable[error] assign[=] call[name[error].format, parameter[]] <ast.Raise object at 0x7da1b262b580>
keyword[def] identifier[request] ( identifier[self] , identifier[endpoint] , identifier[method] = literal[string] , identifier[data] = literal[string] , identifier[raw] = keyword[False] , identifier[params] = keyword[None] , identifier[retries] = keyword[None] , identifier[client] = keyword[None] , identifier[headers] = keyword[None] , identifier[timeout] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[retries] = identifier[self] . identifier[retries] keyword[if] identifier[retries] keyword[is] keyword[None] keyword[else] identifier[retries] identifier[timeout] = identifier[self] . identifier[timeout] keyword[if] identifier[timeout] keyword[is] keyword[None] keyword[else] identifier[timeout] keyword[if] identifier[client] keyword[is] keyword[None] : identifier[client] = identifier[self] . identifier[setup_oauth_client] ( identifier[endpoint] ) identifier[c] = identifier[client] . identifier[client] identifier[fnc] = identifier[OAuth1Session] ( identifier[c] . identifier[client_key] , identifier[client_secret] = identifier[c] . identifier[client_secret] , identifier[resource_owner_key] = identifier[c] . identifier[resource_owner_key] , identifier[resource_owner_secret] = identifier[c] . identifier[resource_owner_secret] ) keyword[elif] identifier[client] keyword[is] keyword[False] : identifier[fnc] = identifier[requests] identifier[params] ={} keyword[if] identifier[params] keyword[is] keyword[None] keyword[else] identifier[params] keyword[if] identifier[data] keyword[and] identifier[isinstance] ( identifier[data] , identifier[dict] ): identifier[data] = identifier[json] . identifier[dumps] ( identifier[data] ) keyword[if] keyword[not] identifier[raw] : identifier[url] = identifier[self] . 
identifier[_build_url] ( identifier[endpoint] ) keyword[else] : identifier[url] = identifier[endpoint] identifier[headers] = identifier[headers] keyword[or] { literal[string] : literal[string] } identifier[request] ={ literal[string] : identifier[headers] , literal[string] : identifier[params] , literal[string] : identifier[timeout] , } identifier[request] . identifier[update] ( identifier[kwargs] ) keyword[if] identifier[method] == literal[string] : identifier[fnc] = identifier[fnc] . identifier[post] identifier[request] . identifier[update] ({ literal[string] : identifier[data] }) keyword[elif] identifier[method] == literal[string] : identifier[fnc] = identifier[fnc] . identifier[put] identifier[request] . identifier[update] ({ literal[string] : identifier[data] }) keyword[elif] identifier[method] == literal[string] : identifier[fnc] = identifier[fnc] . identifier[get] keyword[elif] identifier[method] == literal[string] : identifier[fnc] = identifier[fnc] . identifier[delete] keyword[for] identifier[attempt] keyword[in] identifier[range] ( literal[int] + identifier[retries] ): identifier[response] = identifier[self] . identifier[_requester] ( identifier[fnc] = identifier[fnc] , identifier[endpoint] = identifier[endpoint] , identifier[raw] = identifier[raw] , ** identifier[request] ) keyword[if] identifier[response] . identifier[status_code] == literal[int] : keyword[return] identifier[response] . identifier[json] () keyword[if] identifier[response] . identifier[status_code] == literal[int] : keyword[try] : keyword[try] : identifier[data] = identifier[response] . identifier[json] () identifier[error] = identifier[data] [ literal[string] ] keyword[except] identifier[ValueError] : identifier[error] = identifier[response] . 
identifier[content] keyword[if] keyword[not] identifier[error] : keyword[raise] identifier[IndexError] keyword[except] identifier[IndexError] : identifier[error] = literal[string] keyword[raise] identifier[PyPumpException] ( identifier[error] ) keyword[if] identifier[response] . identifier[ok] : keyword[return] identifier[response] identifier[error] = literal[string] identifier[error] = identifier[error] . identifier[format] ( identifier[url] = identifier[url] , identifier[data] = identifier[response] . identifier[content] , identifier[status] = identifier[response] . identifier[status_code] ) keyword[raise] identifier[PyPumpException] ( identifier[error] )
def request(self, endpoint, method='GET', data='', raw=False, params=None, retries=None, client=None, headers=None, timeout=None, **kwargs): """ Make request to endpoint with OAuth. Returns dictionary with response data. :param endpoint: endpoint path, or a fully qualified URL if raw=True. :param method: GET (default), POST or DELETE. :param data: data to send in the request body. :param raw: use endpoint as entered without trying to modify it. :param params: dictionary of parameters to send in the query string. :param retries: number of times to retry if a request fails. :param client: OAuth client data, if False do request without OAuth. :param headers: dictionary of HTTP headers. :param timeout: the timeout for a request, in seconds. Example: >>> pump.request('https://e14n.com/api/user/evan/profile', raw=True) {u'displayName': u'Evan Prodromou', u'favorites': {u'totalItems': 7227, u'url': u'https://e14n.com/api/user/evan/favorites'}, u'id': u'acct:evan@e14n.com', u'image': {u'height': 96, u'url': u'https://e14n.com/uploads/evan/2014/9/24/knyf1g_thumb.jpg', u'width': 96}, u'liked': False, u'location': {u'displayName': u'Montreal, Quebec, Canada', u'objectType': u'place'}, u'objectType': u'person', u'preferredUsername': u'evan', u'published': u'2013-02-20T15:34:52Z', u'summary': u'I wanna make it with you. 
http://payb.tc/evanp', u'updated': u'2014-09-24T02:38:32Z', u'url': u'https://e14n.com/evan'} """ retries = self.retries if retries is None else retries timeout = self.timeout if timeout is None else timeout # check client has been setup if client is None: client = self.setup_oauth_client(endpoint) c = client.client fnc = OAuth1Session(c.client_key, client_secret=c.client_secret, resource_owner_key=c.resource_owner_key, resource_owner_secret=c.resource_owner_secret) # depends on [control=['if'], data=['client']] elif client is False: fnc = requests # depends on [control=['if'], data=[]] params = {} if params is None else params if data and isinstance(data, dict): data = json.dumps(data) # depends on [control=['if'], data=[]] if not raw: url = self._build_url(endpoint) # depends on [control=['if'], data=[]] else: url = endpoint headers = headers or {'Content-Type': 'application/json'} request = {'headers': headers, 'params': params, 'timeout': timeout} request.update(kwargs) if method == 'POST': fnc = fnc.post request.update({'data': data}) # depends on [control=['if'], data=[]] elif method == 'PUT': fnc = fnc.put request.update({'data': data}) # depends on [control=['if'], data=[]] elif method == 'GET': fnc = fnc.get # depends on [control=['if'], data=[]] elif method == 'DELETE': fnc = fnc.delete # depends on [control=['if'], data=[]] for attempt in range(1 + retries): response = self._requester(fnc=fnc, endpoint=endpoint, raw=raw, **request) if response.status_code == 200: # huray! return response.json() # depends on [control=['if'], data=[]] if response.status_code == 400: # can't do much try: try: data = response.json() error = data['error'] # depends on [control=['try'], data=[]] except ValueError: error = response.content # depends on [control=['except'], data=[]] if not error: raise IndexError # yesss i know. # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except IndexError: error = '400 - Bad request.' 
# depends on [control=['except'], data=[]] raise PyPumpException(error) # depends on [control=['if'], data=[]] if response.ok: return response # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] error = 'Request Failed to {url} (response: {data} | status: {status})' error = error.format(url=url, data=response.content, status=response.status_code) raise PyPumpException(error)
def update_user_pool(UserPoolId=None, Policies=None, LambdaConfig=None, AutoVerifiedAttributes=None, SmsVerificationMessage=None, EmailVerificationMessage=None, EmailVerificationSubject=None, SmsAuthenticationMessage=None, MfaConfiguration=None, DeviceConfiguration=None, EmailConfiguration=None, SmsConfiguration=None, UserPoolTags=None, AdminCreateUserConfig=None): """ Updates the specified user pool with the specified attributes. See also: AWS API Documentation :example: response = client.update_user_pool( UserPoolId='string', Policies={ 'PasswordPolicy': { 'MinimumLength': 123, 'RequireUppercase': True|False, 'RequireLowercase': True|False, 'RequireNumbers': True|False, 'RequireSymbols': True|False } }, LambdaConfig={ 'PreSignUp': 'string', 'CustomMessage': 'string', 'PostConfirmation': 'string', 'PreAuthentication': 'string', 'PostAuthentication': 'string', 'DefineAuthChallenge': 'string', 'CreateAuthChallenge': 'string', 'VerifyAuthChallengeResponse': 'string' }, AutoVerifiedAttributes=[ 'phone_number'|'email', ], SmsVerificationMessage='string', EmailVerificationMessage='string', EmailVerificationSubject='string', SmsAuthenticationMessage='string', MfaConfiguration='OFF'|'ON'|'OPTIONAL', DeviceConfiguration={ 'ChallengeRequiredOnNewDevice': True|False, 'DeviceOnlyRememberedOnUserPrompt': True|False }, EmailConfiguration={ 'SourceArn': 'string', 'ReplyToEmailAddress': 'string' }, SmsConfiguration={ 'SnsCallerArn': 'string', 'ExternalId': 'string' }, UserPoolTags={ 'string': 'string' }, AdminCreateUserConfig={ 'AllowAdminCreateUserOnly': True|False, 'UnusedAccountValidityDays': 123, 'InviteMessageTemplate': { 'SMSMessage': 'string', 'EmailMessage': 'string', 'EmailSubject': 'string' } } ) :type UserPoolId: string :param UserPoolId: [REQUIRED] The user pool ID for the user pool you want to update. :type Policies: dict :param Policies: A container with the policies you wish to update in a user pool. 
PasswordPolicy (dict) --A container for information about the user pool password policy. MinimumLength (integer) --The minimum length of the password policy that you have set. Cannot be less than 6. RequireUppercase (boolean) --In the password policy that you have set, refers to whether you have required users to use at least one uppercase letter in their password. RequireLowercase (boolean) --In the password policy that you have set, refers to whether you have required users to use at least one lowercase letter in their password. RequireNumbers (boolean) --In the password policy that you have set, refers to whether you have required users to use at least one number in their password. RequireSymbols (boolean) --In the password policy that you have set, refers to whether you have required users to use at least one symbol in their password. :type LambdaConfig: dict :param LambdaConfig: The AWS Lambda configuration information from the request to update the user pool. PreSignUp (string) --A pre-registration AWS Lambda trigger. CustomMessage (string) --A custom Message AWS Lambda trigger. PostConfirmation (string) --A post-confirmation AWS Lambda trigger. PreAuthentication (string) --A pre-authentication AWS Lambda trigger. PostAuthentication (string) --A post-authentication AWS Lambda trigger. DefineAuthChallenge (string) --Defines the authentication challenge. CreateAuthChallenge (string) --Creates an authentication challenge. VerifyAuthChallengeResponse (string) --Verifies the authentication challenge response. :type AutoVerifiedAttributes: list :param AutoVerifiedAttributes: The attributes that are automatically verified when the Amazon Cognito service makes a request to update user pools. (string) -- :type SmsVerificationMessage: string :param SmsVerificationMessage: A container with information about the SMS verification message. :type EmailVerificationMessage: string :param EmailVerificationMessage: The contents of the email verification message. 
:type EmailVerificationSubject: string :param EmailVerificationSubject: The subject of the email verification message. :type SmsAuthenticationMessage: string :param SmsAuthenticationMessage: The contents of the SMS authentication message. :type MfaConfiguration: string :param MfaConfiguration: Can be one of the following values: OFF - MFA tokens are not required and cannot be specified during user registration. ON - MFA tokens are required for all user registrations. You can only specify required when you are initially creating a user pool. OPTIONAL - Users have the option when registering to create an MFA token. :type DeviceConfiguration: dict :param DeviceConfiguration: Device configuration. ChallengeRequiredOnNewDevice (boolean) --Indicates whether a challenge is required on a new device. Only applicable to a new device. DeviceOnlyRememberedOnUserPrompt (boolean) --If true, a device is only remembered on user prompt. :type EmailConfiguration: dict :param EmailConfiguration: Email configuration. SourceArn (string) --The Amazon Resource Name (ARN) of the email source. ReplyToEmailAddress (string) --The REPLY-TO email address. :type SmsConfiguration: dict :param SmsConfiguration: SMS configuration. SnsCallerArn (string) -- [REQUIRED]The Amazon Resource Name (ARN) of the Amazon Simple Notification Service (SNS) caller. ExternalId (string) --The external ID. :type UserPoolTags: dict :param UserPoolTags: The cost allocation tags for the user pool. For more information, see Adding Cost Allocation Tags to Your User Pool (string) -- (string) -- :type AdminCreateUserConfig: dict :param AdminCreateUserConfig: The configuration for AdminCreateUser requests. AllowAdminCreateUserOnly (boolean) --Set to True if only the administrator is allowed to create user profiles. Set to False if users can sign themselves up via an app. UnusedAccountValidityDays (integer) --The user account expiration limit, in days, after which the account is no longer usable. 
To reset the account after that time limit, you must call AdminCreateUser again, specifying 'RESEND' for the MessageAction parameter. The default value for this parameter is 7. InviteMessageTemplate (dict) --The message template to be used for the welcome message to new users. SMSMessage (string) --The message template for SMS messages. EmailMessage (string) --The message template for email messages. EmailSubject (string) --The subject line for email messages. :rtype: dict :return: {} """ pass
def function[update_user_pool, parameter[UserPoolId, Policies, LambdaConfig, AutoVerifiedAttributes, SmsVerificationMessage, EmailVerificationMessage, EmailVerificationSubject, SmsAuthenticationMessage, MfaConfiguration, DeviceConfiguration, EmailConfiguration, SmsConfiguration, UserPoolTags, AdminCreateUserConfig]]: constant[ Updates the specified user pool with the specified attributes. See also: AWS API Documentation :example: response = client.update_user_pool( UserPoolId='string', Policies={ 'PasswordPolicy': { 'MinimumLength': 123, 'RequireUppercase': True|False, 'RequireLowercase': True|False, 'RequireNumbers': True|False, 'RequireSymbols': True|False } }, LambdaConfig={ 'PreSignUp': 'string', 'CustomMessage': 'string', 'PostConfirmation': 'string', 'PreAuthentication': 'string', 'PostAuthentication': 'string', 'DefineAuthChallenge': 'string', 'CreateAuthChallenge': 'string', 'VerifyAuthChallengeResponse': 'string' }, AutoVerifiedAttributes=[ 'phone_number'|'email', ], SmsVerificationMessage='string', EmailVerificationMessage='string', EmailVerificationSubject='string', SmsAuthenticationMessage='string', MfaConfiguration='OFF'|'ON'|'OPTIONAL', DeviceConfiguration={ 'ChallengeRequiredOnNewDevice': True|False, 'DeviceOnlyRememberedOnUserPrompt': True|False }, EmailConfiguration={ 'SourceArn': 'string', 'ReplyToEmailAddress': 'string' }, SmsConfiguration={ 'SnsCallerArn': 'string', 'ExternalId': 'string' }, UserPoolTags={ 'string': 'string' }, AdminCreateUserConfig={ 'AllowAdminCreateUserOnly': True|False, 'UnusedAccountValidityDays': 123, 'InviteMessageTemplate': { 'SMSMessage': 'string', 'EmailMessage': 'string', 'EmailSubject': 'string' } } ) :type UserPoolId: string :param UserPoolId: [REQUIRED] The user pool ID for the user pool you want to update. :type Policies: dict :param Policies: A container with the policies you wish to update in a user pool. PasswordPolicy (dict) --A container for information about the user pool password policy. 
MinimumLength (integer) --The minimum length of the password policy that you have set. Cannot be less than 6. RequireUppercase (boolean) --In the password policy that you have set, refers to whether you have required users to use at least one uppercase letter in their password. RequireLowercase (boolean) --In the password policy that you have set, refers to whether you have required users to use at least one lowercase letter in their password. RequireNumbers (boolean) --In the password policy that you have set, refers to whether you have required users to use at least one number in their password. RequireSymbols (boolean) --In the password policy that you have set, refers to whether you have required users to use at least one symbol in their password. :type LambdaConfig: dict :param LambdaConfig: The AWS Lambda configuration information from the request to update the user pool. PreSignUp (string) --A pre-registration AWS Lambda trigger. CustomMessage (string) --A custom Message AWS Lambda trigger. PostConfirmation (string) --A post-confirmation AWS Lambda trigger. PreAuthentication (string) --A pre-authentication AWS Lambda trigger. PostAuthentication (string) --A post-authentication AWS Lambda trigger. DefineAuthChallenge (string) --Defines the authentication challenge. CreateAuthChallenge (string) --Creates an authentication challenge. VerifyAuthChallengeResponse (string) --Verifies the authentication challenge response. :type AutoVerifiedAttributes: list :param AutoVerifiedAttributes: The attributes that are automatically verified when the Amazon Cognito service makes a request to update user pools. (string) -- :type SmsVerificationMessage: string :param SmsVerificationMessage: A container with information about the SMS verification message. :type EmailVerificationMessage: string :param EmailVerificationMessage: The contents of the email verification message. 
:type EmailVerificationSubject: string :param EmailVerificationSubject: The subject of the email verification message. :type SmsAuthenticationMessage: string :param SmsAuthenticationMessage: The contents of the SMS authentication message. :type MfaConfiguration: string :param MfaConfiguration: Can be one of the following values: OFF - MFA tokens are not required and cannot be specified during user registration. ON - MFA tokens are required for all user registrations. You can only specify required when you are initially creating a user pool. OPTIONAL - Users have the option when registering to create an MFA token. :type DeviceConfiguration: dict :param DeviceConfiguration: Device configuration. ChallengeRequiredOnNewDevice (boolean) --Indicates whether a challenge is required on a new device. Only applicable to a new device. DeviceOnlyRememberedOnUserPrompt (boolean) --If true, a device is only remembered on user prompt. :type EmailConfiguration: dict :param EmailConfiguration: Email configuration. SourceArn (string) --The Amazon Resource Name (ARN) of the email source. ReplyToEmailAddress (string) --The REPLY-TO email address. :type SmsConfiguration: dict :param SmsConfiguration: SMS configuration. SnsCallerArn (string) -- [REQUIRED]The Amazon Resource Name (ARN) of the Amazon Simple Notification Service (SNS) caller. ExternalId (string) --The external ID. :type UserPoolTags: dict :param UserPoolTags: The cost allocation tags for the user pool. For more information, see Adding Cost Allocation Tags to Your User Pool (string) -- (string) -- :type AdminCreateUserConfig: dict :param AdminCreateUserConfig: The configuration for AdminCreateUser requests. AllowAdminCreateUserOnly (boolean) --Set to True if only the administrator is allowed to create user profiles. Set to False if users can sign themselves up via an app. UnusedAccountValidityDays (integer) --The user account expiration limit, in days, after which the account is no longer usable. 
To reset the account after that time limit, you must call AdminCreateUser again, specifying 'RESEND' for the MessageAction parameter. The default value for this parameter is 7. InviteMessageTemplate (dict) --The message template to be used for the welcome message to new users. SMSMessage (string) --The message template for SMS messages. EmailMessage (string) --The message template for email messages. EmailSubject (string) --The subject line for email messages. :rtype: dict :return: {} ] pass
keyword[def] identifier[update_user_pool] ( identifier[UserPoolId] = keyword[None] , identifier[Policies] = keyword[None] , identifier[LambdaConfig] = keyword[None] , identifier[AutoVerifiedAttributes] = keyword[None] , identifier[SmsVerificationMessage] = keyword[None] , identifier[EmailVerificationMessage] = keyword[None] , identifier[EmailVerificationSubject] = keyword[None] , identifier[SmsAuthenticationMessage] = keyword[None] , identifier[MfaConfiguration] = keyword[None] , identifier[DeviceConfiguration] = keyword[None] , identifier[EmailConfiguration] = keyword[None] , identifier[SmsConfiguration] = keyword[None] , identifier[UserPoolTags] = keyword[None] , identifier[AdminCreateUserConfig] = keyword[None] ): literal[string] keyword[pass]
def update_user_pool(UserPoolId=None, Policies=None, LambdaConfig=None, AutoVerifiedAttributes=None, SmsVerificationMessage=None, EmailVerificationMessage=None, EmailVerificationSubject=None, SmsAuthenticationMessage=None, MfaConfiguration=None, DeviceConfiguration=None, EmailConfiguration=None, SmsConfiguration=None, UserPoolTags=None, AdminCreateUserConfig=None): """ Updates the specified user pool with the specified attributes. See also: AWS API Documentation :example: response = client.update_user_pool( UserPoolId='string', Policies={ 'PasswordPolicy': { 'MinimumLength': 123, 'RequireUppercase': True|False, 'RequireLowercase': True|False, 'RequireNumbers': True|False, 'RequireSymbols': True|False } }, LambdaConfig={ 'PreSignUp': 'string', 'CustomMessage': 'string', 'PostConfirmation': 'string', 'PreAuthentication': 'string', 'PostAuthentication': 'string', 'DefineAuthChallenge': 'string', 'CreateAuthChallenge': 'string', 'VerifyAuthChallengeResponse': 'string' }, AutoVerifiedAttributes=[ 'phone_number'|'email', ], SmsVerificationMessage='string', EmailVerificationMessage='string', EmailVerificationSubject='string', SmsAuthenticationMessage='string', MfaConfiguration='OFF'|'ON'|'OPTIONAL', DeviceConfiguration={ 'ChallengeRequiredOnNewDevice': True|False, 'DeviceOnlyRememberedOnUserPrompt': True|False }, EmailConfiguration={ 'SourceArn': 'string', 'ReplyToEmailAddress': 'string' }, SmsConfiguration={ 'SnsCallerArn': 'string', 'ExternalId': 'string' }, UserPoolTags={ 'string': 'string' }, AdminCreateUserConfig={ 'AllowAdminCreateUserOnly': True|False, 'UnusedAccountValidityDays': 123, 'InviteMessageTemplate': { 'SMSMessage': 'string', 'EmailMessage': 'string', 'EmailSubject': 'string' } } ) :type UserPoolId: string :param UserPoolId: [REQUIRED] The user pool ID for the user pool you want to update. :type Policies: dict :param Policies: A container with the policies you wish to update in a user pool. 
PasswordPolicy (dict) --A container for information about the user pool password policy. MinimumLength (integer) --The minimum length of the password policy that you have set. Cannot be less than 6. RequireUppercase (boolean) --In the password policy that you have set, refers to whether you have required users to use at least one uppercase letter in their password. RequireLowercase (boolean) --In the password policy that you have set, refers to whether you have required users to use at least one lowercase letter in their password. RequireNumbers (boolean) --In the password policy that you have set, refers to whether you have required users to use at least one number in their password. RequireSymbols (boolean) --In the password policy that you have set, refers to whether you have required users to use at least one symbol in their password. :type LambdaConfig: dict :param LambdaConfig: The AWS Lambda configuration information from the request to update the user pool. PreSignUp (string) --A pre-registration AWS Lambda trigger. CustomMessage (string) --A custom Message AWS Lambda trigger. PostConfirmation (string) --A post-confirmation AWS Lambda trigger. PreAuthentication (string) --A pre-authentication AWS Lambda trigger. PostAuthentication (string) --A post-authentication AWS Lambda trigger. DefineAuthChallenge (string) --Defines the authentication challenge. CreateAuthChallenge (string) --Creates an authentication challenge. VerifyAuthChallengeResponse (string) --Verifies the authentication challenge response. :type AutoVerifiedAttributes: list :param AutoVerifiedAttributes: The attributes that are automatically verified when the Amazon Cognito service makes a request to update user pools. (string) -- :type SmsVerificationMessage: string :param SmsVerificationMessage: A container with information about the SMS verification message. :type EmailVerificationMessage: string :param EmailVerificationMessage: The contents of the email verification message. 
:type EmailVerificationSubject: string :param EmailVerificationSubject: The subject of the email verification message. :type SmsAuthenticationMessage: string :param SmsAuthenticationMessage: The contents of the SMS authentication message. :type MfaConfiguration: string :param MfaConfiguration: Can be one of the following values: OFF - MFA tokens are not required and cannot be specified during user registration. ON - MFA tokens are required for all user registrations. You can only specify required when you are initially creating a user pool. OPTIONAL - Users have the option when registering to create an MFA token. :type DeviceConfiguration: dict :param DeviceConfiguration: Device configuration. ChallengeRequiredOnNewDevice (boolean) --Indicates whether a challenge is required on a new device. Only applicable to a new device. DeviceOnlyRememberedOnUserPrompt (boolean) --If true, a device is only remembered on user prompt. :type EmailConfiguration: dict :param EmailConfiguration: Email configuration. SourceArn (string) --The Amazon Resource Name (ARN) of the email source. ReplyToEmailAddress (string) --The REPLY-TO email address. :type SmsConfiguration: dict :param SmsConfiguration: SMS configuration. SnsCallerArn (string) -- [REQUIRED]The Amazon Resource Name (ARN) of the Amazon Simple Notification Service (SNS) caller. ExternalId (string) --The external ID. :type UserPoolTags: dict :param UserPoolTags: The cost allocation tags for the user pool. For more information, see Adding Cost Allocation Tags to Your User Pool (string) -- (string) -- :type AdminCreateUserConfig: dict :param AdminCreateUserConfig: The configuration for AdminCreateUser requests. AllowAdminCreateUserOnly (boolean) --Set to True if only the administrator is allowed to create user profiles. Set to False if users can sign themselves up via an app. UnusedAccountValidityDays (integer) --The user account expiration limit, in days, after which the account is no longer usable. 
To reset the account after that time limit, you must call AdminCreateUser again, specifying 'RESEND' for the MessageAction parameter. The default value for this parameter is 7. InviteMessageTemplate (dict) --The message template to be used for the welcome message to new users. SMSMessage (string) --The message template for SMS messages. EmailMessage (string) --The message template for email messages. EmailSubject (string) --The subject line for email messages. :rtype: dict :return: {} """ pass
def expand_entry(entry, ignore_xs=0x0): """Turn all Xs which are not marked in `ignore_xs` into ``0``\ s and ``1``\ s. The following will expand any Xs in bits ``1..3``\ :: >>> entry = RoutingTableEntry(set(), 0b0100, 0xfffffff0 | 0b1100) >>> list(expand_entry(entry, 0xfffffff1)) == [ ... RoutingTableEntry(set(), 0b0100, 0xfffffff0 | 0b1110), # 010X ... RoutingTableEntry(set(), 0b0110, 0xfffffff0 | 0b1110), # 011X ... ] True Parameters ---------- entry : :py:class:`~rig.routing_table.RoutingTableEntry` or similar The entry to expand. ignore_xs : int Bit-mask of Xs which should not be expanded. Yields ------ :py:class:`~rig.routing_table.RoutingTableEntry` Routing table entries which represent the original entry but with all Xs not masked off by `ignore_xs` replaced with 1s and 0s. """ # Get all the Xs in the entry that are not ignored xs = (~entry.key & ~entry.mask) & ~ignore_xs # Find the most significant X for bit in (1 << i for i in range(31, -1, -1)): if bit & xs: # Yield all the entries with this bit set as 0 entry_0 = RoutingTableEntry(entry.route, entry.key, entry.mask | bit, entry.sources) for new_entry in expand_entry(entry_0, ignore_xs): yield new_entry # And yield all the entries with this bit set as 1 entry_1 = RoutingTableEntry(entry.route, entry.key | bit, entry.mask | bit, entry.sources) for new_entry in expand_entry(entry_1, ignore_xs): yield new_entry # Stop looking for Xs break else: # If there are no Xs then yield the entry we were given. yield entry
def function[expand_entry, parameter[entry, ignore_xs]]: constant[Turn all Xs which are not marked in `ignore_xs` into ``0``\ s and ``1``\ s. The following will expand any Xs in bits ``1..3``\ :: >>> entry = RoutingTableEntry(set(), 0b0100, 0xfffffff0 | 0b1100) >>> list(expand_entry(entry, 0xfffffff1)) == [ ... RoutingTableEntry(set(), 0b0100, 0xfffffff0 | 0b1110), # 010X ... RoutingTableEntry(set(), 0b0110, 0xfffffff0 | 0b1110), # 011X ... ] True Parameters ---------- entry : :py:class:`~rig.routing_table.RoutingTableEntry` or similar The entry to expand. ignore_xs : int Bit-mask of Xs which should not be expanded. Yields ------ :py:class:`~rig.routing_table.RoutingTableEntry` Routing table entries which represent the original entry but with all Xs not masked off by `ignore_xs` replaced with 1s and 0s. ] variable[xs] assign[=] binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b19ad1e0> <ast.BitAnd object at 0x7da2590d6b60> <ast.UnaryOp object at 0x7da1b19ad270>] <ast.BitAnd object at 0x7da2590d6b60> <ast.UnaryOp object at 0x7da1b19acd00>] for taget[name[bit]] in starred[<ast.GeneratorExp object at 0x7da1b19acb50>] begin[:] if binary_operation[name[bit] <ast.BitAnd object at 0x7da2590d6b60> name[xs]] begin[:] variable[entry_0] assign[=] call[name[RoutingTableEntry], parameter[name[entry].route, name[entry].key, binary_operation[name[entry].mask <ast.BitOr object at 0x7da2590d6aa0> name[bit]], name[entry].sources]] for taget[name[new_entry]] in starred[call[name[expand_entry], parameter[name[entry_0], name[ignore_xs]]]] begin[:] <ast.Yield object at 0x7da1b19ac790> variable[entry_1] assign[=] call[name[RoutingTableEntry], parameter[name[entry].route, binary_operation[name[entry].key <ast.BitOr object at 0x7da2590d6aa0> name[bit]], binary_operation[name[entry].mask <ast.BitOr object at 0x7da2590d6aa0> name[bit]], name[entry].sources]] for taget[name[new_entry]] in starred[call[name[expand_entry], parameter[name[entry_1], name[ignore_xs]]]] begin[:] 
<ast.Yield object at 0x7da1b19cf310> break
keyword[def] identifier[expand_entry] ( identifier[entry] , identifier[ignore_xs] = literal[int] ): literal[string] identifier[xs] =(~ identifier[entry] . identifier[key] &~ identifier[entry] . identifier[mask] )&~ identifier[ignore_xs] keyword[for] identifier[bit] keyword[in] ( literal[int] << identifier[i] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ,- literal[int] ,- literal[int] )): keyword[if] identifier[bit] & identifier[xs] : identifier[entry_0] = identifier[RoutingTableEntry] ( identifier[entry] . identifier[route] , identifier[entry] . identifier[key] , identifier[entry] . identifier[mask] | identifier[bit] , identifier[entry] . identifier[sources] ) keyword[for] identifier[new_entry] keyword[in] identifier[expand_entry] ( identifier[entry_0] , identifier[ignore_xs] ): keyword[yield] identifier[new_entry] identifier[entry_1] = identifier[RoutingTableEntry] ( identifier[entry] . identifier[route] , identifier[entry] . identifier[key] | identifier[bit] , identifier[entry] . identifier[mask] | identifier[bit] , identifier[entry] . identifier[sources] ) keyword[for] identifier[new_entry] keyword[in] identifier[expand_entry] ( identifier[entry_1] , identifier[ignore_xs] ): keyword[yield] identifier[new_entry] keyword[break] keyword[else] : keyword[yield] identifier[entry]
def expand_entry(entry, ignore_xs=0): """Turn all Xs which are not marked in `ignore_xs` into ``0``\\ s and ``1``\\ s. The following will expand any Xs in bits ``1..3``\\ :: >>> entry = RoutingTableEntry(set(), 0b0100, 0xfffffff0 | 0b1100) >>> list(expand_entry(entry, 0xfffffff1)) == [ ... RoutingTableEntry(set(), 0b0100, 0xfffffff0 | 0b1110), # 010X ... RoutingTableEntry(set(), 0b0110, 0xfffffff0 | 0b1110), # 011X ... ] True Parameters ---------- entry : :py:class:`~rig.routing_table.RoutingTableEntry` or similar The entry to expand. ignore_xs : int Bit-mask of Xs which should not be expanded. Yields ------ :py:class:`~rig.routing_table.RoutingTableEntry` Routing table entries which represent the original entry but with all Xs not masked off by `ignore_xs` replaced with 1s and 0s. """ # Get all the Xs in the entry that are not ignored xs = ~entry.key & ~entry.mask & ~ignore_xs # Find the most significant X for bit in (1 << i for i in range(31, -1, -1)): if bit & xs: # Yield all the entries with this bit set as 0 entry_0 = RoutingTableEntry(entry.route, entry.key, entry.mask | bit, entry.sources) for new_entry in expand_entry(entry_0, ignore_xs): yield new_entry # depends on [control=['for'], data=['new_entry']] # And yield all the entries with this bit set as 1 entry_1 = RoutingTableEntry(entry.route, entry.key | bit, entry.mask | bit, entry.sources) for new_entry in expand_entry(entry_1, ignore_xs): yield new_entry # depends on [control=['for'], data=['new_entry']] # Stop looking for Xs break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['bit']] else: # If there are no Xs then yield the entry we were given. yield entry
def user_organization_membership_make_default(self, id, membership_id, data, **kwargs): "https://developer.zendesk.com/rest_api/docs/core/organization_memberships#set-membership-as-default" api_path = "/api/v2/users/{id}/organization_memberships/{membership_id}/make_default.json" api_path = api_path.format(id=id, membership_id=membership_id) return self.call(api_path, method="PUT", data=data, **kwargs)
def function[user_organization_membership_make_default, parameter[self, id, membership_id, data]]: constant[https://developer.zendesk.com/rest_api/docs/core/organization_memberships#set-membership-as-default] variable[api_path] assign[=] constant[/api/v2/users/{id}/organization_memberships/{membership_id}/make_default.json] variable[api_path] assign[=] call[name[api_path].format, parameter[]] return[call[name[self].call, parameter[name[api_path]]]]
keyword[def] identifier[user_organization_membership_make_default] ( identifier[self] , identifier[id] , identifier[membership_id] , identifier[data] ,** identifier[kwargs] ): literal[string] identifier[api_path] = literal[string] identifier[api_path] = identifier[api_path] . identifier[format] ( identifier[id] = identifier[id] , identifier[membership_id] = identifier[membership_id] ) keyword[return] identifier[self] . identifier[call] ( identifier[api_path] , identifier[method] = literal[string] , identifier[data] = identifier[data] ,** identifier[kwargs] )
def user_organization_membership_make_default(self, id, membership_id, data, **kwargs): """https://developer.zendesk.com/rest_api/docs/core/organization_memberships#set-membership-as-default""" api_path = '/api/v2/users/{id}/organization_memberships/{membership_id}/make_default.json' api_path = api_path.format(id=id, membership_id=membership_id) return self.call(api_path, method='PUT', data=data, **kwargs)
def url_defaults(self, fn):
    """
    Callback function for URL defaults for all view functions of the
    application.  It's called with the endpoint and values and should
    update the values passed in place.
    """
    # Registration is deferred until the real application is available.
    def register(app):
        return app.url_defaults(fn)

    self._defer(register)
    # Return fn unchanged so this can be used as a decorator.
    return fn
def function[url_defaults, parameter[self, fn]]: constant[ Callback function for URL defaults for all view functions of the application. It's called with the endpoint and values and should update the values passed in place. ] call[name[self]._defer, parameter[<ast.Lambda object at 0x7da20c6c71f0>]] return[name[fn]]
keyword[def] identifier[url_defaults] ( identifier[self] , identifier[fn] ): literal[string] identifier[self] . identifier[_defer] ( keyword[lambda] identifier[app] : identifier[app] . identifier[url_defaults] ( identifier[fn] )) keyword[return] identifier[fn]
def url_defaults(self, fn): """ Callback function for URL defaults for all view functions of the application. It's called with the endpoint and values and should update the values passed in place. """ self._defer(lambda app: app.url_defaults(fn)) return fn
def _read_para_hip_transport_mode(self, code, cbit, clen, *, desc, length, version):
    """Read HIP HIP_TRANSPORT_MODE parameter.

    Structure of HIP HIP_TRANSPORT_MODE parameter [RFC 6261]:

         0                   1                   2                   3
         0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
        |             Type              |             Length            |
        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
        |          Port                 |           Mode ID #1          |
        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
        |          Mode ID #2           |           Mode ID #3          |
        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
        |          Mode ID #n           |             Padding           |
        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

        Octets      Bits        Name                            Description
          0           0     hip_transport_mode.type         Parameter Type
          1          15     hip_transport_mode.critical     Critical Bit
          2          16     hip_transport_mode.length       Length of Contents
          4          32     hip_transport_mode.port         Port
          6          48     hip_transport_mode.id           Mode ID
          ............
          ?           ?     -                               Padding

    """
    # Contents are one 2-byte port followed by a list of 2-byte mode IDs,
    # so a valid content length must be even.
    if clen % 2 != 0:
        raise ProtocolError(f'HIPv{version}: [Parano {code}] invalid format')

    # 2-byte port number (reads advance the underlying stream in order).
    _port = self._read_unpack(2)
    _mdid = list()
    # Remaining (clen - 2) bytes carry (clen - 2) // 2 mode IDs; unknown
    # IDs fall back to the registry default 'Unassigned'.
    for _ in range((clen - 2) // 2):
        _mdid.append(_TP_MODE_ID.get(self._read_unpack(2), 'Unassigned'))

    hip_transport_mode = dict(
        type=desc,
        critical=cbit,
        length=clen,
        port=_port,
        id=tuple(_mdid),
    )

    # Skip any trailing padding up to the declared parameter length.
    _plen = length - clen
    if _plen:
        self._read_fileng(_plen)

    return hip_transport_mode
def function[_read_para_hip_transport_mode, parameter[self, code, cbit, clen]]: constant[Read HIP HIP_TRANSPORT_MODE parameter. Structure of HIP HIP_TRANSPORT_MODE parameter [RFC 6261]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Type | Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Port | Mode ID #1 | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Mode ID #2 | Mode ID #3 | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Mode ID #n | Padding | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hip_transport_mode.type Parameter Type 1 15 hip_transport_mode.critical Critical Bit 2 16 hip_transport_mode.length Length of Contents 4 32 hip_transport_mode.port Port 6 48 hip_transport_mode.id Mode ID ............ ? ? - Padding ] if compare[binary_operation[name[clen] <ast.Mod object at 0x7da2590d6920> constant[2]] not_equal[!=] constant[0]] begin[:] <ast.Raise object at 0x7da18bc72bf0> variable[_port] assign[=] call[name[self]._read_unpack, parameter[constant[2]]] variable[_mdid] assign[=] call[name[list], parameter[]] for taget[name[_]] in starred[call[name[range], parameter[binary_operation[binary_operation[name[clen] - constant[2]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]]]]] begin[:] call[name[_mdid].append, parameter[call[name[_TP_MODE_ID].get, parameter[call[name[self]._read_unpack, parameter[constant[2]]], constant[Unassigned]]]]] variable[hip_transport_mode] assign[=] call[name[dict], parameter[]] variable[_plen] assign[=] binary_operation[name[length] - name[clen]] if name[_plen] begin[:] call[name[self]._read_fileng, parameter[name[_plen]]] return[name[hip_transport_mode]]
keyword[def] identifier[_read_para_hip_transport_mode] ( identifier[self] , identifier[code] , identifier[cbit] , identifier[clen] ,*, identifier[desc] , identifier[length] , identifier[version] ): literal[string] keyword[if] identifier[clen] % literal[int] != literal[int] : keyword[raise] identifier[ProtocolError] ( literal[string] ) identifier[_port] = identifier[self] . identifier[_read_unpack] ( literal[int] ) identifier[_mdid] = identifier[list] () keyword[for] identifier[_] keyword[in] identifier[range] (( identifier[clen] - literal[int] )// literal[int] ): identifier[_mdid] . identifier[append] ( identifier[_TP_MODE_ID] . identifier[get] ( identifier[self] . identifier[_read_unpack] ( literal[int] ), literal[string] )) identifier[hip_transport_mode] = identifier[dict] ( identifier[type] = identifier[desc] , identifier[critical] = identifier[cbit] , identifier[length] = identifier[clen] , identifier[port] = identifier[_port] , identifier[id] = identifier[tuple] ( identifier[_mdid] ), ) identifier[_plen] = identifier[length] - identifier[clen] keyword[if] identifier[_plen] : identifier[self] . identifier[_read_fileng] ( identifier[_plen] ) keyword[return] identifier[hip_transport_mode]
def _read_para_hip_transport_mode(self, code, cbit, clen, *, desc, length, version): """Read HIP HIP_TRANSPORT_MODE parameter. Structure of HIP HIP_TRANSPORT_MODE parameter [RFC 6261]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Type | Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Port | Mode ID #1 | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Mode ID #2 | Mode ID #3 | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Mode ID #n | Padding | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hip_transport_mode.type Parameter Type 1 15 hip_transport_mode.critical Critical Bit 2 16 hip_transport_mode.length Length of Contents 4 32 hip_transport_mode.port Port 6 48 hip_transport_mode.id Mode ID ............ ? ? - Padding """ if clen % 2 != 0: raise ProtocolError(f'HIPv{version}: [Parano {code}] invalid format') # depends on [control=['if'], data=[]] _port = self._read_unpack(2) _mdid = list() for _ in range((clen - 2) // 2): _mdid.append(_TP_MODE_ID.get(self._read_unpack(2), 'Unassigned')) # depends on [control=['for'], data=[]] hip_transport_mode = dict(type=desc, critical=cbit, length=clen, port=_port, id=tuple(_mdid)) _plen = length - clen if _plen: self._read_fileng(_plen) # depends on [control=['if'], data=[]] return hip_transport_mode
def ip_rtm_config_route_static_bfd_bfd_static_route_bfd_interval_attributes_interval(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ip = ET.SubElement(config, "ip", xmlns="urn:brocade.com:mgmt:brocade-common-def") rtm_config = ET.SubElement(ip, "rtm-config", xmlns="urn:brocade.com:mgmt:brocade-rtm") route = ET.SubElement(rtm_config, "route") static = ET.SubElement(route, "static") bfd = ET.SubElement(static, "bfd") bfd_static_route = ET.SubElement(bfd, "bfd-static-route") bfd_static_route_dest_key = ET.SubElement(bfd_static_route, "bfd-static-route-dest") bfd_static_route_dest_key.text = kwargs.pop('bfd_static_route_dest') bfd_static_route_src_key = ET.SubElement(bfd_static_route, "bfd-static-route-src") bfd_static_route_src_key.text = kwargs.pop('bfd_static_route_src') bfd_interval_attributes = ET.SubElement(bfd_static_route, "bfd-interval-attributes") interval = ET.SubElement(bfd_interval_attributes, "interval") interval.text = kwargs.pop('interval') callback = kwargs.pop('callback', self._callback) return callback(config)
def function[ip_rtm_config_route_static_bfd_bfd_static_route_bfd_interval_attributes_interval, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[ip] assign[=] call[name[ET].SubElement, parameter[name[config], constant[ip]]] variable[rtm_config] assign[=] call[name[ET].SubElement, parameter[name[ip], constant[rtm-config]]] variable[route] assign[=] call[name[ET].SubElement, parameter[name[rtm_config], constant[route]]] variable[static] assign[=] call[name[ET].SubElement, parameter[name[route], constant[static]]] variable[bfd] assign[=] call[name[ET].SubElement, parameter[name[static], constant[bfd]]] variable[bfd_static_route] assign[=] call[name[ET].SubElement, parameter[name[bfd], constant[bfd-static-route]]] variable[bfd_static_route_dest_key] assign[=] call[name[ET].SubElement, parameter[name[bfd_static_route], constant[bfd-static-route-dest]]] name[bfd_static_route_dest_key].text assign[=] call[name[kwargs].pop, parameter[constant[bfd_static_route_dest]]] variable[bfd_static_route_src_key] assign[=] call[name[ET].SubElement, parameter[name[bfd_static_route], constant[bfd-static-route-src]]] name[bfd_static_route_src_key].text assign[=] call[name[kwargs].pop, parameter[constant[bfd_static_route_src]]] variable[bfd_interval_attributes] assign[=] call[name[ET].SubElement, parameter[name[bfd_static_route], constant[bfd-interval-attributes]]] variable[interval] assign[=] call[name[ET].SubElement, parameter[name[bfd_interval_attributes], constant[interval]]] name[interval].text assign[=] call[name[kwargs].pop, parameter[constant[interval]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[ip_rtm_config_route_static_bfd_bfd_static_route_bfd_interval_attributes_interval] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[ip] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] ) identifier[rtm_config] = identifier[ET] . identifier[SubElement] ( identifier[ip] , literal[string] , identifier[xmlns] = literal[string] ) identifier[route] = identifier[ET] . identifier[SubElement] ( identifier[rtm_config] , literal[string] ) identifier[static] = identifier[ET] . identifier[SubElement] ( identifier[route] , literal[string] ) identifier[bfd] = identifier[ET] . identifier[SubElement] ( identifier[static] , literal[string] ) identifier[bfd_static_route] = identifier[ET] . identifier[SubElement] ( identifier[bfd] , literal[string] ) identifier[bfd_static_route_dest_key] = identifier[ET] . identifier[SubElement] ( identifier[bfd_static_route] , literal[string] ) identifier[bfd_static_route_dest_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[bfd_static_route_src_key] = identifier[ET] . identifier[SubElement] ( identifier[bfd_static_route] , literal[string] ) identifier[bfd_static_route_src_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[bfd_interval_attributes] = identifier[ET] . identifier[SubElement] ( identifier[bfd_static_route] , literal[string] ) identifier[interval] = identifier[ET] . identifier[SubElement] ( identifier[bfd_interval_attributes] , literal[string] ) identifier[interval] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def ip_rtm_config_route_static_bfd_bfd_static_route_bfd_interval_attributes_interval(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') ip = ET.SubElement(config, 'ip', xmlns='urn:brocade.com:mgmt:brocade-common-def') rtm_config = ET.SubElement(ip, 'rtm-config', xmlns='urn:brocade.com:mgmt:brocade-rtm') route = ET.SubElement(rtm_config, 'route') static = ET.SubElement(route, 'static') bfd = ET.SubElement(static, 'bfd') bfd_static_route = ET.SubElement(bfd, 'bfd-static-route') bfd_static_route_dest_key = ET.SubElement(bfd_static_route, 'bfd-static-route-dest') bfd_static_route_dest_key.text = kwargs.pop('bfd_static_route_dest') bfd_static_route_src_key = ET.SubElement(bfd_static_route, 'bfd-static-route-src') bfd_static_route_src_key.text = kwargs.pop('bfd_static_route_src') bfd_interval_attributes = ET.SubElement(bfd_static_route, 'bfd-interval-attributes') interval = ET.SubElement(bfd_interval_attributes, 'interval') interval.text = kwargs.pop('interval') callback = kwargs.pop('callback', self._callback) return callback(config)
def equal(actual, expected):
    '''
    Compare actual and expected using ==

    >>> expect = Expector([])
    >>> expect(1).to_not(equal, 2)
    (True, 'equal: expect 1 == 2')
    >>> expect(1).to(equal, 1)
    (True, 'equal: expect 1 == 1')
    '''
    matches = actual == expected
    diffable = (str, dict, list, tuple)

    # A readable unified diff is only produced for failed comparisons of
    # diff-friendly container/string types; everything else gets the
    # one-line message.
    if matches or not (isinstance(expected, diffable)
                       and isinstance(actual, diffable)):
        description = "equal: expect {} == {}".format(actual, expected)
    else:
        diff_lines = difflib.unified_diff(pformat(expected).split('\n'),
                                          pformat(actual).split('\n'),
                                          n=99)
        description = '\n'.join(['equal:'] + list(diff_lines))

    return (matches, description)
def function[equal, parameter[actual, expected]]: constant[ Compare actual and expected using == >>> expect = Expector([]) >>> expect(1).to_not(equal, 2) (True, 'equal: expect 1 == 2') >>> expect(1).to(equal, 1) (True, 'equal: expect 1 == 1') ] variable[is_passing] assign[=] compare[name[actual] equal[==] name[expected]] variable[types_to_diff] assign[=] tuple[[<ast.Name object at 0x7da1b25d5c60>, <ast.Name object at 0x7da1b25d6710>, <ast.Name object at 0x7da1b25d5060>, <ast.Name object at 0x7da1b25d59f0>]] if <ast.BoolOp object at 0x7da1b25d50c0> begin[:] variable[readable_diff] assign[=] call[name[difflib].unified_diff, parameter[call[call[name[pformat], parameter[name[expected]]].split, parameter[constant[ ]]], call[call[name[pformat], parameter[name[actual]]].split, parameter[constant[ ]]]]] variable[description] assign[=] call[constant[ ].join, parameter[binary_operation[list[[<ast.Constant object at 0x7da18ede5f60>]] + call[name[list], parameter[name[readable_diff]]]]]] variable[outcome] assign[=] tuple[[<ast.Name object at 0x7da18ede52d0>, <ast.Name object at 0x7da18ede6740>]] return[name[outcome]]
keyword[def] identifier[equal] ( identifier[actual] , identifier[expected] ): literal[string] identifier[is_passing] =( identifier[actual] == identifier[expected] ) identifier[types_to_diff] =( identifier[str] , identifier[dict] , identifier[list] , identifier[tuple] ) keyword[if] keyword[not] identifier[is_passing] keyword[and] identifier[isinstance] ( identifier[expected] , identifier[types_to_diff] ) keyword[and] identifier[isinstance] ( identifier[actual] , identifier[types_to_diff] ): identifier[readable_diff] = identifier[difflib] . identifier[unified_diff] ( identifier[pformat] ( identifier[expected] ). identifier[split] ( literal[string] ), identifier[pformat] ( identifier[actual] ). identifier[split] ( literal[string] ), identifier[n] = literal[int] ) identifier[description] = literal[string] . identifier[join] ([ literal[string] ]+ identifier[list] ( identifier[readable_diff] )) keyword[else] : identifier[description] = literal[string] . identifier[format] ( identifier[actual] , identifier[expected] ) identifier[outcome] =( identifier[is_passing] , identifier[description] ) keyword[return] identifier[outcome]
def equal(actual, expected): """ Compare actual and expected using == >>> expect = Expector([]) >>> expect(1).to_not(equal, 2) (True, 'equal: expect 1 == 2') >>> expect(1).to(equal, 1) (True, 'equal: expect 1 == 1') """ is_passing = actual == expected types_to_diff = (str, dict, list, tuple) if not is_passing and isinstance(expected, types_to_diff) and isinstance(actual, types_to_diff): readable_diff = difflib.unified_diff(pformat(expected).split('\n'), pformat(actual).split('\n'), n=99) description = '\n'.join(['equal:'] + list(readable_diff)) # depends on [control=['if'], data=[]] else: description = 'equal: expect {} == {}'.format(actual, expected) outcome = (is_passing, description) return outcome
def gamma(arr, g):
    r"""
    Gamma correction is a nonlinear operation that adjusts the image's
    channel values pixel-by-pixel according to a power-law:

    .. math:: pixel_{out} = pixel_{in} ^ {\gamma}

    Setting gamma (:math:`\gamma`) to be less than 1.0 darkens the image and
    setting gamma to be greater than 1.0 lightens it.

    Parameters
    ----------
    gamma (:math:`\gamma`): float
        Reasonable values range from 0.8 to 2.4.
    """
    # Values must already be normalised to [0, 1] (within epsilon tolerance).
    out_of_range = arr.max() > 1.0 + epsilon or arr.min() < 0 - epsilon
    if out_of_range:
        raise ValueError("Input array must have float values between 0 and 1")

    # Reject NaN and non-positive gamma before taking the reciprocal.
    if np.isnan(g) or g <= 0:
        raise ValueError("gamma must be greater than 0")

    return arr ** (1.0 / g)
def function[gamma, parameter[arr, g]]: constant[ Gamma correction is a nonlinear operation that adjusts the image's channel values pixel-by-pixel according to a power-law: .. math:: pixel_{out} = pixel_{in} ^ {\gamma} Setting gamma (:math:`\gamma`) to be less than 1.0 darkens the image and setting gamma to be greater than 1.0 lightens it. Parameters ---------- gamma (:math:`\gamma`): float Reasonable values range from 0.8 to 2.4. ] if <ast.BoolOp object at 0x7da2044c3b80> begin[:] <ast.Raise object at 0x7da2044c3340> if <ast.BoolOp object at 0x7da2044c3c70> begin[:] <ast.Raise object at 0x7da204347910> return[binary_operation[name[arr] ** binary_operation[constant[1.0] / name[g]]]]
keyword[def] identifier[gamma] ( identifier[arr] , identifier[g] ): literal[string] keyword[if] ( identifier[arr] . identifier[max] ()> literal[int] + identifier[epsilon] ) keyword[or] ( identifier[arr] . identifier[min] ()< literal[int] - identifier[epsilon] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[g] <= literal[int] keyword[or] identifier[np] . identifier[isnan] ( identifier[g] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return] identifier[arr] **( literal[int] / identifier[g] )
def gamma(arr, g): """ Gamma correction is a nonlinear operation that adjusts the image's channel values pixel-by-pixel according to a power-law: .. math:: pixel_{out} = pixel_{in} ^ {\\gamma} Setting gamma (:math:`\\gamma`) to be less than 1.0 darkens the image and setting gamma to be greater than 1.0 lightens it. Parameters ---------- gamma (:math:`\\gamma`): float Reasonable values range from 0.8 to 2.4. """ if arr.max() > 1.0 + epsilon or arr.min() < 0 - epsilon: raise ValueError('Input array must have float values between 0 and 1') # depends on [control=['if'], data=[]] if g <= 0 or np.isnan(g): raise ValueError('gamma must be greater than 0') # depends on [control=['if'], data=[]] return arr ** (1.0 / g)
def get_data_object(data_id, use_data_config=True):
    """
    Normalize the data_id and query the server. If that is unavailable
    try the raw ID
    """
    normalized = normalize_data_name(data_id, use_data_config=use_data_config)
    client = DataClient()

    data_obj = client.get(normalized)
    if data_obj:
        return data_obj

    # Normalization was a no-op, so a second lookup would be identical.
    if data_id == normalized:
        return data_obj

    # Fall back to querying with the raw, un-normalized ID.
    return client.get(data_id)
def function[get_data_object, parameter[data_id, use_data_config]]: constant[ Normalize the data_id and query the server. If that is unavailable try the raw ID ] variable[normalized_data_reference] assign[=] call[name[normalize_data_name], parameter[name[data_id]]] variable[client] assign[=] call[name[DataClient], parameter[]] variable[data_obj] assign[=] call[name[client].get, parameter[name[normalized_data_reference]]] if <ast.BoolOp object at 0x7da1b0da2470> begin[:] variable[data_obj] assign[=] call[name[client].get, parameter[name[data_id]]] return[name[data_obj]]
keyword[def] identifier[get_data_object] ( identifier[data_id] , identifier[use_data_config] = keyword[True] ): literal[string] identifier[normalized_data_reference] = identifier[normalize_data_name] ( identifier[data_id] , identifier[use_data_config] = identifier[use_data_config] ) identifier[client] = identifier[DataClient] () identifier[data_obj] = identifier[client] . identifier[get] ( identifier[normalized_data_reference] ) keyword[if] keyword[not] identifier[data_obj] keyword[and] identifier[data_id] != identifier[normalized_data_reference] : identifier[data_obj] = identifier[client] . identifier[get] ( identifier[data_id] ) keyword[return] identifier[data_obj]
def get_data_object(data_id, use_data_config=True): """ Normalize the data_id and query the server. If that is unavailable try the raw ID """ normalized_data_reference = normalize_data_name(data_id, use_data_config=use_data_config) client = DataClient() data_obj = client.get(normalized_data_reference) # Try with the raw ID if not data_obj and data_id != normalized_data_reference: data_obj = client.get(data_id) # depends on [control=['if'], data=[]] return data_obj
def BE32(value, min_value=None, max_value=None, fuzzable=True, name=None, full_range=False):
    '''32-bit field, Big endian encoded'''
    # Thin convenience wrapper: a UInt32 whose encoder is fixed to
    # big-endian integer encoding.
    return UInt32(
        value,
        min_value=min_value,
        max_value=max_value,
        encoder=ENC_INT_BE,
        fuzzable=fuzzable,
        name=name,
        full_range=full_range,
    )
def function[BE32, parameter[value, min_value, max_value, fuzzable, name, full_range]]: constant[32-bit field, Big endian encoded] return[call[name[UInt32], parameter[name[value]]]]
keyword[def] identifier[BE32] ( identifier[value] , identifier[min_value] = keyword[None] , identifier[max_value] = keyword[None] , identifier[fuzzable] = keyword[True] , identifier[name] = keyword[None] , identifier[full_range] = keyword[False] ): literal[string] keyword[return] identifier[UInt32] ( identifier[value] , identifier[min_value] = identifier[min_value] , identifier[max_value] = identifier[max_value] , identifier[encoder] = identifier[ENC_INT_BE] , identifier[fuzzable] = identifier[fuzzable] , identifier[name] = identifier[name] , identifier[full_range] = identifier[full_range] )
def BE32(value, min_value=None, max_value=None, fuzzable=True, name=None, full_range=False): """32-bit field, Big endian encoded""" return UInt32(value, min_value=min_value, max_value=max_value, encoder=ENC_INT_BE, fuzzable=fuzzable, name=name, full_range=full_range)
def getMonth(s):
    """
    Return the month for *s*, looked up via the module-level ``monthDict``.

    Known formats:
    Month ("%b")
    Month Day ("%b %d")
    Month-Month ("%b-%b") --- this gets coerced to the first %b, dropping the month range
    Season ("%s") --- this gets coerced to use the first month of the given season
    Month Day Year ("%b %d %Y")
    Month Year ("%b %Y")
    Year Month Day ("%Y %m %d")

    Raises
    ------
    ValueError
        If no part of *s* can be resolved to a month.  (Previously the
        final ``raise`` was unreachable: unknown inputs escaped as
        KeyError or IndexError instead.)
    """
    parts = s.split('-')

    # First try the text before any '-' (plain month, season, or the
    # first month of a "%b-%b" range).
    monthOrSeason = parts[0].upper()
    if monthOrSeason in monthDict:
        return monthDict[monthOrSeason]

    # Otherwise try the part after the '-': either a numeric month
    # ("%Y-%m-%d" style) or a month/season name.
    if len(parts) > 1:
        monthOrSeason = parts[1].upper()
        if monthOrSeason.isdigit():
            return monthOrSeason
        if monthOrSeason in monthDict:
            return monthDict[monthOrSeason]

    raise ValueError("Month format not recognized: " + s)
def function[getMonth, parameter[s]]: constant[ Known formats: Month ("%b") Month Day ("%b %d") Month-Month ("%b-%b") --- this gets coerced to the first %b, dropping the month range Season ("%s") --- this gets coerced to use the first month of the given season Month Day Year ("%b %d %Y") Month Year ("%b %Y") Year Month Day ("%Y %m %d") ] variable[monthOrSeason] assign[=] call[call[call[name[s].split, parameter[constant[-]]]][constant[0]].upper, parameter[]] if compare[name[monthOrSeason] in name[monthDict]] begin[:] return[call[name[monthDict]][name[monthOrSeason]]] <ast.Raise object at 0x7da1b0ef7a60>
keyword[def] identifier[getMonth] ( identifier[s] ): literal[string] identifier[monthOrSeason] = identifier[s] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[upper] () keyword[if] identifier[monthOrSeason] keyword[in] identifier[monthDict] : keyword[return] identifier[monthDict] [ identifier[monthOrSeason] ] keyword[else] : identifier[monthOrSeason] = identifier[s] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[upper] () keyword[if] identifier[monthOrSeason] . identifier[isdigit] (): keyword[return] identifier[monthOrSeason] keyword[else] : keyword[return] identifier[monthDict] [ identifier[monthOrSeason] ] keyword[raise] identifier[ValueError] ( literal[string] + identifier[s] )
def getMonth(s): """ Known formats: Month ("%b") Month Day ("%b %d") Month-Month ("%b-%b") --- this gets coerced to the first %b, dropping the month range Season ("%s") --- this gets coerced to use the first month of the given season Month Day Year ("%b %d %Y") Month Year ("%b %Y") Year Month Day ("%Y %m %d") """ monthOrSeason = s.split('-')[0].upper() if monthOrSeason in monthDict: return monthDict[monthOrSeason] # depends on [control=['if'], data=['monthOrSeason', 'monthDict']] else: monthOrSeason = s.split('-')[1].upper() if monthOrSeason.isdigit(): return monthOrSeason # depends on [control=['if'], data=[]] else: return monthDict[monthOrSeason] raise ValueError('Month format not recognized: ' + s)
def unzip(self, directory): """ Write contents of zipfile to directory """ if not os.path.exists(directory): os.makedirs(directory) shutil.copytree(self.src_dir, directory)
def function[unzip, parameter[self, directory]]: constant[ Write contents of zipfile to directory ] if <ast.UnaryOp object at 0x7da18f720e50> begin[:] call[name[os].makedirs, parameter[name[directory]]] call[name[shutil].copytree, parameter[name[self].src_dir, name[directory]]]
keyword[def] identifier[unzip] ( identifier[self] , identifier[directory] ): literal[string] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[directory] ): identifier[os] . identifier[makedirs] ( identifier[directory] ) identifier[shutil] . identifier[copytree] ( identifier[self] . identifier[src_dir] , identifier[directory] )
def unzip(self, directory): """ Write contents of zipfile to directory """ if not os.path.exists(directory): os.makedirs(directory) # depends on [control=['if'], data=[]] shutil.copytree(self.src_dir, directory)
def render_confirm_form(self): """ Second step of ExpressCheckout. Display an order confirmation form which contains hidden fields with the token / PayerID from PayPal. """ warn_untested() initial = dict(token=self.request.GET['token'], PayerID=self.request.GET['PayerID']) self.context[self.form_context_name] = self.confirm_form_cls(initial=initial) return TemplateResponse(self.request, self.confirm_template, self.context)
def function[render_confirm_form, parameter[self]]: constant[ Second step of ExpressCheckout. Display an order confirmation form which contains hidden fields with the token / PayerID from PayPal. ] call[name[warn_untested], parameter[]] variable[initial] assign[=] call[name[dict], parameter[]] call[name[self].context][name[self].form_context_name] assign[=] call[name[self].confirm_form_cls, parameter[]] return[call[name[TemplateResponse], parameter[name[self].request, name[self].confirm_template, name[self].context]]]
keyword[def] identifier[render_confirm_form] ( identifier[self] ): literal[string] identifier[warn_untested] () identifier[initial] = identifier[dict] ( identifier[token] = identifier[self] . identifier[request] . identifier[GET] [ literal[string] ], identifier[PayerID] = identifier[self] . identifier[request] . identifier[GET] [ literal[string] ]) identifier[self] . identifier[context] [ identifier[self] . identifier[form_context_name] ]= identifier[self] . identifier[confirm_form_cls] ( identifier[initial] = identifier[initial] ) keyword[return] identifier[TemplateResponse] ( identifier[self] . identifier[request] , identifier[self] . identifier[confirm_template] , identifier[self] . identifier[context] )
def render_confirm_form(self): """ Second step of ExpressCheckout. Display an order confirmation form which contains hidden fields with the token / PayerID from PayPal. """ warn_untested() initial = dict(token=self.request.GET['token'], PayerID=self.request.GET['PayerID']) self.context[self.form_context_name] = self.confirm_form_cls(initial=initial) return TemplateResponse(self.request, self.confirm_template, self.context)
def item_perceel_adapter(obj, request): """ Adapter for rendering an object of :class: `crabpy.gateway.capakey.Perceel` to json. """ return { 'id': obj.id, 'sectie': { 'id': obj.sectie.id, 'afdeling': { 'id': obj.sectie.afdeling.id, 'naam': obj.sectie.afdeling.naam, 'gemeente': { 'id': obj.sectie.afdeling.gemeente.id, 'naam': obj.sectie.afdeling.gemeente.naam }, }, }, 'capakey': obj.capakey, 'percid': obj.percid, 'centroid': obj.centroid, 'bounding_box': obj.bounding_box }
def function[item_perceel_adapter, parameter[obj, request]]: constant[ Adapter for rendering an object of :class: `crabpy.gateway.capakey.Perceel` to json. ] return[dictionary[[<ast.Constant object at 0x7da1b0a65600>, <ast.Constant object at 0x7da1b0a65210>, <ast.Constant object at 0x7da1b0a65450>, <ast.Constant object at 0x7da1b0a66620>, <ast.Constant object at 0x7da1b0a64340>, <ast.Constant object at 0x7da1b0a643a0>], [<ast.Attribute object at 0x7da1b0a66d70>, <ast.Dict object at 0x7da1b0a65f30>, <ast.Attribute object at 0x7da1b0ab7d00>, <ast.Attribute object at 0x7da1b0ab4d00>, <ast.Attribute object at 0x7da1b0ab4490>, <ast.Attribute object at 0x7da1b0ab5330>]]]
keyword[def] identifier[item_perceel_adapter] ( identifier[obj] , identifier[request] ): literal[string] keyword[return] { literal[string] : identifier[obj] . identifier[id] , literal[string] :{ literal[string] : identifier[obj] . identifier[sectie] . identifier[id] , literal[string] :{ literal[string] : identifier[obj] . identifier[sectie] . identifier[afdeling] . identifier[id] , literal[string] : identifier[obj] . identifier[sectie] . identifier[afdeling] . identifier[naam] , literal[string] :{ literal[string] : identifier[obj] . identifier[sectie] . identifier[afdeling] . identifier[gemeente] . identifier[id] , literal[string] : identifier[obj] . identifier[sectie] . identifier[afdeling] . identifier[gemeente] . identifier[naam] }, }, }, literal[string] : identifier[obj] . identifier[capakey] , literal[string] : identifier[obj] . identifier[percid] , literal[string] : identifier[obj] . identifier[centroid] , literal[string] : identifier[obj] . identifier[bounding_box] }
def item_perceel_adapter(obj, request): """ Adapter for rendering an object of :class: `crabpy.gateway.capakey.Perceel` to json. """ return {'id': obj.id, 'sectie': {'id': obj.sectie.id, 'afdeling': {'id': obj.sectie.afdeling.id, 'naam': obj.sectie.afdeling.naam, 'gemeente': {'id': obj.sectie.afdeling.gemeente.id, 'naam': obj.sectie.afdeling.gemeente.naam}}}, 'capakey': obj.capakey, 'percid': obj.percid, 'centroid': obj.centroid, 'bounding_box': obj.bounding_box}
def eeg_select_electrodes(eeg, include="all", exclude=None, hemisphere="both", central=True): """ Returns electrodes/sensors names of selected region (according to a 10-20 EEG montage). Parameters ---------- eeg : mne.Raw or mne.Epochs EEG data. include : str ot list Sensor area to include. exclude : str or list or None Sensor area to exclude. hemisphere : str Select both hemispheres? "both", "left" or "right". central : bool Select the central line. Returns ---------- electrodes : list List of electrodes/sensors corresponding to the selected area. Example ---------- >>> import neurokit as nk >>> nk.eeg_select_electrodes(include="F", exclude="C") Notes ---------- *Authors* - `Dominique Makowski <https://dominiquemakowski.github.io/>`_ """ # Get all channel names eeg = eeg.copy().pick_types(meg=False, eeg=True) channel_list = eeg.ch_names # Include if include == "all": electrodes = channel_list elif isinstance(include, str): electrodes = [s for s in channel_list if include in s] elif isinstance(include, list): electrodes = [] for i in include: electrodes += [s for s in channel_list if i in s] else: print("NeuroKit Warning: eeg_select_electrodes(): 'include' parameter must be 'all', str or list.") # Exclude if exclude is not None: if isinstance(exclude, str): to_remove = [s for s in channel_list if exclude in s] electrodes = [s for s in electrodes if s not in to_remove] elif isinstance(exclude, list): to_remove = [] for i in exclude: to_remove += [s for s in channel_list if i in s] electrodes = [s for s in electrodes if s not in to_remove] else: print("NeuroKit Warning: eeg_select_electrodes(): 'exclude' parameter must be None, str or list.") # Laterality if hemisphere != "both": if hemisphere.lower() == "left" or hemisphere.lower() == "l": hemi = [s for s in electrodes if len(re.findall(r'\d+', s)) > 0 and int(re.findall(r'\d+', s)[0])%2 > 0] elif hemisphere.lower() == "right" or hemisphere.lower() == "r": hemi = [s for s in electrodes if len(re.findall(r'\d+', s)) 
> 0 and int(re.findall(r'\d+', s)[0])%2 == 0] else: print("NeuroKit Warning: eeg_select_electrodes(): 'hemisphere' parameter must be 'both', 'left' or 'right'. Returning both.") if central is True: hemi += [s for s in electrodes if 'z' in s] electrodes = hemi return(electrodes)
def function[eeg_select_electrodes, parameter[eeg, include, exclude, hemisphere, central]]: constant[ Returns electrodes/sensors names of selected region (according to a 10-20 EEG montage). Parameters ---------- eeg : mne.Raw or mne.Epochs EEG data. include : str ot list Sensor area to include. exclude : str or list or None Sensor area to exclude. hemisphere : str Select both hemispheres? "both", "left" or "right". central : bool Select the central line. Returns ---------- electrodes : list List of electrodes/sensors corresponding to the selected area. Example ---------- >>> import neurokit as nk >>> nk.eeg_select_electrodes(include="F", exclude="C") Notes ---------- *Authors* - `Dominique Makowski <https://dominiquemakowski.github.io/>`_ ] variable[eeg] assign[=] call[call[name[eeg].copy, parameter[]].pick_types, parameter[]] variable[channel_list] assign[=] name[eeg].ch_names if compare[name[include] equal[==] constant[all]] begin[:] variable[electrodes] assign[=] name[channel_list] if compare[name[exclude] is_not constant[None]] begin[:] if call[name[isinstance], parameter[name[exclude], name[str]]] begin[:] variable[to_remove] assign[=] <ast.ListComp object at 0x7da18f8114e0> variable[electrodes] assign[=] <ast.ListComp object at 0x7da18f8135e0> if compare[name[hemisphere] not_equal[!=] constant[both]] begin[:] if <ast.BoolOp object at 0x7da18f812dd0> begin[:] variable[hemi] assign[=] <ast.ListComp object at 0x7da18f811f00> if compare[name[central] is constant[True]] begin[:] <ast.AugAssign object at 0x7da18f811de0> variable[electrodes] assign[=] name[hemi] return[name[electrodes]]
keyword[def] identifier[eeg_select_electrodes] ( identifier[eeg] , identifier[include] = literal[string] , identifier[exclude] = keyword[None] , identifier[hemisphere] = literal[string] , identifier[central] = keyword[True] ): literal[string] identifier[eeg] = identifier[eeg] . identifier[copy] (). identifier[pick_types] ( identifier[meg] = keyword[False] , identifier[eeg] = keyword[True] ) identifier[channel_list] = identifier[eeg] . identifier[ch_names] keyword[if] identifier[include] == literal[string] : identifier[electrodes] = identifier[channel_list] keyword[elif] identifier[isinstance] ( identifier[include] , identifier[str] ): identifier[electrodes] =[ identifier[s] keyword[for] identifier[s] keyword[in] identifier[channel_list] keyword[if] identifier[include] keyword[in] identifier[s] ] keyword[elif] identifier[isinstance] ( identifier[include] , identifier[list] ): identifier[electrodes] =[] keyword[for] identifier[i] keyword[in] identifier[include] : identifier[electrodes] +=[ identifier[s] keyword[for] identifier[s] keyword[in] identifier[channel_list] keyword[if] identifier[i] keyword[in] identifier[s] ] keyword[else] : identifier[print] ( literal[string] ) keyword[if] identifier[exclude] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[isinstance] ( identifier[exclude] , identifier[str] ): identifier[to_remove] =[ identifier[s] keyword[for] identifier[s] keyword[in] identifier[channel_list] keyword[if] identifier[exclude] keyword[in] identifier[s] ] identifier[electrodes] =[ identifier[s] keyword[for] identifier[s] keyword[in] identifier[electrodes] keyword[if] identifier[s] keyword[not] keyword[in] identifier[to_remove] ] keyword[elif] identifier[isinstance] ( identifier[exclude] , identifier[list] ): identifier[to_remove] =[] keyword[for] identifier[i] keyword[in] identifier[exclude] : identifier[to_remove] +=[ identifier[s] keyword[for] identifier[s] keyword[in] identifier[channel_list] keyword[if] identifier[i] keyword[in] 
identifier[s] ] identifier[electrodes] =[ identifier[s] keyword[for] identifier[s] keyword[in] identifier[electrodes] keyword[if] identifier[s] keyword[not] keyword[in] identifier[to_remove] ] keyword[else] : identifier[print] ( literal[string] ) keyword[if] identifier[hemisphere] != literal[string] : keyword[if] identifier[hemisphere] . identifier[lower] ()== literal[string] keyword[or] identifier[hemisphere] . identifier[lower] ()== literal[string] : identifier[hemi] =[ identifier[s] keyword[for] identifier[s] keyword[in] identifier[electrodes] keyword[if] identifier[len] ( identifier[re] . identifier[findall] ( literal[string] , identifier[s] ))> literal[int] keyword[and] identifier[int] ( identifier[re] . identifier[findall] ( literal[string] , identifier[s] )[ literal[int] ])% literal[int] > literal[int] ] keyword[elif] identifier[hemisphere] . identifier[lower] ()== literal[string] keyword[or] identifier[hemisphere] . identifier[lower] ()== literal[string] : identifier[hemi] =[ identifier[s] keyword[for] identifier[s] keyword[in] identifier[electrodes] keyword[if] identifier[len] ( identifier[re] . identifier[findall] ( literal[string] , identifier[s] ))> literal[int] keyword[and] identifier[int] ( identifier[re] . identifier[findall] ( literal[string] , identifier[s] )[ literal[int] ])% literal[int] == literal[int] ] keyword[else] : identifier[print] ( literal[string] ) keyword[if] identifier[central] keyword[is] keyword[True] : identifier[hemi] +=[ identifier[s] keyword[for] identifier[s] keyword[in] identifier[electrodes] keyword[if] literal[string] keyword[in] identifier[s] ] identifier[electrodes] = identifier[hemi] keyword[return] ( identifier[electrodes] )
def eeg_select_electrodes(eeg, include='all', exclude=None, hemisphere='both', central=True): """ Returns electrodes/sensors names of selected region (according to a 10-20 EEG montage). Parameters ---------- eeg : mne.Raw or mne.Epochs EEG data. include : str ot list Sensor area to include. exclude : str or list or None Sensor area to exclude. hemisphere : str Select both hemispheres? "both", "left" or "right". central : bool Select the central line. Returns ---------- electrodes : list List of electrodes/sensors corresponding to the selected area. Example ---------- >>> import neurokit as nk >>> nk.eeg_select_electrodes(include="F", exclude="C") Notes ---------- *Authors* - `Dominique Makowski <https://dominiquemakowski.github.io/>`_ """ # Get all channel names eeg = eeg.copy().pick_types(meg=False, eeg=True) channel_list = eeg.ch_names # Include if include == 'all': electrodes = channel_list # depends on [control=['if'], data=[]] elif isinstance(include, str): electrodes = [s for s in channel_list if include in s] # depends on [control=['if'], data=[]] elif isinstance(include, list): electrodes = [] for i in include: electrodes += [s for s in channel_list if i in s] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] else: print("NeuroKit Warning: eeg_select_electrodes(): 'include' parameter must be 'all', str or list.") # Exclude if exclude is not None: if isinstance(exclude, str): to_remove = [s for s in channel_list if exclude in s] electrodes = [s for s in electrodes if s not in to_remove] # depends on [control=['if'], data=[]] elif isinstance(exclude, list): to_remove = [] for i in exclude: to_remove += [s for s in channel_list if i in s] # depends on [control=['for'], data=['i']] electrodes = [s for s in electrodes if s not in to_remove] # depends on [control=['if'], data=[]] else: print("NeuroKit Warning: eeg_select_electrodes(): 'exclude' parameter must be None, str or list.") # depends on [control=['if'], data=['exclude']] # 
Laterality if hemisphere != 'both': if hemisphere.lower() == 'left' or hemisphere.lower() == 'l': hemi = [s for s in electrodes if len(re.findall('\\d+', s)) > 0 and int(re.findall('\\d+', s)[0]) % 2 > 0] # depends on [control=['if'], data=[]] elif hemisphere.lower() == 'right' or hemisphere.lower() == 'r': hemi = [s for s in electrodes if len(re.findall('\\d+', s)) > 0 and int(re.findall('\\d+', s)[0]) % 2 == 0] # depends on [control=['if'], data=[]] else: print("NeuroKit Warning: eeg_select_electrodes(): 'hemisphere' parameter must be 'both', 'left' or 'right'. Returning both.") if central is True: hemi += [s for s in electrodes if 'z' in s] # depends on [control=['if'], data=[]] electrodes = hemi # depends on [control=['if'], data=['hemisphere']] return electrodes
def setOverlayTransformOverlayRelative(self, ulOverlayHandle, ulOverlayHandleParent): """Sets the transform to relative to the transform of the specified overlay. This overlays visibility will also track the parents visibility""" fn = self.function_table.setOverlayTransformOverlayRelative pmatParentOverlayToOverlayTransform = HmdMatrix34_t() result = fn(ulOverlayHandle, ulOverlayHandleParent, byref(pmatParentOverlayToOverlayTransform)) return result, pmatParentOverlayToOverlayTransform
def function[setOverlayTransformOverlayRelative, parameter[self, ulOverlayHandle, ulOverlayHandleParent]]: constant[Sets the transform to relative to the transform of the specified overlay. This overlays visibility will also track the parents visibility] variable[fn] assign[=] name[self].function_table.setOverlayTransformOverlayRelative variable[pmatParentOverlayToOverlayTransform] assign[=] call[name[HmdMatrix34_t], parameter[]] variable[result] assign[=] call[name[fn], parameter[name[ulOverlayHandle], name[ulOverlayHandleParent], call[name[byref], parameter[name[pmatParentOverlayToOverlayTransform]]]]] return[tuple[[<ast.Name object at 0x7da2046229e0>, <ast.Name object at 0x7da204623b50>]]]
keyword[def] identifier[setOverlayTransformOverlayRelative] ( identifier[self] , identifier[ulOverlayHandle] , identifier[ulOverlayHandleParent] ): literal[string] identifier[fn] = identifier[self] . identifier[function_table] . identifier[setOverlayTransformOverlayRelative] identifier[pmatParentOverlayToOverlayTransform] = identifier[HmdMatrix34_t] () identifier[result] = identifier[fn] ( identifier[ulOverlayHandle] , identifier[ulOverlayHandleParent] , identifier[byref] ( identifier[pmatParentOverlayToOverlayTransform] )) keyword[return] identifier[result] , identifier[pmatParentOverlayToOverlayTransform]
def setOverlayTransformOverlayRelative(self, ulOverlayHandle, ulOverlayHandleParent): """Sets the transform to relative to the transform of the specified overlay. This overlays visibility will also track the parents visibility""" fn = self.function_table.setOverlayTransformOverlayRelative pmatParentOverlayToOverlayTransform = HmdMatrix34_t() result = fn(ulOverlayHandle, ulOverlayHandleParent, byref(pmatParentOverlayToOverlayTransform)) return (result, pmatParentOverlayToOverlayTransform)
def next_page(self): """Return the next `Page` after this one in the result sequence it's from. If the current page is the last page in the sequence, calling this method raises a `ValueError`. """ try: next_url = self.next_url except AttributeError: raise PageError("Page %r has no next page" % self) return self.page_for_url(next_url)
def function[next_page, parameter[self]]: constant[Return the next `Page` after this one in the result sequence it's from. If the current page is the last page in the sequence, calling this method raises a `ValueError`. ] <ast.Try object at 0x7da1b0395cf0> return[call[name[self].page_for_url, parameter[name[next_url]]]]
keyword[def] identifier[next_page] ( identifier[self] ): literal[string] keyword[try] : identifier[next_url] = identifier[self] . identifier[next_url] keyword[except] identifier[AttributeError] : keyword[raise] identifier[PageError] ( literal[string] % identifier[self] ) keyword[return] identifier[self] . identifier[page_for_url] ( identifier[next_url] )
def next_page(self): """Return the next `Page` after this one in the result sequence it's from. If the current page is the last page in the sequence, calling this method raises a `ValueError`. """ try: next_url = self.next_url # depends on [control=['try'], data=[]] except AttributeError: raise PageError('Page %r has no next page' % self) # depends on [control=['except'], data=[]] return self.page_for_url(next_url)
def _save_translations(sender, instance, *args, **kwargs): """ This signal saves model translations. """ # If we are in a site with one language there is no need of saving translations if site_is_monolingual(): return False cls = sender # If its class has no "translatable_fields" then there are no translations if not hasattr(cls._meta, "translatable_fields"): return False # For each translatable field, get its value, computes its md5 and for each language creates its # empty translation. for field in cls._meta.translatable_fields: value = getattr(instance,field) if not value is None: md5_value = checksum(value) setattr( instance, u"md5"+field, md5_value ) for lang in settings.LANGUAGES: lang = lang[0] # print "({0}!={1}) = {2}".format(lang, settings.LANGUAGE_CODE,lang!=settings.LANGUAGE_CODE) if lang != settings.LANGUAGE_CODE: context = u"Updating from object" if hasattr(instance, "trans_context"): context = getattr(instance, "trans_context") trans = FieldTranslation.update(instance, field, lang, context)
def function[_save_translations, parameter[sender, instance]]: constant[ This signal saves model translations. ] if call[name[site_is_monolingual], parameter[]] begin[:] return[constant[False]] variable[cls] assign[=] name[sender] if <ast.UnaryOp object at 0x7da18dc99870> begin[:] return[constant[False]] for taget[name[field]] in starred[name[cls]._meta.translatable_fields] begin[:] variable[value] assign[=] call[name[getattr], parameter[name[instance], name[field]]] if <ast.UnaryOp object at 0x7da18dc9a3b0> begin[:] variable[md5_value] assign[=] call[name[checksum], parameter[name[value]]] call[name[setattr], parameter[name[instance], binary_operation[constant[md5] + name[field]], name[md5_value]]] for taget[name[lang]] in starred[name[settings].LANGUAGES] begin[:] variable[lang] assign[=] call[name[lang]][constant[0]] if compare[name[lang] not_equal[!=] name[settings].LANGUAGE_CODE] begin[:] variable[context] assign[=] constant[Updating from object] if call[name[hasattr], parameter[name[instance], constant[trans_context]]] begin[:] variable[context] assign[=] call[name[getattr], parameter[name[instance], constant[trans_context]]] variable[trans] assign[=] call[name[FieldTranslation].update, parameter[name[instance], name[field], name[lang], name[context]]]
keyword[def] identifier[_save_translations] ( identifier[sender] , identifier[instance] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[site_is_monolingual] (): keyword[return] keyword[False] identifier[cls] = identifier[sender] keyword[if] keyword[not] identifier[hasattr] ( identifier[cls] . identifier[_meta] , literal[string] ): keyword[return] keyword[False] keyword[for] identifier[field] keyword[in] identifier[cls] . identifier[_meta] . identifier[translatable_fields] : identifier[value] = identifier[getattr] ( identifier[instance] , identifier[field] ) keyword[if] keyword[not] identifier[value] keyword[is] keyword[None] : identifier[md5_value] = identifier[checksum] ( identifier[value] ) identifier[setattr] ( identifier[instance] , literal[string] + identifier[field] , identifier[md5_value] ) keyword[for] identifier[lang] keyword[in] identifier[settings] . identifier[LANGUAGES] : identifier[lang] = identifier[lang] [ literal[int] ] keyword[if] identifier[lang] != identifier[settings] . identifier[LANGUAGE_CODE] : identifier[context] = literal[string] keyword[if] identifier[hasattr] ( identifier[instance] , literal[string] ): identifier[context] = identifier[getattr] ( identifier[instance] , literal[string] ) identifier[trans] = identifier[FieldTranslation] . identifier[update] ( identifier[instance] , identifier[field] , identifier[lang] , identifier[context] )
def _save_translations(sender, instance, *args, **kwargs): """ This signal saves model translations. """ # If we are in a site with one language there is no need of saving translations if site_is_monolingual(): return False # depends on [control=['if'], data=[]] cls = sender # If its class has no "translatable_fields" then there are no translations if not hasattr(cls._meta, 'translatable_fields'): return False # depends on [control=['if'], data=[]] # For each translatable field, get its value, computes its md5 and for each language creates its # empty translation. for field in cls._meta.translatable_fields: value = getattr(instance, field) if not value is None: md5_value = checksum(value) setattr(instance, u'md5' + field, md5_value) for lang in settings.LANGUAGES: lang = lang[0] # print "({0}!={1}) = {2}".format(lang, settings.LANGUAGE_CODE,lang!=settings.LANGUAGE_CODE) if lang != settings.LANGUAGE_CODE: context = u'Updating from object' if hasattr(instance, 'trans_context'): context = getattr(instance, 'trans_context') # depends on [control=['if'], data=[]] trans = FieldTranslation.update(instance, field, lang, context) # depends on [control=['if'], data=['lang']] # depends on [control=['for'], data=['lang']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['field']]
def plotExampleInputOutput(sp, inputVectors, saveFigPrefix=None): """ Plot example input & output @param sp: an spatial pooler instance @param inputVectors: a set of input vectors """ numInputVector, inputSize = inputVectors.shape numColumns = np.prod(sp.getColumnDimensions()) outputColumns = np.zeros((numInputVector, numColumns), dtype=uintType) inputOverlap = np.zeros((numInputVector, numColumns), dtype=uintType) connectedCounts = np.zeros((numColumns,), dtype=uintType) sp.getConnectedCounts(connectedCounts) winnerInputOverlap = np.zeros(numInputVector) for i in range(numInputVector): sp.compute(inputVectors[i][:], False, outputColumns[i][:]) inputOverlap[i][:] = sp.getOverlaps() activeColumns = np.where(outputColumns[i][:] > 0)[0] if len(activeColumns) > 0: winnerInputOverlap[i] = np.mean( inputOverlap[i][np.where(outputColumns[i][:] > 0)[0]]) fig, axs = plt.subplots(2, 1) axs[0].imshow(inputVectors[:, :200], cmap='gray', interpolation="nearest") axs[0].set_ylabel('input #') axs[0].set_title('input vectors') axs[1].imshow(outputColumns[:, :200], cmap='gray', interpolation="nearest") axs[1].set_ylabel('input #') axs[1].set_title('output vectors') if saveFigPrefix is not None: plt.savefig('figures/{}_example_input_output.pdf'.format(saveFigPrefix)) inputDensity = np.sum(inputVectors, 1) / float(inputSize) outputDensity = np.sum(outputColumns, 1) / float(numColumns) fig, axs = plt.subplots(2, 1) axs[0].plot(inputDensity) axs[0].set_xlabel('input #') axs[0].set_ylim([0, 0.2]) axs[1].plot(outputDensity) axs[1].set_xlabel('input #') axs[1].set_ylim([0, 0.05]) if saveFigPrefix is not None: plt.savefig('figures/{}_example_input_output_density.pdf'.format(saveFigPrefix))
def function[plotExampleInputOutput, parameter[sp, inputVectors, saveFigPrefix]]: constant[ Plot example input & output @param sp: an spatial pooler instance @param inputVectors: a set of input vectors ] <ast.Tuple object at 0x7da1b0860c10> assign[=] name[inputVectors].shape variable[numColumns] assign[=] call[name[np].prod, parameter[call[name[sp].getColumnDimensions, parameter[]]]] variable[outputColumns] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b09262f0>, <ast.Name object at 0x7da1b09244f0>]]]] variable[inputOverlap] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b0925ff0>, <ast.Name object at 0x7da1b0926140>]]]] variable[connectedCounts] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b0927610>]]]] call[name[sp].getConnectedCounts, parameter[name[connectedCounts]]] variable[winnerInputOverlap] assign[=] call[name[np].zeros, parameter[name[numInputVector]]] for taget[name[i]] in starred[call[name[range], parameter[name[numInputVector]]]] begin[:] call[name[sp].compute, parameter[call[call[name[inputVectors]][name[i]]][<ast.Slice object at 0x7da1b0888dc0>], constant[False], call[call[name[outputColumns]][name[i]]][<ast.Slice object at 0x7da1b088bfa0>]]] call[call[name[inputOverlap]][name[i]]][<ast.Slice object at 0x7da1b0888a60>] assign[=] call[name[sp].getOverlaps, parameter[]] variable[activeColumns] assign[=] call[call[name[np].where, parameter[compare[call[call[name[outputColumns]][name[i]]][<ast.Slice object at 0x7da1b0888580>] greater[>] constant[0]]]]][constant[0]] if compare[call[name[len], parameter[name[activeColumns]]] greater[>] constant[0]] begin[:] call[name[winnerInputOverlap]][name[i]] assign[=] call[name[np].mean, parameter[call[call[name[inputOverlap]][name[i]]][call[call[name[np].where, parameter[compare[call[call[name[outputColumns]][name[i]]][<ast.Slice object at 0x7da1b0888940>] greater[>] constant[0]]]]][constant[0]]]]] <ast.Tuple object at 0x7da1b0888700> 
assign[=] call[name[plt].subplots, parameter[constant[2], constant[1]]] call[call[name[axs]][constant[0]].imshow, parameter[call[name[inputVectors]][tuple[[<ast.Slice object at 0x7da1b088b9a0>, <ast.Slice object at 0x7da1b0888b50>]]]]] call[call[name[axs]][constant[0]].set_ylabel, parameter[constant[input #]]] call[call[name[axs]][constant[0]].set_title, parameter[constant[input vectors]]] call[call[name[axs]][constant[1]].imshow, parameter[call[name[outputColumns]][tuple[[<ast.Slice object at 0x7da1b0888f10>, <ast.Slice object at 0x7da1b0888520>]]]]] call[call[name[axs]][constant[1]].set_ylabel, parameter[constant[input #]]] call[call[name[axs]][constant[1]].set_title, parameter[constant[output vectors]]] if compare[name[saveFigPrefix] is_not constant[None]] begin[:] call[name[plt].savefig, parameter[call[constant[figures/{}_example_input_output.pdf].format, parameter[name[saveFigPrefix]]]]] variable[inputDensity] assign[=] binary_operation[call[name[np].sum, parameter[name[inputVectors], constant[1]]] / call[name[float], parameter[name[inputSize]]]] variable[outputDensity] assign[=] binary_operation[call[name[np].sum, parameter[name[outputColumns], constant[1]]] / call[name[float], parameter[name[numColumns]]]] <ast.Tuple object at 0x7da1b088b370> assign[=] call[name[plt].subplots, parameter[constant[2], constant[1]]] call[call[name[axs]][constant[0]].plot, parameter[name[inputDensity]]] call[call[name[axs]][constant[0]].set_xlabel, parameter[constant[input #]]] call[call[name[axs]][constant[0]].set_ylim, parameter[list[[<ast.Constant object at 0x7da1b0844880>, <ast.Constant object at 0x7da1b0845c30>]]]] call[call[name[axs]][constant[1]].plot, parameter[name[outputDensity]]] call[call[name[axs]][constant[1]].set_xlabel, parameter[constant[input #]]] call[call[name[axs]][constant[1]].set_ylim, parameter[list[[<ast.Constant object at 0x7da1b0847df0>, <ast.Constant object at 0x7da1b0847eb0>]]]] if compare[name[saveFigPrefix] is_not constant[None]] begin[:] 
call[name[plt].savefig, parameter[call[constant[figures/{}_example_input_output_density.pdf].format, parameter[name[saveFigPrefix]]]]]
keyword[def] identifier[plotExampleInputOutput] ( identifier[sp] , identifier[inputVectors] , identifier[saveFigPrefix] = keyword[None] ): literal[string] identifier[numInputVector] , identifier[inputSize] = identifier[inputVectors] . identifier[shape] identifier[numColumns] = identifier[np] . identifier[prod] ( identifier[sp] . identifier[getColumnDimensions] ()) identifier[outputColumns] = identifier[np] . identifier[zeros] (( identifier[numInputVector] , identifier[numColumns] ), identifier[dtype] = identifier[uintType] ) identifier[inputOverlap] = identifier[np] . identifier[zeros] (( identifier[numInputVector] , identifier[numColumns] ), identifier[dtype] = identifier[uintType] ) identifier[connectedCounts] = identifier[np] . identifier[zeros] (( identifier[numColumns] ,), identifier[dtype] = identifier[uintType] ) identifier[sp] . identifier[getConnectedCounts] ( identifier[connectedCounts] ) identifier[winnerInputOverlap] = identifier[np] . identifier[zeros] ( identifier[numInputVector] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[numInputVector] ): identifier[sp] . identifier[compute] ( identifier[inputVectors] [ identifier[i] ][:], keyword[False] , identifier[outputColumns] [ identifier[i] ][:]) identifier[inputOverlap] [ identifier[i] ][:]= identifier[sp] . identifier[getOverlaps] () identifier[activeColumns] = identifier[np] . identifier[where] ( identifier[outputColumns] [ identifier[i] ][:]> literal[int] )[ literal[int] ] keyword[if] identifier[len] ( identifier[activeColumns] )> literal[int] : identifier[winnerInputOverlap] [ identifier[i] ]= identifier[np] . identifier[mean] ( identifier[inputOverlap] [ identifier[i] ][ identifier[np] . identifier[where] ( identifier[outputColumns] [ identifier[i] ][:]> literal[int] )[ literal[int] ]]) identifier[fig] , identifier[axs] = identifier[plt] . identifier[subplots] ( literal[int] , literal[int] ) identifier[axs] [ literal[int] ]. 
identifier[imshow] ( identifier[inputVectors] [:,: literal[int] ], identifier[cmap] = literal[string] , identifier[interpolation] = literal[string] ) identifier[axs] [ literal[int] ]. identifier[set_ylabel] ( literal[string] ) identifier[axs] [ literal[int] ]. identifier[set_title] ( literal[string] ) identifier[axs] [ literal[int] ]. identifier[imshow] ( identifier[outputColumns] [:,: literal[int] ], identifier[cmap] = literal[string] , identifier[interpolation] = literal[string] ) identifier[axs] [ literal[int] ]. identifier[set_ylabel] ( literal[string] ) identifier[axs] [ literal[int] ]. identifier[set_title] ( literal[string] ) keyword[if] identifier[saveFigPrefix] keyword[is] keyword[not] keyword[None] : identifier[plt] . identifier[savefig] ( literal[string] . identifier[format] ( identifier[saveFigPrefix] )) identifier[inputDensity] = identifier[np] . identifier[sum] ( identifier[inputVectors] , literal[int] )/ identifier[float] ( identifier[inputSize] ) identifier[outputDensity] = identifier[np] . identifier[sum] ( identifier[outputColumns] , literal[int] )/ identifier[float] ( identifier[numColumns] ) identifier[fig] , identifier[axs] = identifier[plt] . identifier[subplots] ( literal[int] , literal[int] ) identifier[axs] [ literal[int] ]. identifier[plot] ( identifier[inputDensity] ) identifier[axs] [ literal[int] ]. identifier[set_xlabel] ( literal[string] ) identifier[axs] [ literal[int] ]. identifier[set_ylim] ([ literal[int] , literal[int] ]) identifier[axs] [ literal[int] ]. identifier[plot] ( identifier[outputDensity] ) identifier[axs] [ literal[int] ]. identifier[set_xlabel] ( literal[string] ) identifier[axs] [ literal[int] ]. identifier[set_ylim] ([ literal[int] , literal[int] ]) keyword[if] identifier[saveFigPrefix] keyword[is] keyword[not] keyword[None] : identifier[plt] . identifier[savefig] ( literal[string] . identifier[format] ( identifier[saveFigPrefix] ))
def plotExampleInputOutput(sp, inputVectors, saveFigPrefix=None): """ Plot example input & output @param sp: an spatial pooler instance @param inputVectors: a set of input vectors """ (numInputVector, inputSize) = inputVectors.shape numColumns = np.prod(sp.getColumnDimensions()) outputColumns = np.zeros((numInputVector, numColumns), dtype=uintType) inputOverlap = np.zeros((numInputVector, numColumns), dtype=uintType) connectedCounts = np.zeros((numColumns,), dtype=uintType) sp.getConnectedCounts(connectedCounts) winnerInputOverlap = np.zeros(numInputVector) for i in range(numInputVector): sp.compute(inputVectors[i][:], False, outputColumns[i][:]) inputOverlap[i][:] = sp.getOverlaps() activeColumns = np.where(outputColumns[i][:] > 0)[0] if len(activeColumns) > 0: winnerInputOverlap[i] = np.mean(inputOverlap[i][np.where(outputColumns[i][:] > 0)[0]]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] (fig, axs) = plt.subplots(2, 1) axs[0].imshow(inputVectors[:, :200], cmap='gray', interpolation='nearest') axs[0].set_ylabel('input #') axs[0].set_title('input vectors') axs[1].imshow(outputColumns[:, :200], cmap='gray', interpolation='nearest') axs[1].set_ylabel('input #') axs[1].set_title('output vectors') if saveFigPrefix is not None: plt.savefig('figures/{}_example_input_output.pdf'.format(saveFigPrefix)) # depends on [control=['if'], data=['saveFigPrefix']] inputDensity = np.sum(inputVectors, 1) / float(inputSize) outputDensity = np.sum(outputColumns, 1) / float(numColumns) (fig, axs) = plt.subplots(2, 1) axs[0].plot(inputDensity) axs[0].set_xlabel('input #') axs[0].set_ylim([0, 0.2]) axs[1].plot(outputDensity) axs[1].set_xlabel('input #') axs[1].set_ylim([0, 0.05]) if saveFigPrefix is not None: plt.savefig('figures/{}_example_input_output_density.pdf'.format(saveFigPrefix)) # depends on [control=['if'], data=['saveFigPrefix']]
def _format_num(self, value): """Return the number value for value, given this field's `num_type`.""" # (value is True or value is False) is ~5x faster than isinstance(value, bool) if value is True or value is False: raise TypeError('value must be a Number, not a boolean.') return self.num_type(value)
def function[_format_num, parameter[self, value]]: constant[Return the number value for value, given this field's `num_type`.] if <ast.BoolOp object at 0x7da18dc047f0> begin[:] <ast.Raise object at 0x7da18dc046a0> return[call[name[self].num_type, parameter[name[value]]]]
keyword[def] identifier[_format_num] ( identifier[self] , identifier[value] ): literal[string] keyword[if] identifier[value] keyword[is] keyword[True] keyword[or] identifier[value] keyword[is] keyword[False] : keyword[raise] identifier[TypeError] ( literal[string] ) keyword[return] identifier[self] . identifier[num_type] ( identifier[value] )
def _format_num(self, value): """Return the number value for value, given this field's `num_type`.""" # (value is True or value is False) is ~5x faster than isinstance(value, bool) if value is True or value is False: raise TypeError('value must be a Number, not a boolean.') # depends on [control=['if'], data=[]] return self.num_type(value)
def getNym(self, nym, role=None, isCommitted=True): """ Get a nym, if role is provided then get nym with that role :param nym: :param role: :param isCommitted: :return: """ try: seqNo, txnTime, ta, actual_role, verkey = self.get(nym, isCommitted) except KeyError: return None if role and role != actual_role: return None return { ROLE: actual_role or None, VERKEY: verkey or None, f.IDENTIFIER.nm: ta or None, f.SEQ_NO.nm: seqNo or None, TXN_TIME: txnTime or None, }
def function[getNym, parameter[self, nym, role, isCommitted]]: constant[ Get a nym, if role is provided then get nym with that role :param nym: :param role: :param isCommitted: :return: ] <ast.Try object at 0x7da2054a4970> if <ast.BoolOp object at 0x7da2054a7fd0> begin[:] return[constant[None]] return[dictionary[[<ast.Name object at 0x7da1b1b47160>, <ast.Name object at 0x7da1b1b46cb0>, <ast.Attribute object at 0x7da1b1b44e20>, <ast.Attribute object at 0x7da1b1b44820>, <ast.Name object at 0x7da1b1b467d0>], [<ast.BoolOp object at 0x7da1b1b46bf0>, <ast.BoolOp object at 0x7da1b1b453f0>, <ast.BoolOp object at 0x7da1b1b46920>, <ast.BoolOp object at 0x7da1b1b476d0>, <ast.BoolOp object at 0x7da1b1b45a50>]]]
keyword[def] identifier[getNym] ( identifier[self] , identifier[nym] , identifier[role] = keyword[None] , identifier[isCommitted] = keyword[True] ): literal[string] keyword[try] : identifier[seqNo] , identifier[txnTime] , identifier[ta] , identifier[actual_role] , identifier[verkey] = identifier[self] . identifier[get] ( identifier[nym] , identifier[isCommitted] ) keyword[except] identifier[KeyError] : keyword[return] keyword[None] keyword[if] identifier[role] keyword[and] identifier[role] != identifier[actual_role] : keyword[return] keyword[None] keyword[return] { identifier[ROLE] : identifier[actual_role] keyword[or] keyword[None] , identifier[VERKEY] : identifier[verkey] keyword[or] keyword[None] , identifier[f] . identifier[IDENTIFIER] . identifier[nm] : identifier[ta] keyword[or] keyword[None] , identifier[f] . identifier[SEQ_NO] . identifier[nm] : identifier[seqNo] keyword[or] keyword[None] , identifier[TXN_TIME] : identifier[txnTime] keyword[or] keyword[None] , }
def getNym(self, nym, role=None, isCommitted=True): """ Get a nym, if role is provided then get nym with that role :param nym: :param role: :param isCommitted: :return: """ try: (seqNo, txnTime, ta, actual_role, verkey) = self.get(nym, isCommitted) # depends on [control=['try'], data=[]] except KeyError: return None # depends on [control=['except'], data=[]] if role and role != actual_role: return None # depends on [control=['if'], data=[]] return {ROLE: actual_role or None, VERKEY: verkey or None, f.IDENTIFIER.nm: ta or None, f.SEQ_NO.nm: seqNo or None, TXN_TIME: txnTime or None}
def _poll(self): """ Poll Trusted Advisor (Support) API for limit checks. Return a dict of service name (string) keys to nested dict vals, where each key is a limit name and each value the current numeric limit. e.g.: :: { 'EC2': { 'SomeLimit': 10, } } """ logger.info("Beginning TrustedAdvisor poll") tmp = self._get_limit_check_id() if not self.have_ta: logger.info('TrustedAdvisor.have_ta is False; not polling TA') return {} if tmp is None: logger.critical("Unable to find 'Service Limits' Trusted Advisor " "check; not using Trusted Advisor data.") return check_id, metadata = tmp checks = self._get_refreshed_check_result(check_id) region = self.ta_region or self.conn._client_config.region_name res = {} if checks['result'].get('status', '') == 'not_available': logger.warning( 'Trusted Advisor returned status "not_available" for ' 'service limit check; cannot retrieve limits from TA.' ) return {} if 'flaggedResources' not in checks['result']: logger.warning( 'Trusted Advisor returned no results for ' 'service limit check; cannot retrieve limits from TA.' ) return {} for check in checks['result']['flaggedResources']: if 'region' in check and check['region'] != region: continue data = dict(zip(metadata, check['metadata'])) if data['Service'] not in res: res[data['Service']] = {} try: val = int(data['Limit Amount']) except ValueError: val = data['Limit Amount'] if val != 'Unlimited': logger.error('TrustedAdvisor returned unknown Limit ' 'Amount %s for %s - %s', val, data['Service'], data['Limit Name']) continue else: logger.debug('TrustedAdvisor setting explicit "Unlimited" ' 'limit for %s - %s', data['Service'], data['Limit Name']) res[data['Service']][data['Limit Name']] = val logger.info("Finished TrustedAdvisor poll") return res
def function[_poll, parameter[self]]: constant[ Poll Trusted Advisor (Support) API for limit checks. Return a dict of service name (string) keys to nested dict vals, where each key is a limit name and each value the current numeric limit. e.g.: :: { 'EC2': { 'SomeLimit': 10, } } ] call[name[logger].info, parameter[constant[Beginning TrustedAdvisor poll]]] variable[tmp] assign[=] call[name[self]._get_limit_check_id, parameter[]] if <ast.UnaryOp object at 0x7da18f00fd00> begin[:] call[name[logger].info, parameter[constant[TrustedAdvisor.have_ta is False; not polling TA]]] return[dictionary[[], []]] if compare[name[tmp] is constant[None]] begin[:] call[name[logger].critical, parameter[constant[Unable to find 'Service Limits' Trusted Advisor check; not using Trusted Advisor data.]]] return[None] <ast.Tuple object at 0x7da18f00d1b0> assign[=] name[tmp] variable[checks] assign[=] call[name[self]._get_refreshed_check_result, parameter[name[check_id]]] variable[region] assign[=] <ast.BoolOp object at 0x7da18f00c0a0> variable[res] assign[=] dictionary[[], []] if compare[call[call[name[checks]][constant[result]].get, parameter[constant[status], constant[]]] equal[==] constant[not_available]] begin[:] call[name[logger].warning, parameter[constant[Trusted Advisor returned status "not_available" for service limit check; cannot retrieve limits from TA.]]] return[dictionary[[], []]] if compare[constant[flaggedResources] <ast.NotIn object at 0x7da2590d7190> call[name[checks]][constant[result]]] begin[:] call[name[logger].warning, parameter[constant[Trusted Advisor returned no results for service limit check; cannot retrieve limits from TA.]]] return[dictionary[[], []]] for taget[name[check]] in starred[call[call[name[checks]][constant[result]]][constant[flaggedResources]]] begin[:] if <ast.BoolOp object at 0x7da18f00e050> begin[:] continue variable[data] assign[=] call[name[dict], parameter[call[name[zip], parameter[name[metadata], call[name[check]][constant[metadata]]]]]] if 
compare[call[name[data]][constant[Service]] <ast.NotIn object at 0x7da2590d7190> name[res]] begin[:] call[name[res]][call[name[data]][constant[Service]]] assign[=] dictionary[[], []] <ast.Try object at 0x7da18f00e2c0> call[call[name[res]][call[name[data]][constant[Service]]]][call[name[data]][constant[Limit Name]]] assign[=] name[val] call[name[logger].info, parameter[constant[Finished TrustedAdvisor poll]]] return[name[res]]
keyword[def] identifier[_poll] ( identifier[self] ): literal[string] identifier[logger] . identifier[info] ( literal[string] ) identifier[tmp] = identifier[self] . identifier[_get_limit_check_id] () keyword[if] keyword[not] identifier[self] . identifier[have_ta] : identifier[logger] . identifier[info] ( literal[string] ) keyword[return] {} keyword[if] identifier[tmp] keyword[is] keyword[None] : identifier[logger] . identifier[critical] ( literal[string] literal[string] ) keyword[return] identifier[check_id] , identifier[metadata] = identifier[tmp] identifier[checks] = identifier[self] . identifier[_get_refreshed_check_result] ( identifier[check_id] ) identifier[region] = identifier[self] . identifier[ta_region] keyword[or] identifier[self] . identifier[conn] . identifier[_client_config] . identifier[region_name] identifier[res] ={} keyword[if] identifier[checks] [ literal[string] ]. identifier[get] ( literal[string] , literal[string] )== literal[string] : identifier[logger] . identifier[warning] ( literal[string] literal[string] ) keyword[return] {} keyword[if] literal[string] keyword[not] keyword[in] identifier[checks] [ literal[string] ]: identifier[logger] . 
identifier[warning] ( literal[string] literal[string] ) keyword[return] {} keyword[for] identifier[check] keyword[in] identifier[checks] [ literal[string] ][ literal[string] ]: keyword[if] literal[string] keyword[in] identifier[check] keyword[and] identifier[check] [ literal[string] ]!= identifier[region] : keyword[continue] identifier[data] = identifier[dict] ( identifier[zip] ( identifier[metadata] , identifier[check] [ literal[string] ])) keyword[if] identifier[data] [ literal[string] ] keyword[not] keyword[in] identifier[res] : identifier[res] [ identifier[data] [ literal[string] ]]={} keyword[try] : identifier[val] = identifier[int] ( identifier[data] [ literal[string] ]) keyword[except] identifier[ValueError] : identifier[val] = identifier[data] [ literal[string] ] keyword[if] identifier[val] != literal[string] : identifier[logger] . identifier[error] ( literal[string] literal[string] , identifier[val] , identifier[data] [ literal[string] ], identifier[data] [ literal[string] ]) keyword[continue] keyword[else] : identifier[logger] . identifier[debug] ( literal[string] literal[string] , identifier[data] [ literal[string] ], identifier[data] [ literal[string] ]) identifier[res] [ identifier[data] [ literal[string] ]][ identifier[data] [ literal[string] ]]= identifier[val] identifier[logger] . identifier[info] ( literal[string] ) keyword[return] identifier[res]
def _poll(self): """ Poll Trusted Advisor (Support) API for limit checks. Return a dict of service name (string) keys to nested dict vals, where each key is a limit name and each value the current numeric limit. e.g.: :: { 'EC2': { 'SomeLimit': 10, } } """ logger.info('Beginning TrustedAdvisor poll') tmp = self._get_limit_check_id() if not self.have_ta: logger.info('TrustedAdvisor.have_ta is False; not polling TA') return {} # depends on [control=['if'], data=[]] if tmp is None: logger.critical("Unable to find 'Service Limits' Trusted Advisor check; not using Trusted Advisor data.") return # depends on [control=['if'], data=[]] (check_id, metadata) = tmp checks = self._get_refreshed_check_result(check_id) region = self.ta_region or self.conn._client_config.region_name res = {} if checks['result'].get('status', '') == 'not_available': logger.warning('Trusted Advisor returned status "not_available" for service limit check; cannot retrieve limits from TA.') return {} # depends on [control=['if'], data=[]] if 'flaggedResources' not in checks['result']: logger.warning('Trusted Advisor returned no results for service limit check; cannot retrieve limits from TA.') return {} # depends on [control=['if'], data=[]] for check in checks['result']['flaggedResources']: if 'region' in check and check['region'] != region: continue # depends on [control=['if'], data=[]] data = dict(zip(metadata, check['metadata'])) if data['Service'] not in res: res[data['Service']] = {} # depends on [control=['if'], data=['res']] try: val = int(data['Limit Amount']) # depends on [control=['try'], data=[]] except ValueError: val = data['Limit Amount'] if val != 'Unlimited': logger.error('TrustedAdvisor returned unknown Limit Amount %s for %s - %s', val, data['Service'], data['Limit Name']) continue # depends on [control=['if'], data=['val']] else: logger.debug('TrustedAdvisor setting explicit "Unlimited" limit for %s - %s', data['Service'], data['Limit Name']) # depends on [control=['except'], 
data=[]] res[data['Service']][data['Limit Name']] = val # depends on [control=['for'], data=['check']] logger.info('Finished TrustedAdvisor poll') return res
def RgbToPil(r, g, b): '''Convert the color from RGB to a PIL-compatible integer. Parameters: :r: The Red component value [0...1] :g: The Green component value [0...1] :b: The Blue component value [0...1] Returns: A PIL compatible integer (0xBBGGRR). >>> '0x%06x' % Color.RgbToPil(1, 0.5, 0) '0x0080ff' ''' r, g, b = [min(int(round(v*255)), 255) for v in (r, g, b)] return (b << 16) + (g << 8) + r
def function[RgbToPil, parameter[r, g, b]]: constant[Convert the color from RGB to a PIL-compatible integer. Parameters: :r: The Red component value [0...1] :g: The Green component value [0...1] :b: The Blue component value [0...1] Returns: A PIL compatible integer (0xBBGGRR). >>> '0x%06x' % Color.RgbToPil(1, 0.5, 0) '0x0080ff' ] <ast.Tuple object at 0x7da18fe90190> assign[=] <ast.ListComp object at 0x7da18fe93b20> return[binary_operation[binary_operation[binary_operation[name[b] <ast.LShift object at 0x7da2590d69e0> constant[16]] + binary_operation[name[g] <ast.LShift object at 0x7da2590d69e0> constant[8]]] + name[r]]]
keyword[def] identifier[RgbToPil] ( identifier[r] , identifier[g] , identifier[b] ): literal[string] identifier[r] , identifier[g] , identifier[b] =[ identifier[min] ( identifier[int] ( identifier[round] ( identifier[v] * literal[int] )), literal[int] ) keyword[for] identifier[v] keyword[in] ( identifier[r] , identifier[g] , identifier[b] )] keyword[return] ( identifier[b] << literal[int] )+( identifier[g] << literal[int] )+ identifier[r]
def RgbToPil(r, g, b): """Convert the color from RGB to a PIL-compatible integer. Parameters: :r: The Red component value [0...1] :g: The Green component value [0...1] :b: The Blue component value [0...1] Returns: A PIL compatible integer (0xBBGGRR). >>> '0x%06x' % Color.RgbToPil(1, 0.5, 0) '0x0080ff' """ (r, g, b) = [min(int(round(v * 255)), 255) for v in (r, g, b)] return (b << 16) + (g << 8) + r
def _block_shape(values, ndim=1, shape=None): """ guarantee the shape of the values to be at least 1 d """ if values.ndim < ndim: if shape is None: shape = values.shape if not is_extension_array_dtype(values): # TODO: https://github.com/pandas-dev/pandas/issues/23023 # block.shape is incorrect for "2D" ExtensionArrays # We can't, and don't need to, reshape. values = values.reshape(tuple((1, ) + shape)) return values
def function[_block_shape, parameter[values, ndim, shape]]: constant[ guarantee the shape of the values to be at least 1 d ] if compare[name[values].ndim less[<] name[ndim]] begin[:] if compare[name[shape] is constant[None]] begin[:] variable[shape] assign[=] name[values].shape if <ast.UnaryOp object at 0x7da18f7236d0> begin[:] variable[values] assign[=] call[name[values].reshape, parameter[call[name[tuple], parameter[binary_operation[tuple[[<ast.Constant object at 0x7da18f721210>]] + name[shape]]]]]] return[name[values]]
keyword[def] identifier[_block_shape] ( identifier[values] , identifier[ndim] = literal[int] , identifier[shape] = keyword[None] ): literal[string] keyword[if] identifier[values] . identifier[ndim] < identifier[ndim] : keyword[if] identifier[shape] keyword[is] keyword[None] : identifier[shape] = identifier[values] . identifier[shape] keyword[if] keyword[not] identifier[is_extension_array_dtype] ( identifier[values] ): identifier[values] = identifier[values] . identifier[reshape] ( identifier[tuple] (( literal[int] ,)+ identifier[shape] )) keyword[return] identifier[values]
def _block_shape(values, ndim=1, shape=None): """ guarantee the shape of the values to be at least 1 d """ if values.ndim < ndim: if shape is None: shape = values.shape # depends on [control=['if'], data=['shape']] if not is_extension_array_dtype(values): # TODO: https://github.com/pandas-dev/pandas/issues/23023 # block.shape is incorrect for "2D" ExtensionArrays # We can't, and don't need to, reshape. values = values.reshape(tuple((1,) + shape)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return values
def get_email_content(file_path): """Email content in file :param file_path: Path to file with email text :return: Email text (html formatted) """ with open(file_path, "r") as in_file: text = str(in_file.read()) return text.replace("\n\n", "<br>")
def function[get_email_content, parameter[file_path]]: constant[Email content in file :param file_path: Path to file with email text :return: Email text (html formatted) ] with call[name[open], parameter[name[file_path], constant[r]]] begin[:] variable[text] assign[=] call[name[str], parameter[call[name[in_file].read, parameter[]]]] return[call[name[text].replace, parameter[constant[ ], constant[<br>]]]]
keyword[def] identifier[get_email_content] ( identifier[file_path] ): literal[string] keyword[with] identifier[open] ( identifier[file_path] , literal[string] ) keyword[as] identifier[in_file] : identifier[text] = identifier[str] ( identifier[in_file] . identifier[read] ()) keyword[return] identifier[text] . identifier[replace] ( literal[string] , literal[string] )
def get_email_content(file_path): """Email content in file :param file_path: Path to file with email text :return: Email text (html formatted) """ with open(file_path, 'r') as in_file: text = str(in_file.read()) return text.replace('\n\n', '<br>') # depends on [control=['with'], data=['in_file']]
def load_commands(self, namespace): """Load all the commands from an entrypoint""" for ep in pkg_resources.iter_entry_points(namespace): LOG.debug('found command %r', ep.name) cmd_name = (ep.name.replace('_', ' ') if self.convert_underscores else ep.name) self.commands[cmd_name] = ep return
def function[load_commands, parameter[self, namespace]]: constant[Load all the commands from an entrypoint] for taget[name[ep]] in starred[call[name[pkg_resources].iter_entry_points, parameter[name[namespace]]]] begin[:] call[name[LOG].debug, parameter[constant[found command %r], name[ep].name]] variable[cmd_name] assign[=] <ast.IfExp object at 0x7da1b1b68850> call[name[self].commands][name[cmd_name]] assign[=] name[ep] return[None]
keyword[def] identifier[load_commands] ( identifier[self] , identifier[namespace] ): literal[string] keyword[for] identifier[ep] keyword[in] identifier[pkg_resources] . identifier[iter_entry_points] ( identifier[namespace] ): identifier[LOG] . identifier[debug] ( literal[string] , identifier[ep] . identifier[name] ) identifier[cmd_name] =( identifier[ep] . identifier[name] . identifier[replace] ( literal[string] , literal[string] ) keyword[if] identifier[self] . identifier[convert_underscores] keyword[else] identifier[ep] . identifier[name] ) identifier[self] . identifier[commands] [ identifier[cmd_name] ]= identifier[ep] keyword[return]
def load_commands(self, namespace): """Load all the commands from an entrypoint""" for ep in pkg_resources.iter_entry_points(namespace): LOG.debug('found command %r', ep.name) cmd_name = ep.name.replace('_', ' ') if self.convert_underscores else ep.name self.commands[cmd_name] = ep # depends on [control=['for'], data=['ep']] return
def monitor(self): """ Access the Monitor Twilio Domain :returns: Monitor Twilio Domain :rtype: twilio.rest.monitor.Monitor """ if self._monitor is None: from twilio.rest.monitor import Monitor self._monitor = Monitor(self) return self._monitor
def function[monitor, parameter[self]]: constant[ Access the Monitor Twilio Domain :returns: Monitor Twilio Domain :rtype: twilio.rest.monitor.Monitor ] if compare[name[self]._monitor is constant[None]] begin[:] from relative_module[twilio.rest.monitor] import module[Monitor] name[self]._monitor assign[=] call[name[Monitor], parameter[name[self]]] return[name[self]._monitor]
keyword[def] identifier[monitor] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_monitor] keyword[is] keyword[None] : keyword[from] identifier[twilio] . identifier[rest] . identifier[monitor] keyword[import] identifier[Monitor] identifier[self] . identifier[_monitor] = identifier[Monitor] ( identifier[self] ) keyword[return] identifier[self] . identifier[_monitor]
def monitor(self): """ Access the Monitor Twilio Domain :returns: Monitor Twilio Domain :rtype: twilio.rest.monitor.Monitor """ if self._monitor is None: from twilio.rest.monitor import Monitor self._monitor = Monitor(self) # depends on [control=['if'], data=[]] return self._monitor
def enforce_type(cls, jobject, intf_or_class): """ Raises an exception if the object does not implement the specified interface or is not a subclass. :param jobject: the Java object to check :type jobject: JB_Object :param intf_or_class: the classname in Java notation (eg "weka.core.DenseInstance") :type intf_or_class: str """ if not cls.check_type(jobject, intf_or_class): raise TypeError("Object does not implement or subclass " + intf_or_class + ": " + get_classname(jobject))
def function[enforce_type, parameter[cls, jobject, intf_or_class]]: constant[ Raises an exception if the object does not implement the specified interface or is not a subclass. :param jobject: the Java object to check :type jobject: JB_Object :param intf_or_class: the classname in Java notation (eg "weka.core.DenseInstance") :type intf_or_class: str ] if <ast.UnaryOp object at 0x7da1b06f15d0> begin[:] <ast.Raise object at 0x7da1b06f3250>
keyword[def] identifier[enforce_type] ( identifier[cls] , identifier[jobject] , identifier[intf_or_class] ): literal[string] keyword[if] keyword[not] identifier[cls] . identifier[check_type] ( identifier[jobject] , identifier[intf_or_class] ): keyword[raise] identifier[TypeError] ( literal[string] + identifier[intf_or_class] + literal[string] + identifier[get_classname] ( identifier[jobject] ))
def enforce_type(cls, jobject, intf_or_class): """ Raises an exception if the object does not implement the specified interface or is not a subclass. :param jobject: the Java object to check :type jobject: JB_Object :param intf_or_class: the classname in Java notation (eg "weka.core.DenseInstance") :type intf_or_class: str """ if not cls.check_type(jobject, intf_or_class): raise TypeError('Object does not implement or subclass ' + intf_or_class + ': ' + get_classname(jobject)) # depends on [control=['if'], data=[]]
def psetex(self, key, milliseconds, value): """:meth:`~tredis.RedisClient.psetex` works exactly like :meth:`~tredis.RedisClient.psetex` with the sole difference that the expire time is specified in milliseconds instead of seconds. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to set :type key: :class:`str`, :class:`bytes` :param int milliseconds: Number of milliseconds for TTL :param value: The value to set :type value: :class:`str`, :class:`bytes` :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute( [b'PSETEX', key, ascii(milliseconds), value], b'OK')
def function[psetex, parameter[self, key, milliseconds, value]]: constant[:meth:`~tredis.RedisClient.psetex` works exactly like :meth:`~tredis.RedisClient.psetex` with the sole difference that the expire time is specified in milliseconds instead of seconds. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to set :type key: :class:`str`, :class:`bytes` :param int milliseconds: Number of milliseconds for TTL :param value: The value to set :type value: :class:`str`, :class:`bytes` :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` ] return[call[name[self]._execute, parameter[list[[<ast.Constant object at 0x7da1b0bab0d0>, <ast.Name object at 0x7da1b0ba9990>, <ast.Call object at 0x7da1b0bab070>, <ast.Name object at 0x7da1b0baadd0>]], constant[b'OK']]]]
keyword[def] identifier[psetex] ( identifier[self] , identifier[key] , identifier[milliseconds] , identifier[value] ): literal[string] keyword[return] identifier[self] . identifier[_execute] ( [ literal[string] , identifier[key] , identifier[ascii] ( identifier[milliseconds] ), identifier[value] ], literal[string] )
def psetex(self, key, milliseconds, value): """:meth:`~tredis.RedisClient.psetex` works exactly like :meth:`~tredis.RedisClient.psetex` with the sole difference that the expire time is specified in milliseconds instead of seconds. .. versionadded:: 0.2.0 .. note:: **Time complexity**: ``O(1)`` :param key: The key to set :type key: :class:`str`, :class:`bytes` :param int milliseconds: Number of milliseconds for TTL :param value: The value to set :type value: :class:`str`, :class:`bytes` :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute([b'PSETEX', key, ascii(milliseconds), value], b'OK')
def _parse_q2r(self, f): """Parse q2r output file The format of q2r output is described at the mailing list below: http://www.democritos.it/pipermail/pw_forum/2005-April/002408.html http://www.democritos.it/pipermail/pw_forum/2008-September/010099.html http://www.democritos.it/pipermail/pw_forum/2009-August/013613.html https://www.mail-archive.com/pw_forum@pwscf.org/msg24388.html """ natom, dim, epsilon, borns = self._parse_parameters(f) fc_dct = {'fc': self._parse_fc(f, natom, dim), 'dimension': dim, 'dielectric': epsilon, 'born': borns} return fc_dct
def function[_parse_q2r, parameter[self, f]]: constant[Parse q2r output file The format of q2r output is described at the mailing list below: http://www.democritos.it/pipermail/pw_forum/2005-April/002408.html http://www.democritos.it/pipermail/pw_forum/2008-September/010099.html http://www.democritos.it/pipermail/pw_forum/2009-August/013613.html https://www.mail-archive.com/pw_forum@pwscf.org/msg24388.html ] <ast.Tuple object at 0x7da18ede4eb0> assign[=] call[name[self]._parse_parameters, parameter[name[f]]] variable[fc_dct] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c6da0>, <ast.Constant object at 0x7da20c6c6b00>, <ast.Constant object at 0x7da20c6c5720>, <ast.Constant object at 0x7da20c6c6bf0>], [<ast.Call object at 0x7da20c6c6200>, <ast.Name object at 0x7da20c6c4340>, <ast.Name object at 0x7da20c6c5e10>, <ast.Name object at 0x7da20c6c4280>]] return[name[fc_dct]]
keyword[def] identifier[_parse_q2r] ( identifier[self] , identifier[f] ): literal[string] identifier[natom] , identifier[dim] , identifier[epsilon] , identifier[borns] = identifier[self] . identifier[_parse_parameters] ( identifier[f] ) identifier[fc_dct] ={ literal[string] : identifier[self] . identifier[_parse_fc] ( identifier[f] , identifier[natom] , identifier[dim] ), literal[string] : identifier[dim] , literal[string] : identifier[epsilon] , literal[string] : identifier[borns] } keyword[return] identifier[fc_dct]
def _parse_q2r(self, f): """Parse q2r output file The format of q2r output is described at the mailing list below: http://www.democritos.it/pipermail/pw_forum/2005-April/002408.html http://www.democritos.it/pipermail/pw_forum/2008-September/010099.html http://www.democritos.it/pipermail/pw_forum/2009-August/013613.html https://www.mail-archive.com/pw_forum@pwscf.org/msg24388.html """ (natom, dim, epsilon, borns) = self._parse_parameters(f) fc_dct = {'fc': self._parse_fc(f, natom, dim), 'dimension': dim, 'dielectric': epsilon, 'born': borns} return fc_dct
def hessian(f, delta=DELTA): """ Returns numerical hessian function of given input function Input: f, scalar function of one or two variables delta(optional), finite difference step Output: hessian function object """ def hessian_f(*args, **kwargs): if len(args) == 1: x, = args hessianf_x = ( f(x+delta) + f(x-delta) - 2*f(x) )/delta**2 return hessianf_x elif len(args) == 2: x, y = args if type(x) in [float, int] and type(y) in [float, int]: hess_xx = ( f(x + delta, y) + f(x - delta, y) - 2*f(x, y) )/delta**2 hess_yy = ( f(x, y + delta) + f(x, y - delta) - 2*f(x, y) )/delta**2 hess_xy = ( + f(x+delta/2, y+delta/2) + f(x-delta/2, y-delta/2) - f(x+delta/2, y-delta/2) - f(x-delta/2, y+delta/2) )/delta**2 return hess_xx, hess_xy, hess_yy return hessian_f
def function[hessian, parameter[f, delta]]: constant[ Returns numerical hessian function of given input function Input: f, scalar function of one or two variables delta(optional), finite difference step Output: hessian function object ] def function[hessian_f, parameter[]]: if compare[call[name[len], parameter[name[args]]] equal[==] constant[1]] begin[:] <ast.Tuple object at 0x7da1b23473a0> assign[=] name[args] variable[hessianf_x] assign[=] binary_operation[binary_operation[binary_operation[call[name[f], parameter[binary_operation[name[x] + name[delta]]]] + call[name[f], parameter[binary_operation[name[x] - name[delta]]]]] - binary_operation[constant[2] * call[name[f], parameter[name[x]]]]] / binary_operation[name[delta] ** constant[2]]] return[name[hessianf_x]] return[name[hessian_f]]
keyword[def] identifier[hessian] ( identifier[f] , identifier[delta] = identifier[DELTA] ): literal[string] keyword[def] identifier[hessian_f] (* identifier[args] ,** identifier[kwargs] ): keyword[if] identifier[len] ( identifier[args] )== literal[int] : identifier[x] ,= identifier[args] identifier[hessianf_x] =( identifier[f] ( identifier[x] + identifier[delta] )+ identifier[f] ( identifier[x] - identifier[delta] )- literal[int] * identifier[f] ( identifier[x] ) )/ identifier[delta] ** literal[int] keyword[return] identifier[hessianf_x] keyword[elif] identifier[len] ( identifier[args] )== literal[int] : identifier[x] , identifier[y] = identifier[args] keyword[if] identifier[type] ( identifier[x] ) keyword[in] [ identifier[float] , identifier[int] ] keyword[and] identifier[type] ( identifier[y] ) keyword[in] [ identifier[float] , identifier[int] ]: identifier[hess_xx] =( identifier[f] ( identifier[x] + identifier[delta] , identifier[y] )+ identifier[f] ( identifier[x] - identifier[delta] , identifier[y] )- literal[int] * identifier[f] ( identifier[x] , identifier[y] ) )/ identifier[delta] ** literal[int] identifier[hess_yy] =( identifier[f] ( identifier[x] , identifier[y] + identifier[delta] )+ identifier[f] ( identifier[x] , identifier[y] - identifier[delta] )- literal[int] * identifier[f] ( identifier[x] , identifier[y] ) )/ identifier[delta] ** literal[int] identifier[hess_xy] =( + identifier[f] ( identifier[x] + identifier[delta] / literal[int] , identifier[y] + identifier[delta] / literal[int] ) + identifier[f] ( identifier[x] - identifier[delta] / literal[int] , identifier[y] - identifier[delta] / literal[int] ) - identifier[f] ( identifier[x] + identifier[delta] / literal[int] , identifier[y] - identifier[delta] / literal[int] ) - identifier[f] ( identifier[x] - identifier[delta] / literal[int] , identifier[y] + identifier[delta] / literal[int] ) )/ identifier[delta] ** literal[int] keyword[return] identifier[hess_xx] , identifier[hess_xy] , 
identifier[hess_yy] keyword[return] identifier[hessian_f]
def hessian(f, delta=DELTA): """ Returns numerical hessian function of given input function Input: f, scalar function of one or two variables delta(optional), finite difference step Output: hessian function object """ def hessian_f(*args, **kwargs): if len(args) == 1: (x,) = args hessianf_x = (f(x + delta) + f(x - delta) - 2 * f(x)) / delta ** 2 return hessianf_x # depends on [control=['if'], data=[]] elif len(args) == 2: (x, y) = args if type(x) in [float, int] and type(y) in [float, int]: hess_xx = (f(x + delta, y) + f(x - delta, y) - 2 * f(x, y)) / delta ** 2 hess_yy = (f(x, y + delta) + f(x, y - delta) - 2 * f(x, y)) / delta ** 2 hess_xy = (+f(x + delta / 2, y + delta / 2) + f(x - delta / 2, y - delta / 2) - f(x + delta / 2, y - delta / 2) - f(x - delta / 2, y + delta / 2)) / delta ** 2 return (hess_xx, hess_xy, hess_yy) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return hessian_f
def _set_interface_hello_interval(self, v, load=False): """ Setter method for interface_hello_interval, mapped from YANG variable /routing_system/interface/ve/intf_isis/interface_isis/interface_hello_interval (list) If this variable is read-only (config: false) in the source YANG file, then _set_interface_hello_interval is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_interface_hello_interval() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=YANGListType("interface_hello_interval_level",interface_hello_interval.interface_hello_interval, yang_name="interface-hello-interval", rest_name="hello-interval", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='interface-hello-interval-level', extensions={u'tailf-common': {u'info': u'Define interval between hello PDUs', u'cli-suppress-mode': None, u'callpoint': u'IsisVeInterfaceHelloInterval', u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'alt-name': u'hello-interval'}}), is_container='list', yang_name="interface-hello-interval", rest_name="hello-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define interval between hello PDUs', u'cli-suppress-mode': None, u'callpoint': u'IsisVeInterfaceHelloInterval', u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'alt-name': u'hello-interval'}}, namespace='urn:brocade.com:mgmt:brocade-isis', defining_module='brocade-isis', yang_type='list', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """interface_hello_interval must be of a type compatible with list""", 'defined-type': "list", 
'generated-type': """YANGDynClass(base=YANGListType("interface_hello_interval_level",interface_hello_interval.interface_hello_interval, yang_name="interface-hello-interval", rest_name="hello-interval", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='interface-hello-interval-level', extensions={u'tailf-common': {u'info': u'Define interval between hello PDUs', u'cli-suppress-mode': None, u'callpoint': u'IsisVeInterfaceHelloInterval', u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'alt-name': u'hello-interval'}}), is_container='list', yang_name="interface-hello-interval", rest_name="hello-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define interval between hello PDUs', u'cli-suppress-mode': None, u'callpoint': u'IsisVeInterfaceHelloInterval', u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'alt-name': u'hello-interval'}}, namespace='urn:brocade.com:mgmt:brocade-isis', defining_module='brocade-isis', yang_type='list', is_config=True)""", }) self.__interface_hello_interval = t if hasattr(self, '_set'): self._set()
def function[_set_interface_hello_interval, parameter[self, v, load]]: constant[ Setter method for interface_hello_interval, mapped from YANG variable /routing_system/interface/ve/intf_isis/interface_isis/interface_hello_interval (list) If this variable is read-only (config: false) in the source YANG file, then _set_interface_hello_interval is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_interface_hello_interval() directly. ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da20c6c5030> name[self].__interface_hello_interval assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_interface_hello_interval] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[YANGListType] ( literal[string] , identifier[interface_hello_interval] . identifier[interface_hello_interval] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[is_container] = literal[string] , identifier[user_ordered] = keyword[False] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[yang_keys] = literal[string] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] }}), identifier[is_container] = literal[string] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . 
identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__interface_hello_interval] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_interface_hello_interval(self, v, load=False): """ Setter method for interface_hello_interval, mapped from YANG variable /routing_system/interface/ve/intf_isis/interface_isis/interface_hello_interval (list) If this variable is read-only (config: false) in the source YANG file, then _set_interface_hello_interval is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_interface_hello_interval() directly. """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=YANGListType('interface_hello_interval_level', interface_hello_interval.interface_hello_interval, yang_name='interface-hello-interval', rest_name='hello-interval', parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='interface-hello-interval-level', extensions={u'tailf-common': {u'info': u'Define interval between hello PDUs', u'cli-suppress-mode': None, u'callpoint': u'IsisVeInterfaceHelloInterval', u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'alt-name': u'hello-interval'}}), is_container='list', yang_name='interface-hello-interval', rest_name='hello-interval', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define interval between hello PDUs', u'cli-suppress-mode': None, u'callpoint': u'IsisVeInterfaceHelloInterval', u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'alt-name': u'hello-interval'}}, namespace='urn:brocade.com:mgmt:brocade-isis', defining_module='brocade-isis', yang_type='list', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'interface_hello_interval must 
be of a type compatible with list', 'defined-type': 'list', 'generated-type': 'YANGDynClass(base=YANGListType("interface_hello_interval_level",interface_hello_interval.interface_hello_interval, yang_name="interface-hello-interval", rest_name="hello-interval", parent=self, is_container=\'list\', user_ordered=False, path_helper=self._path_helper, yang_keys=\'interface-hello-interval-level\', extensions={u\'tailf-common\': {u\'info\': u\'Define interval between hello PDUs\', u\'cli-suppress-mode\': None, u\'callpoint\': u\'IsisVeInterfaceHelloInterval\', u\'cli-full-no\': None, u\'cli-compact-syntax\': None, u\'cli-sequence-commands\': None, u\'cli-suppress-key-abbreviation\': None, u\'cli-incomplete-command\': None, u\'alt-name\': u\'hello-interval\'}}), is_container=\'list\', yang_name="interface-hello-interval", rest_name="hello-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Define interval between hello PDUs\', u\'cli-suppress-mode\': None, u\'callpoint\': u\'IsisVeInterfaceHelloInterval\', u\'cli-full-no\': None, u\'cli-compact-syntax\': None, u\'cli-sequence-commands\': None, u\'cli-suppress-key-abbreviation\': None, u\'cli-incomplete-command\': None, u\'alt-name\': u\'hello-interval\'}}, namespace=\'urn:brocade.com:mgmt:brocade-isis\', defining_module=\'brocade-isis\', yang_type=\'list\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__interface_hello_interval = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
def resolve_dns(opts, fallback=True): ''' Resolves the master_ip and master_uri options ''' ret = {} check_dns = True if (opts.get('file_client', 'remote') == 'local' and not opts.get('use_master_when_local', False)): check_dns = False # Since salt.log is imported below, salt.utils.network needs to be imported here as well import salt.utils.network if check_dns is True: try: if opts['master'] == '': raise SaltSystemExit ret['master_ip'] = salt.utils.network.dns_check( opts['master'], int(opts['master_port']), True, opts['ipv6'], attempt_connect=False) except SaltClientError: retry_dns_count = opts.get('retry_dns_count', None) if opts['retry_dns']: while True: if retry_dns_count is not None: if retry_dns_count == 0: raise SaltMasterUnresolvableError retry_dns_count -= 1 import salt.log msg = ('Master hostname: \'{0}\' not found or not responsive. ' 'Retrying in {1} seconds').format(opts['master'], opts['retry_dns']) if salt.log.setup.is_console_configured(): log.error(msg) else: print('WARNING: {0}'.format(msg)) time.sleep(opts['retry_dns']) try: ret['master_ip'] = salt.utils.network.dns_check( opts['master'], int(opts['master_port']), True, opts['ipv6'], attempt_connect=False) break except SaltClientError: pass else: if fallback: ret['master_ip'] = '127.0.0.1' else: raise except SaltSystemExit: unknown_str = 'unknown address' master = opts.get('master', unknown_str) if master == '': master = unknown_str if opts.get('__role') == 'syndic': err = 'Master address: \'{0}\' could not be resolved. Invalid or unresolveable address. ' \ 'Set \'syndic_master\' value in minion config.'.format(master) else: err = 'Master address: \'{0}\' could not be resolved. Invalid or unresolveable address. 
' \ 'Set \'master\' value in minion config.'.format(master) log.error(err) raise SaltSystemExit(code=42, msg=err) else: ret['master_ip'] = '127.0.0.1' if 'master_ip' in ret and 'master_ip' in opts: if ret['master_ip'] != opts['master_ip']: log.warning( 'Master ip address changed from %s to %s', opts['master_ip'], ret['master_ip'] ) if opts['source_interface_name']: log.trace('Custom source interface required: %s', opts['source_interface_name']) interfaces = salt.utils.network.interfaces() log.trace('The following interfaces are available on this Minion:') log.trace(interfaces) if opts['source_interface_name'] in interfaces: if interfaces[opts['source_interface_name']]['up']: addrs = interfaces[opts['source_interface_name']]['inet'] if not opts['ipv6'] else\ interfaces[opts['source_interface_name']]['inet6'] ret['source_ip'] = addrs[0]['address'] log.debug('Using %s as source IP address', ret['source_ip']) else: log.warning('The interface %s is down so it cannot be used as source to connect to the Master', opts['source_interface_name']) else: log.warning('%s is not a valid interface. Ignoring.', opts['source_interface_name']) elif opts['source_address']: ret['source_ip'] = salt.utils.network.dns_check( opts['source_address'], int(opts['source_ret_port']), True, opts['ipv6'], attempt_connect=False) log.debug('Using %s as source IP address', ret['source_ip']) if opts['source_ret_port']: ret['source_ret_port'] = int(opts['source_ret_port']) log.debug('Using %d as source port for the ret server', ret['source_ret_port']) if opts['source_publish_port']: ret['source_publish_port'] = int(opts['source_publish_port']) log.debug('Using %d as source port for the master pub', ret['source_publish_port']) ret['master_uri'] = 'tcp://{ip}:{port}'.format( ip=ret['master_ip'], port=opts['master_port']) log.debug('Master URI: %s', ret['master_uri']) return ret
def function[resolve_dns, parameter[opts, fallback]]: constant[ Resolves the master_ip and master_uri options ] variable[ret] assign[=] dictionary[[], []] variable[check_dns] assign[=] constant[True] if <ast.BoolOp object at 0x7da1b1f94a30> begin[:] variable[check_dns] assign[=] constant[False] import module[salt.utils.network] if compare[name[check_dns] is constant[True]] begin[:] <ast.Try object at 0x7da1b1f95240> if <ast.BoolOp object at 0x7da1b21a2d10> begin[:] if compare[call[name[ret]][constant[master_ip]] not_equal[!=] call[name[opts]][constant[master_ip]]] begin[:] call[name[log].warning, parameter[constant[Master ip address changed from %s to %s], call[name[opts]][constant[master_ip]], call[name[ret]][constant[master_ip]]]] if call[name[opts]][constant[source_interface_name]] begin[:] call[name[log].trace, parameter[constant[Custom source interface required: %s], call[name[opts]][constant[source_interface_name]]]] variable[interfaces] assign[=] call[name[salt].utils.network.interfaces, parameter[]] call[name[log].trace, parameter[constant[The following interfaces are available on this Minion:]]] call[name[log].trace, parameter[name[interfaces]]] if compare[call[name[opts]][constant[source_interface_name]] in name[interfaces]] begin[:] if call[call[name[interfaces]][call[name[opts]][constant[source_interface_name]]]][constant[up]] begin[:] variable[addrs] assign[=] <ast.IfExp object at 0x7da1b21a0940> call[name[ret]][constant[source_ip]] assign[=] call[call[name[addrs]][constant[0]]][constant[address]] call[name[log].debug, parameter[constant[Using %s as source IP address], call[name[ret]][constant[source_ip]]]] if call[name[opts]][constant[source_ret_port]] begin[:] call[name[ret]][constant[source_ret_port]] assign[=] call[name[int], parameter[call[name[opts]][constant[source_ret_port]]]] call[name[log].debug, parameter[constant[Using %d as source port for the ret server], call[name[ret]][constant[source_ret_port]]]] if 
call[name[opts]][constant[source_publish_port]] begin[:] call[name[ret]][constant[source_publish_port]] assign[=] call[name[int], parameter[call[name[opts]][constant[source_publish_port]]]] call[name[log].debug, parameter[constant[Using %d as source port for the master pub], call[name[ret]][constant[source_publish_port]]]] call[name[ret]][constant[master_uri]] assign[=] call[constant[tcp://{ip}:{port}].format, parameter[]] call[name[log].debug, parameter[constant[Master URI: %s], call[name[ret]][constant[master_uri]]]] return[name[ret]]
keyword[def] identifier[resolve_dns] ( identifier[opts] , identifier[fallback] = keyword[True] ): literal[string] identifier[ret] ={} identifier[check_dns] = keyword[True] keyword[if] ( identifier[opts] . identifier[get] ( literal[string] , literal[string] )== literal[string] keyword[and] keyword[not] identifier[opts] . identifier[get] ( literal[string] , keyword[False] )): identifier[check_dns] = keyword[False] keyword[import] identifier[salt] . identifier[utils] . identifier[network] keyword[if] identifier[check_dns] keyword[is] keyword[True] : keyword[try] : keyword[if] identifier[opts] [ literal[string] ]== literal[string] : keyword[raise] identifier[SaltSystemExit] identifier[ret] [ literal[string] ]= identifier[salt] . identifier[utils] . identifier[network] . identifier[dns_check] ( identifier[opts] [ literal[string] ], identifier[int] ( identifier[opts] [ literal[string] ]), keyword[True] , identifier[opts] [ literal[string] ], identifier[attempt_connect] = keyword[False] ) keyword[except] identifier[SaltClientError] : identifier[retry_dns_count] = identifier[opts] . identifier[get] ( literal[string] , keyword[None] ) keyword[if] identifier[opts] [ literal[string] ]: keyword[while] keyword[True] : keyword[if] identifier[retry_dns_count] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[retry_dns_count] == literal[int] : keyword[raise] identifier[SaltMasterUnresolvableError] identifier[retry_dns_count] -= literal[int] keyword[import] identifier[salt] . identifier[log] identifier[msg] =( literal[string] literal[string] ). identifier[format] ( identifier[opts] [ literal[string] ], identifier[opts] [ literal[string] ]) keyword[if] identifier[salt] . identifier[log] . identifier[setup] . identifier[is_console_configured] (): identifier[log] . identifier[error] ( identifier[msg] ) keyword[else] : identifier[print] ( literal[string] . identifier[format] ( identifier[msg] )) identifier[time] . 
identifier[sleep] ( identifier[opts] [ literal[string] ]) keyword[try] : identifier[ret] [ literal[string] ]= identifier[salt] . identifier[utils] . identifier[network] . identifier[dns_check] ( identifier[opts] [ literal[string] ], identifier[int] ( identifier[opts] [ literal[string] ]), keyword[True] , identifier[opts] [ literal[string] ], identifier[attempt_connect] = keyword[False] ) keyword[break] keyword[except] identifier[SaltClientError] : keyword[pass] keyword[else] : keyword[if] identifier[fallback] : identifier[ret] [ literal[string] ]= literal[string] keyword[else] : keyword[raise] keyword[except] identifier[SaltSystemExit] : identifier[unknown_str] = literal[string] identifier[master] = identifier[opts] . identifier[get] ( literal[string] , identifier[unknown_str] ) keyword[if] identifier[master] == literal[string] : identifier[master] = identifier[unknown_str] keyword[if] identifier[opts] . identifier[get] ( literal[string] )== literal[string] : identifier[err] = literal[string] literal[string] . identifier[format] ( identifier[master] ) keyword[else] : identifier[err] = literal[string] literal[string] . identifier[format] ( identifier[master] ) identifier[log] . identifier[error] ( identifier[err] ) keyword[raise] identifier[SaltSystemExit] ( identifier[code] = literal[int] , identifier[msg] = identifier[err] ) keyword[else] : identifier[ret] [ literal[string] ]= literal[string] keyword[if] literal[string] keyword[in] identifier[ret] keyword[and] literal[string] keyword[in] identifier[opts] : keyword[if] identifier[ret] [ literal[string] ]!= identifier[opts] [ literal[string] ]: identifier[log] . identifier[warning] ( literal[string] , identifier[opts] [ literal[string] ], identifier[ret] [ literal[string] ] ) keyword[if] identifier[opts] [ literal[string] ]: identifier[log] . identifier[trace] ( literal[string] , identifier[opts] [ literal[string] ]) identifier[interfaces] = identifier[salt] . identifier[utils] . identifier[network] . 
identifier[interfaces] () identifier[log] . identifier[trace] ( literal[string] ) identifier[log] . identifier[trace] ( identifier[interfaces] ) keyword[if] identifier[opts] [ literal[string] ] keyword[in] identifier[interfaces] : keyword[if] identifier[interfaces] [ identifier[opts] [ literal[string] ]][ literal[string] ]: identifier[addrs] = identifier[interfaces] [ identifier[opts] [ literal[string] ]][ literal[string] ] keyword[if] keyword[not] identifier[opts] [ literal[string] ] keyword[else] identifier[interfaces] [ identifier[opts] [ literal[string] ]][ literal[string] ] identifier[ret] [ literal[string] ]= identifier[addrs] [ literal[int] ][ literal[string] ] identifier[log] . identifier[debug] ( literal[string] , identifier[ret] [ literal[string] ]) keyword[else] : identifier[log] . identifier[warning] ( literal[string] , identifier[opts] [ literal[string] ]) keyword[else] : identifier[log] . identifier[warning] ( literal[string] , identifier[opts] [ literal[string] ]) keyword[elif] identifier[opts] [ literal[string] ]: identifier[ret] [ literal[string] ]= identifier[salt] . identifier[utils] . identifier[network] . identifier[dns_check] ( identifier[opts] [ literal[string] ], identifier[int] ( identifier[opts] [ literal[string] ]), keyword[True] , identifier[opts] [ literal[string] ], identifier[attempt_connect] = keyword[False] ) identifier[log] . identifier[debug] ( literal[string] , identifier[ret] [ literal[string] ]) keyword[if] identifier[opts] [ literal[string] ]: identifier[ret] [ literal[string] ]= identifier[int] ( identifier[opts] [ literal[string] ]) identifier[log] . identifier[debug] ( literal[string] , identifier[ret] [ literal[string] ]) keyword[if] identifier[opts] [ literal[string] ]: identifier[ret] [ literal[string] ]= identifier[int] ( identifier[opts] [ literal[string] ]) identifier[log] . identifier[debug] ( literal[string] , identifier[ret] [ literal[string] ]) identifier[ret] [ literal[string] ]= literal[string] . 
identifier[format] ( identifier[ip] = identifier[ret] [ literal[string] ], identifier[port] = identifier[opts] [ literal[string] ]) identifier[log] . identifier[debug] ( literal[string] , identifier[ret] [ literal[string] ]) keyword[return] identifier[ret]
def resolve_dns(opts, fallback=True): """ Resolves the master_ip and master_uri options """ ret = {} check_dns = True if opts.get('file_client', 'remote') == 'local' and (not opts.get('use_master_when_local', False)): check_dns = False # depends on [control=['if'], data=[]] # Since salt.log is imported below, salt.utils.network needs to be imported here as well import salt.utils.network if check_dns is True: try: if opts['master'] == '': raise SaltSystemExit # depends on [control=['if'], data=[]] ret['master_ip'] = salt.utils.network.dns_check(opts['master'], int(opts['master_port']), True, opts['ipv6'], attempt_connect=False) # depends on [control=['try'], data=[]] except SaltClientError: retry_dns_count = opts.get('retry_dns_count', None) if opts['retry_dns']: while True: if retry_dns_count is not None: if retry_dns_count == 0: raise SaltMasterUnresolvableError # depends on [control=['if'], data=[]] retry_dns_count -= 1 # depends on [control=['if'], data=['retry_dns_count']] import salt.log msg = "Master hostname: '{0}' not found or not responsive. 
Retrying in {1} seconds".format(opts['master'], opts['retry_dns']) if salt.log.setup.is_console_configured(): log.error(msg) # depends on [control=['if'], data=[]] else: print('WARNING: {0}'.format(msg)) time.sleep(opts['retry_dns']) try: ret['master_ip'] = salt.utils.network.dns_check(opts['master'], int(opts['master_port']), True, opts['ipv6'], attempt_connect=False) break # depends on [control=['try'], data=[]] except SaltClientError: pass # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]] elif fallback: ret['master_ip'] = '127.0.0.1' # depends on [control=['if'], data=[]] else: raise # depends on [control=['except'], data=[]] except SaltSystemExit: unknown_str = 'unknown address' master = opts.get('master', unknown_str) if master == '': master = unknown_str # depends on [control=['if'], data=['master']] if opts.get('__role') == 'syndic': err = "Master address: '{0}' could not be resolved. Invalid or unresolveable address. Set 'syndic_master' value in minion config.".format(master) # depends on [control=['if'], data=[]] else: err = "Master address: '{0}' could not be resolved. Invalid or unresolveable address. 
Set 'master' value in minion config.".format(master) log.error(err) raise SaltSystemExit(code=42, msg=err) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] else: ret['master_ip'] = '127.0.0.1' if 'master_ip' in ret and 'master_ip' in opts: if ret['master_ip'] != opts['master_ip']: log.warning('Master ip address changed from %s to %s', opts['master_ip'], ret['master_ip']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if opts['source_interface_name']: log.trace('Custom source interface required: %s', opts['source_interface_name']) interfaces = salt.utils.network.interfaces() log.trace('The following interfaces are available on this Minion:') log.trace(interfaces) if opts['source_interface_name'] in interfaces: if interfaces[opts['source_interface_name']]['up']: addrs = interfaces[opts['source_interface_name']]['inet'] if not opts['ipv6'] else interfaces[opts['source_interface_name']]['inet6'] ret['source_ip'] = addrs[0]['address'] log.debug('Using %s as source IP address', ret['source_ip']) # depends on [control=['if'], data=[]] else: log.warning('The interface %s is down so it cannot be used as source to connect to the Master', opts['source_interface_name']) # depends on [control=['if'], data=['interfaces']] else: log.warning('%s is not a valid interface. 
Ignoring.', opts['source_interface_name']) # depends on [control=['if'], data=[]] elif opts['source_address']: ret['source_ip'] = salt.utils.network.dns_check(opts['source_address'], int(opts['source_ret_port']), True, opts['ipv6'], attempt_connect=False) log.debug('Using %s as source IP address', ret['source_ip']) # depends on [control=['if'], data=[]] if opts['source_ret_port']: ret['source_ret_port'] = int(opts['source_ret_port']) log.debug('Using %d as source port for the ret server', ret['source_ret_port']) # depends on [control=['if'], data=[]] if opts['source_publish_port']: ret['source_publish_port'] = int(opts['source_publish_port']) log.debug('Using %d as source port for the master pub', ret['source_publish_port']) # depends on [control=['if'], data=[]] ret['master_uri'] = 'tcp://{ip}:{port}'.format(ip=ret['master_ip'], port=opts['master_port']) log.debug('Master URI: %s', ret['master_uri']) return ret
def build_attachment1(): """Build attachment mock. Make sure your content is base64 encoded before passing into attachment.content. Another example: https://github.com/sendgrid/sendgrid-python/blob/master/use_cases/attachment.md""" attachment = Attachment() attachment.content = ("TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNl" "Y3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gQ3JhcyBwdW12") attachment.type = "application/pdf" attachment.filename = "balance_001.pdf" attachment.disposition = "attachment" attachment.content_id = "Balance Sheet" return attachment
def function[build_attachment1, parameter[]]: constant[Build attachment mock. Make sure your content is base64 encoded before passing into attachment.content. Another example: https://github.com/sendgrid/sendgrid-python/blob/master/use_cases/attachment.md] variable[attachment] assign[=] call[name[Attachment], parameter[]] name[attachment].content assign[=] constant[TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gQ3JhcyBwdW12] name[attachment].type assign[=] constant[application/pdf] name[attachment].filename assign[=] constant[balance_001.pdf] name[attachment].disposition assign[=] constant[attachment] name[attachment].content_id assign[=] constant[Balance Sheet] return[name[attachment]]
keyword[def] identifier[build_attachment1] (): literal[string] identifier[attachment] = identifier[Attachment] () identifier[attachment] . identifier[content] =( literal[string] literal[string] ) identifier[attachment] . identifier[type] = literal[string] identifier[attachment] . identifier[filename] = literal[string] identifier[attachment] . identifier[disposition] = literal[string] identifier[attachment] . identifier[content_id] = literal[string] keyword[return] identifier[attachment]
def build_attachment1(): """Build attachment mock. Make sure your content is base64 encoded before passing into attachment.content. Another example: https://github.com/sendgrid/sendgrid-python/blob/master/use_cases/attachment.md""" attachment = Attachment() attachment.content = 'TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gQ3JhcyBwdW12' attachment.type = 'application/pdf' attachment.filename = 'balance_001.pdf' attachment.disposition = 'attachment' attachment.content_id = 'Balance Sheet' return attachment
def set_mask(self, mask_img): """Sets a mask img to this. So every operation to self, this mask will be taken into account. Parameters ---------- mask_img: nifti-like image, NeuroImage or str 3D mask array: True where a voxel should be used. Can either be: - a file path to a Nifti image - any object with get_data() and get_affine() methods, e.g., nibabel.Nifti1Image. If niimg is a string, consider it as a path to Nifti image and call nibabel.load on it. If it is an object, check if get_data() and get_affine() methods are present, raise TypeError otherwise. Note ---- self.img and mask_file must have the same shape. Raises ------ FileNotFound, NiftiFilesNotCompatible """ mask = load_mask(mask_img, allow_empty=True) check_img_compatibility(self.img, mask, only_check_3d=True) # this will raise an exception if something is wrong self.mask = mask
def function[set_mask, parameter[self, mask_img]]: constant[Sets a mask img to this. So every operation to self, this mask will be taken into account. Parameters ---------- mask_img: nifti-like image, NeuroImage or str 3D mask array: True where a voxel should be used. Can either be: - a file path to a Nifti image - any object with get_data() and get_affine() methods, e.g., nibabel.Nifti1Image. If niimg is a string, consider it as a path to Nifti image and call nibabel.load on it. If it is an object, check if get_data() and get_affine() methods are present, raise TypeError otherwise. Note ---- self.img and mask_file must have the same shape. Raises ------ FileNotFound, NiftiFilesNotCompatible ] variable[mask] assign[=] call[name[load_mask], parameter[name[mask_img]]] call[name[check_img_compatibility], parameter[name[self].img, name[mask]]] name[self].mask assign[=] name[mask]
keyword[def] identifier[set_mask] ( identifier[self] , identifier[mask_img] ): literal[string] identifier[mask] = identifier[load_mask] ( identifier[mask_img] , identifier[allow_empty] = keyword[True] ) identifier[check_img_compatibility] ( identifier[self] . identifier[img] , identifier[mask] , identifier[only_check_3d] = keyword[True] ) identifier[self] . identifier[mask] = identifier[mask]
def set_mask(self, mask_img): """Sets a mask img to this. So every operation to self, this mask will be taken into account. Parameters ---------- mask_img: nifti-like image, NeuroImage or str 3D mask array: True where a voxel should be used. Can either be: - a file path to a Nifti image - any object with get_data() and get_affine() methods, e.g., nibabel.Nifti1Image. If niimg is a string, consider it as a path to Nifti image and call nibabel.load on it. If it is an object, check if get_data() and get_affine() methods are present, raise TypeError otherwise. Note ---- self.img and mask_file must have the same shape. Raises ------ FileNotFound, NiftiFilesNotCompatible """ mask = load_mask(mask_img, allow_empty=True) check_img_compatibility(self.img, mask, only_check_3d=True) # this will raise an exception if something is wrong self.mask = mask
def destroy(self): """ A reimplemented destructor. This destructor will clear the reference to the toolkit widget and set its parent to None. """ widget = self.widget if widget is not None: parent = self.parent_widget() if parent is not None: parent.removeView(widget) del self.widget super(AndroidToolkitObject, self).destroy()
def function[destroy, parameter[self]]: constant[ A reimplemented destructor. This destructor will clear the reference to the toolkit widget and set its parent to None. ] variable[widget] assign[=] name[self].widget if compare[name[widget] is_not constant[None]] begin[:] variable[parent] assign[=] call[name[self].parent_widget, parameter[]] if compare[name[parent] is_not constant[None]] begin[:] call[name[parent].removeView, parameter[name[widget]]] <ast.Delete object at 0x7da1b1b9fa90> call[call[name[super], parameter[name[AndroidToolkitObject], name[self]]].destroy, parameter[]]
keyword[def] identifier[destroy] ( identifier[self] ): literal[string] identifier[widget] = identifier[self] . identifier[widget] keyword[if] identifier[widget] keyword[is] keyword[not] keyword[None] : identifier[parent] = identifier[self] . identifier[parent_widget] () keyword[if] identifier[parent] keyword[is] keyword[not] keyword[None] : identifier[parent] . identifier[removeView] ( identifier[widget] ) keyword[del] identifier[self] . identifier[widget] identifier[super] ( identifier[AndroidToolkitObject] , identifier[self] ). identifier[destroy] ()
def destroy(self): """ A reimplemented destructor. This destructor will clear the reference to the toolkit widget and set its parent to None. """ widget = self.widget if widget is not None: parent = self.parent_widget() if parent is not None: parent.removeView(widget) # depends on [control=['if'], data=['parent']] del self.widget # depends on [control=['if'], data=['widget']] super(AndroidToolkitObject, self).destroy()
def _get_capirca_platform(): # pylint: disable=too-many-return-statements ''' Given the following NAPALM grains, we can determine the Capirca platform name: - vendor - device model - operating system Not the most optimal. ''' vendor = __grains__['vendor'].lower() os_ = __grains__['os'].lower() model = __grains__['model'].lower() if vendor == 'juniper' and 'srx' in model: return 'junipersrx' elif vendor == 'cisco' and os_ == 'ios': return 'cisco' elif vendor == 'cisco' and os_ == 'iosxr': return 'ciscoxr' elif vendor == 'cisco' and os_ == 'asa': return 'ciscoasa' elif os_ == 'linux': return 'iptables' elif vendor == 'palo alto networks': return 'paloaltofw' # anything else will point to the vendor # i.e.: some of the Capirca platforms are named by the device vendor # e.g.: eOS => arista, junos => juniper, etc. return vendor
def function[_get_capirca_platform, parameter[]]: constant[ Given the following NAPALM grains, we can determine the Capirca platform name: - vendor - device model - operating system Not the most optimal. ] variable[vendor] assign[=] call[call[name[__grains__]][constant[vendor]].lower, parameter[]] variable[os_] assign[=] call[call[name[__grains__]][constant[os]].lower, parameter[]] variable[model] assign[=] call[call[name[__grains__]][constant[model]].lower, parameter[]] if <ast.BoolOp object at 0x7da1b2022650> begin[:] return[constant[junipersrx]] return[name[vendor]]
keyword[def] identifier[_get_capirca_platform] (): literal[string] identifier[vendor] = identifier[__grains__] [ literal[string] ]. identifier[lower] () identifier[os_] = identifier[__grains__] [ literal[string] ]. identifier[lower] () identifier[model] = identifier[__grains__] [ literal[string] ]. identifier[lower] () keyword[if] identifier[vendor] == literal[string] keyword[and] literal[string] keyword[in] identifier[model] : keyword[return] literal[string] keyword[elif] identifier[vendor] == literal[string] keyword[and] identifier[os_] == literal[string] : keyword[return] literal[string] keyword[elif] identifier[vendor] == literal[string] keyword[and] identifier[os_] == literal[string] : keyword[return] literal[string] keyword[elif] identifier[vendor] == literal[string] keyword[and] identifier[os_] == literal[string] : keyword[return] literal[string] keyword[elif] identifier[os_] == literal[string] : keyword[return] literal[string] keyword[elif] identifier[vendor] == literal[string] : keyword[return] literal[string] keyword[return] identifier[vendor]
def _get_capirca_platform(): # pylint: disable=too-many-return-statements '\n Given the following NAPALM grains, we can determine the Capirca platform name:\n\n - vendor\n - device model\n - operating system\n\n Not the most optimal.\n ' vendor = __grains__['vendor'].lower() os_ = __grains__['os'].lower() model = __grains__['model'].lower() if vendor == 'juniper' and 'srx' in model: return 'junipersrx' # depends on [control=['if'], data=[]] elif vendor == 'cisco' and os_ == 'ios': return 'cisco' # depends on [control=['if'], data=[]] elif vendor == 'cisco' and os_ == 'iosxr': return 'ciscoxr' # depends on [control=['if'], data=[]] elif vendor == 'cisco' and os_ == 'asa': return 'ciscoasa' # depends on [control=['if'], data=[]] elif os_ == 'linux': return 'iptables' # depends on [control=['if'], data=[]] elif vendor == 'palo alto networks': return 'paloaltofw' # depends on [control=['if'], data=[]] # anything else will point to the vendor # i.e.: some of the Capirca platforms are named by the device vendor # e.g.: eOS => arista, junos => juniper, etc. return vendor
def porthistory(port_number, start_date=None, end_date=None, return_format=None): """Returns port data for a range of dates. In the return data: Records: Total number of records for a given date range. Targets: Number of unique destination IP addresses. Sources: Number of unique originating IPs. :param port_number: a valid port number (required) :param start_date: string or datetime.date(), default is 30 days ago :param end_date: string or datetime.date(), default is today """ uri = 'porthistory/{port}'.format(port=port_number) if not start_date: # default 30 days ago start_date = datetime.datetime.now() - datetime.timedelta(days=30) try: uri = '/'.join([uri, start_date.strftime("%Y-%m-%d")]) except AttributeError: uri = '/'.join([uri, start_date]) if end_date: try: uri = '/'.join([uri, end_date.strftime("%Y-%m-%d")]) except AttributeError: uri = '/'.join([uri, end_date]) response = _get(uri, return_format) if 'bad port number' in str(response): raise Error('Bad port, {port}'.format(port=port_number)) else: return response
def function[porthistory, parameter[port_number, start_date, end_date, return_format]]: constant[Returns port data for a range of dates. In the return data: Records: Total number of records for a given date range. Targets: Number of unique destination IP addresses. Sources: Number of unique originating IPs. :param port_number: a valid port number (required) :param start_date: string or datetime.date(), default is 30 days ago :param end_date: string or datetime.date(), default is today ] variable[uri] assign[=] call[constant[porthistory/{port}].format, parameter[]] if <ast.UnaryOp object at 0x7da1b1957670> begin[:] variable[start_date] assign[=] binary_operation[call[name[datetime].datetime.now, parameter[]] - call[name[datetime].timedelta, parameter[]]] <ast.Try object at 0x7da1b1957c10> if name[end_date] begin[:] <ast.Try object at 0x7da1b19576a0> variable[response] assign[=] call[name[_get], parameter[name[uri], name[return_format]]] if compare[constant[bad port number] in call[name[str], parameter[name[response]]]] begin[:] <ast.Raise object at 0x7da1b1956410>
keyword[def] identifier[porthistory] ( identifier[port_number] , identifier[start_date] = keyword[None] , identifier[end_date] = keyword[None] , identifier[return_format] = keyword[None] ): literal[string] identifier[uri] = literal[string] . identifier[format] ( identifier[port] = identifier[port_number] ) keyword[if] keyword[not] identifier[start_date] : identifier[start_date] = identifier[datetime] . identifier[datetime] . identifier[now] ()- identifier[datetime] . identifier[timedelta] ( identifier[days] = literal[int] ) keyword[try] : identifier[uri] = literal[string] . identifier[join] ([ identifier[uri] , identifier[start_date] . identifier[strftime] ( literal[string] )]) keyword[except] identifier[AttributeError] : identifier[uri] = literal[string] . identifier[join] ([ identifier[uri] , identifier[start_date] ]) keyword[if] identifier[end_date] : keyword[try] : identifier[uri] = literal[string] . identifier[join] ([ identifier[uri] , identifier[end_date] . identifier[strftime] ( literal[string] )]) keyword[except] identifier[AttributeError] : identifier[uri] = literal[string] . identifier[join] ([ identifier[uri] , identifier[end_date] ]) identifier[response] = identifier[_get] ( identifier[uri] , identifier[return_format] ) keyword[if] literal[string] keyword[in] identifier[str] ( identifier[response] ): keyword[raise] identifier[Error] ( literal[string] . identifier[format] ( identifier[port] = identifier[port_number] )) keyword[else] : keyword[return] identifier[response]
def porthistory(port_number, start_date=None, end_date=None, return_format=None): """Returns port data for a range of dates. In the return data: Records: Total number of records for a given date range. Targets: Number of unique destination IP addresses. Sources: Number of unique originating IPs. :param port_number: a valid port number (required) :param start_date: string or datetime.date(), default is 30 days ago :param end_date: string or datetime.date(), default is today """ uri = 'porthistory/{port}'.format(port=port_number) if not start_date: # default 30 days ago start_date = datetime.datetime.now() - datetime.timedelta(days=30) # depends on [control=['if'], data=[]] try: uri = '/'.join([uri, start_date.strftime('%Y-%m-%d')]) # depends on [control=['try'], data=[]] except AttributeError: uri = '/'.join([uri, start_date]) # depends on [control=['except'], data=[]] if end_date: try: uri = '/'.join([uri, end_date.strftime('%Y-%m-%d')]) # depends on [control=['try'], data=[]] except AttributeError: uri = '/'.join([uri, end_date]) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] response = _get(uri, return_format) if 'bad port number' in str(response): raise Error('Bad port, {port}'.format(port=port_number)) # depends on [control=['if'], data=[]] else: return response