code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def cli_command_restart(self, msg):
    """Restart the managed subprocess.

    Flow:
      i.   state is set to RESTARTING (heartbeats keep being sent meanwhile)
      ii.  the subprocess is killed
      iii. it is started again (via the registered exit callback)
      iv.  on success the state becomes RUNNING, otherwise WAITING

    :param msg: incoming CLI message (not inspected here)
    :return: status string -- 'killed' when a kill was issued, '' otherwise
    """
    restartable = (self.state == State.RUNNING
                   and self.sprocess
                   and self.sprocess.proc)
    if not restartable:
        return ''

    self.state = State.RESTARTING
    # the exit callback is responsible for starting the process again
    self.sprocess.set_exit_callback(self.proc_exit_cb_restart)
    self.sprocess.proc.kill()
    # TODO: check if process is really dead etc.
    return 'killed'
def function[cli_command_restart, parameter[self, msg]]: constant[ restart the subprocess i. we set our state to RESTARTING - on restarting we still send heartbeat ii. we kill the subprocess iii. we start again iv. if its started we set our state to RUNNING, else we set it to WAITING :param msg: :return: ] variable[info] assign[=] constant[] if <ast.BoolOp object at 0x7da1b0295f30> begin[:] name[self].state assign[=] name[State].RESTARTING call[name[self].sprocess.set_exit_callback, parameter[name[self].proc_exit_cb_restart]] call[name[self].sprocess.proc.kill, parameter[]] variable[info] assign[=] constant[killed] return[name[info]]
keyword[def] identifier[cli_command_restart] ( identifier[self] , identifier[msg] ): literal[string] identifier[info] = literal[string] keyword[if] identifier[self] . identifier[state] == identifier[State] . identifier[RUNNING] keyword[and] identifier[self] . identifier[sprocess] keyword[and] identifier[self] . identifier[sprocess] . identifier[proc] : identifier[self] . identifier[state] = identifier[State] . identifier[RESTARTING] identifier[self] . identifier[sprocess] . identifier[set_exit_callback] ( identifier[self] . identifier[proc_exit_cb_restart] ) identifier[self] . identifier[sprocess] . identifier[proc] . identifier[kill] () identifier[info] = literal[string] keyword[return] identifier[info]
def cli_command_restart(self, msg): """ restart the subprocess i. we set our state to RESTARTING - on restarting we still send heartbeat ii. we kill the subprocess iii. we start again iv. if its started we set our state to RUNNING, else we set it to WAITING :param msg: :return: """ info = '' if self.state == State.RUNNING and self.sprocess and self.sprocess.proc: self.state = State.RESTARTING self.sprocess.set_exit_callback(self.proc_exit_cb_restart) self.sprocess.proc.kill() info = 'killed' # depends on [control=['if'], data=[]] # TODO: check if process is really dead etc. return info
def _trunc(x, minval=None, maxval=None): """Truncate vector values to have values on range [minval, maxval] """ x = np.copy(x) if minval is not None: x[x < minval] = minval if maxval is not None: x[x > maxval] = maxval return x
def function[_trunc, parameter[x, minval, maxval]]: constant[Truncate vector values to have values on range [minval, maxval] ] variable[x] assign[=] call[name[np].copy, parameter[name[x]]] if compare[name[minval] is_not constant[None]] begin[:] call[name[x]][compare[name[x] less[<] name[minval]]] assign[=] name[minval] if compare[name[maxval] is_not constant[None]] begin[:] call[name[x]][compare[name[x] greater[>] name[maxval]]] assign[=] name[maxval] return[name[x]]
keyword[def] identifier[_trunc] ( identifier[x] , identifier[minval] = keyword[None] , identifier[maxval] = keyword[None] ): literal[string] identifier[x] = identifier[np] . identifier[copy] ( identifier[x] ) keyword[if] identifier[minval] keyword[is] keyword[not] keyword[None] : identifier[x] [ identifier[x] < identifier[minval] ]= identifier[minval] keyword[if] identifier[maxval] keyword[is] keyword[not] keyword[None] : identifier[x] [ identifier[x] > identifier[maxval] ]= identifier[maxval] keyword[return] identifier[x]
def _trunc(x, minval=None, maxval=None): """Truncate vector values to have values on range [minval, maxval] """ x = np.copy(x) if minval is not None: x[x < minval] = minval # depends on [control=['if'], data=['minval']] if maxval is not None: x[x > maxval] = maxval # depends on [control=['if'], data=['maxval']] return x
def set_selection(self, time, freqs, blarr, calname='', radec=(), dist=1,
                  spwind=None, pols=None):
    """ Set select parameter that defines time, spw, and pol solutions to apply.
    time defines the time to find solutions near in mjd.
    freqs defines frequencies to select bandpass solution
    blarr is array of size 2xnbl that gives pairs of antennas in each baseline (a la tpipe.blarr).
    radec (radian tuple) and dist (deg) define optional location of source for filtering solutions.
    spwind is list of indices to be used (e.g., [0,2,4,10]); defaults to [].
    pols is from d['pols'] (e.g., ['RR']). single or dual parallel allowed. defaults to ['XX','YY'].
    calname not used. here for uniformity with telcal_sol.
    """
    # Bug fix: the defaults were mutable ([] and ['XX','YY']), which is shared
    # across calls; normalize None to the documented defaults instead.
    if spwind is None:
        spwind = []
    if pols is None:
        pols = ['XX', 'YY']

    self.spwind = spwind
    if calname:
        self.logger.warn('calname option not used for casa_sol. Applied based on radec.')

    # define pol index: map the requested pols onto the canonical ordering
    # for the feed basis (linear XY vs circular RL)
    if 'X' in ''.join(pols) or 'Y' in ''.join(pols):
        polord = ['XX', 'YY']
    elif 'R' in ''.join(pols) or 'L' in ''.join(pols):
        polord = ['RR', 'LL']
    self.polind = [polord.index(pol) for pol in pols]

    # index of each baseline's antennas within the sorted unique antenna list
    self.ant1ind = [n.where(ant1 == n.unique(blarr))[0][0] for (ant1, ant2) in blarr]
    self.ant2ind = [n.where(ant2 == n.unique(blarr))[0][0] for (ant1, ant2) in blarr]

    # select by smallest time distance for source within some angular region of target
    if radec:
        ra, dec = radec
        calra = n.array(self.radec)[:, 0]
        caldec = n.array(self.radec)[:, 1]
        fields = n.where((n.abs(calra - ra) < n.radians(dist)) &
                         (n.abs(caldec - dec) < n.radians(dist)))[0]
        if len(fields) == 0:
            self.logger.warn('Warning: no close calibrator found. Removing radec restriction.')
            fields = n.unique(self.uniquefield)
    else:
        fields = n.unique(self.uniquefield)

    # collect all solution indices belonging to the selected field(s)
    sel = []
    for field in fields:
        sel += list(n.where(field == self.uniquefield)[0])

    mjddist = n.abs(time - self.uniquemjd[sel])
    closestgain = n.where(mjddist == mjddist.min())[0][0]

    # NOTE(review): the log reads uniquemjd[closestgain] rather than
    # uniquemjd[sel][closestgain]; confirm the unselected indexing is intended.
    self.logger.info('Using gain solution for field %d at MJD %.5f, separated by %d min ' %
                     (self.uniquefield[n.where(self.uniquemjd == self.uniquemjd[sel][closestgain])],
                      self.uniquemjd[closestgain], mjddist[closestgain] * 24 * 60))
    # gain axes here: (solution, antenna, spw, pol); reduce to the chosen solution
    self.gain = self.gain.take(self.spwind, axis=2).take(self.polind, axis=3)[closestgain]

    if hasattr(self, 'bandpass'):
        # nearest bandpass bin for each requested frequency
        bins = [n.where(n.min(n.abs(self.bpfreq - selfreq)) == n.abs(self.bpfreq - selfreq))[0][0]
                for selfreq in freqs]
        self.bandpass = self.bandpass.take(bins, axis=1).take(self.polind, axis=2)
        self.freqs = freqs
        self.logger.debug('Using bandpass at BP bins (1000 bins per spw): %s', str(bins))
def function[set_selection, parameter[self, time, freqs, blarr, calname, radec, dist, spwind, pols]]: constant[ Set select parameter that defines time, spw, and pol solutions to apply. time defines the time to find solutions near in mjd. freqs defines frequencies to select bandpass solution blarr is array of size 2xnbl that gives pairs of antennas in each baseline (a la tpipe.blarr). radec (radian tuple) and dist (deg) define optional location of source for filtering solutions. spwind is list of indices to be used (e.g., [0,2,4,10]) pols is from d['pols'] (e.g., ['RR']). single or dual parallel allowed. calname not used. here for uniformity with telcal_sol. ] name[self].spwind assign[=] name[spwind] if name[calname] begin[:] call[name[self].logger.warn, parameter[constant[calname option not used for casa_sol. Applied based on radec.]]] if <ast.BoolOp object at 0x7da1b25b38e0> begin[:] variable[polord] assign[=] list[[<ast.Constant object at 0x7da1b25b2530>, <ast.Constant object at 0x7da1b25b2500>]] name[self].polind assign[=] <ast.ListComp object at 0x7da1b25b20b0> name[self].ant1ind assign[=] <ast.ListComp object at 0x7da1b25b1e70> name[self].ant2ind assign[=] <ast.ListComp object at 0x7da1b25b1a20> if name[radec] begin[:] <ast.Tuple object at 0x7da1b25b15a0> assign[=] name[radec] variable[calra] assign[=] call[call[name[n].array, parameter[name[self].radec]]][tuple[[<ast.Slice object at 0x7da1b25b1330>, <ast.Constant object at 0x7da1b25b1300>]]] variable[caldec] assign[=] call[call[name[n].array, parameter[name[self].radec]]][tuple[[<ast.Slice object at 0x7da1b25b1120>, <ast.Constant object at 0x7da1b25b10f0>]]] variable[fields] assign[=] call[call[name[n].where, parameter[binary_operation[compare[call[name[n].abs, parameter[binary_operation[name[calra] - name[ra]]]] less[<] call[name[n].radians, parameter[name[dist]]]] <ast.BitAnd object at 0x7da2590d6b60> compare[call[name[n].abs, parameter[binary_operation[name[caldec] - name[dec]]]] less[<] 
call[name[n].radians, parameter[name[dist]]]]]]]][constant[0]] if compare[call[name[len], parameter[name[fields]]] equal[==] constant[0]] begin[:] call[name[self].logger.warn, parameter[constant[Warning: no close calibrator found. Removing radec restriction.]]] variable[fields] assign[=] call[name[n].unique, parameter[name[self].uniquefield]] variable[sel] assign[=] list[[]] for taget[name[field]] in starred[name[fields]] begin[:] <ast.AugAssign object at 0x7da1b25b04f0> variable[mjddist] assign[=] call[name[n].abs, parameter[binary_operation[name[time] - call[name[self].uniquemjd][name[sel]]]]] variable[closestgain] assign[=] call[call[call[name[n].where, parameter[compare[name[mjddist] equal[==] call[name[mjddist].min, parameter[]]]]]][constant[0]]][constant[0]] call[name[self].logger.info, parameter[binary_operation[constant[Using gain solution for field %d at MJD %.5f, separated by %d min ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b25c8520>, <ast.Subscript object at 0x7da1b25c8610>, <ast.BinOp object at 0x7da1b25c87f0>]]]]] name[self].gain assign[=] call[call[call[name[self].gain.take, parameter[name[self].spwind]].take, parameter[name[self].polind]]][name[closestgain]] if call[name[hasattr], parameter[name[self], constant[bandpass]]] begin[:] variable[bins] assign[=] <ast.ListComp object at 0x7da1b25c8dc0> name[self].bandpass assign[=] call[call[name[self].bandpass.take, parameter[name[bins]]].take, parameter[name[self].polind]] name[self].freqs assign[=] name[freqs] call[name[self].logger.debug, parameter[constant[Using bandpass at BP bins (1000 bins per spw): %s], call[name[str], parameter[name[bins]]]]]
keyword[def] identifier[set_selection] ( identifier[self] , identifier[time] , identifier[freqs] , identifier[blarr] , identifier[calname] = literal[string] , identifier[radec] =(), identifier[dist] = literal[int] , identifier[spwind] =[], identifier[pols] =[ literal[string] , literal[string] ]): literal[string] identifier[self] . identifier[spwind] = identifier[spwind] keyword[if] identifier[calname] : identifier[self] . identifier[logger] . identifier[warn] ( literal[string] ) keyword[if] literal[string] keyword[in] literal[string] . identifier[join] ( identifier[pols] ) keyword[or] literal[string] keyword[in] literal[string] . identifier[join] ( identifier[pols] ): identifier[polord] =[ literal[string] , literal[string] ] keyword[elif] literal[string] keyword[in] literal[string] . identifier[join] ( identifier[pols] ) keyword[or] literal[string] keyword[in] literal[string] . identifier[join] ( identifier[pols] ): identifier[polord] =[ literal[string] , literal[string] ] identifier[self] . identifier[polind] =[ identifier[polord] . identifier[index] ( identifier[pol] ) keyword[for] identifier[pol] keyword[in] identifier[pols] ] identifier[self] . identifier[ant1ind] =[ identifier[n] . identifier[where] ( identifier[ant1] == identifier[n] . identifier[unique] ( identifier[blarr] ))[ literal[int] ][ literal[int] ] keyword[for] ( identifier[ant1] , identifier[ant2] ) keyword[in] identifier[blarr] ] identifier[self] . identifier[ant2ind] =[ identifier[n] . identifier[where] ( identifier[ant2] == identifier[n] . identifier[unique] ( identifier[blarr] ))[ literal[int] ][ literal[int] ] keyword[for] ( identifier[ant1] , identifier[ant2] ) keyword[in] identifier[blarr] ] keyword[if] identifier[radec] : identifier[ra] , identifier[dec] = identifier[radec] identifier[calra] = identifier[n] . identifier[array] ( identifier[self] . identifier[radec] )[:, literal[int] ] identifier[caldec] = identifier[n] . identifier[array] ( identifier[self] . 
identifier[radec] )[:, literal[int] ] identifier[fields] = identifier[n] . identifier[where] (( identifier[n] . identifier[abs] ( identifier[calra] - identifier[ra] )< identifier[n] . identifier[radians] ( identifier[dist] ))&( identifier[n] . identifier[abs] ( identifier[caldec] - identifier[dec] )< identifier[n] . identifier[radians] ( identifier[dist] )))[ literal[int] ] keyword[if] identifier[len] ( identifier[fields] )== literal[int] : identifier[self] . identifier[logger] . identifier[warn] ( literal[string] ) identifier[fields] = identifier[n] . identifier[unique] ( identifier[self] . identifier[uniquefield] ) keyword[else] : identifier[fields] = identifier[n] . identifier[unique] ( identifier[self] . identifier[uniquefield] ) identifier[sel] =[] keyword[for] identifier[field] keyword[in] identifier[fields] : identifier[sel] += identifier[list] ( identifier[n] . identifier[where] ( identifier[field] == identifier[self] . identifier[uniquefield] )[ literal[int] ]) identifier[mjddist] = identifier[n] . identifier[abs] ( identifier[time] - identifier[self] . identifier[uniquemjd] [ identifier[sel] ]) identifier[closestgain] = identifier[n] . identifier[where] ( identifier[mjddist] == identifier[mjddist] . identifier[min] ())[ literal[int] ][ literal[int] ] identifier[self] . identifier[logger] . identifier[info] ( literal[string] %( identifier[self] . identifier[uniquefield] [ identifier[n] . identifier[where] ( identifier[self] . identifier[uniquemjd] == identifier[self] . identifier[uniquemjd] [ identifier[sel] ][ identifier[closestgain] ])], identifier[self] . identifier[uniquemjd] [ identifier[closestgain] ], identifier[mjddist] [ identifier[closestgain] ]* literal[int] * literal[int] )) identifier[self] . identifier[gain] = identifier[self] . identifier[gain] . identifier[take] ( identifier[self] . identifier[spwind] , identifier[axis] = literal[int] ). identifier[take] ( identifier[self] . 
identifier[polind] , identifier[axis] = literal[int] )[ identifier[closestgain] ] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[bins] =[ identifier[n] . identifier[where] ( identifier[n] . identifier[min] ( identifier[n] . identifier[abs] ( identifier[self] . identifier[bpfreq] - identifier[selfreq] ))== identifier[n] . identifier[abs] ( identifier[self] . identifier[bpfreq] - identifier[selfreq] ))[ literal[int] ][ literal[int] ] keyword[for] identifier[selfreq] keyword[in] identifier[freqs] ] identifier[self] . identifier[bandpass] = identifier[self] . identifier[bandpass] . identifier[take] ( identifier[bins] , identifier[axis] = literal[int] ). identifier[take] ( identifier[self] . identifier[polind] , identifier[axis] = literal[int] ) identifier[self] . identifier[freqs] = identifier[freqs] identifier[self] . identifier[logger] . identifier[debug] ( literal[string] , identifier[str] ( identifier[bins] ))
def set_selection(self, time, freqs, blarr, calname='', radec=(), dist=1, spwind=[], pols=['XX', 'YY']): """ Set select parameter that defines time, spw, and pol solutions to apply. time defines the time to find solutions near in mjd. freqs defines frequencies to select bandpass solution blarr is array of size 2xnbl that gives pairs of antennas in each baseline (a la tpipe.blarr). radec (radian tuple) and dist (deg) define optional location of source for filtering solutions. spwind is list of indices to be used (e.g., [0,2,4,10]) pols is from d['pols'] (e.g., ['RR']). single or dual parallel allowed. calname not used. here for uniformity with telcal_sol. """ self.spwind = spwind if calname: self.logger.warn('calname option not used for casa_sol. Applied based on radec.') # depends on [control=['if'], data=[]] # define pol index if 'X' in ''.join(pols) or 'Y' in ''.join(pols): polord = ['XX', 'YY'] # depends on [control=['if'], data=[]] elif 'R' in ''.join(pols) or 'L' in ''.join(pols): polord = ['RR', 'LL'] # depends on [control=['if'], data=[]] self.polind = [polord.index(pol) for pol in pols] self.ant1ind = [n.where(ant1 == n.unique(blarr))[0][0] for (ant1, ant2) in blarr] self.ant2ind = [n.where(ant2 == n.unique(blarr))[0][0] for (ant1, ant2) in blarr] # select by smallest time distance for source within some angular region of target if radec: (ra, dec) = radec calra = n.array(self.radec)[:, 0] caldec = n.array(self.radec)[:, 1] fields = n.where((n.abs(calra - ra) < n.radians(dist)) & (n.abs(caldec - dec) < n.radians(dist)))[0] if len(fields) == 0: self.logger.warn('Warning: no close calibrator found. 
Removing radec restriction.') fields = n.unique(self.uniquefield) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: fields = n.unique(self.uniquefield) sel = [] for field in fields: sel += list(n.where(field == self.uniquefield)[0]) # depends on [control=['for'], data=['field']] mjddist = n.abs(time - self.uniquemjd[sel]) closestgain = n.where(mjddist == mjddist.min())[0][0] self.logger.info('Using gain solution for field %d at MJD %.5f, separated by %d min ' % (self.uniquefield[n.where(self.uniquemjd == self.uniquemjd[sel][closestgain])], self.uniquemjd[closestgain], mjddist[closestgain] * 24 * 60)) self.gain = self.gain.take(self.spwind, axis=2).take(self.polind, axis=3)[closestgain] if hasattr(self, 'bandpass'): bins = [n.where(n.min(n.abs(self.bpfreq - selfreq)) == n.abs(self.bpfreq - selfreq))[0][0] for selfreq in freqs] self.bandpass = self.bandpass.take(bins, axis=1).take(self.polind, axis=2) self.freqs = freqs self.logger.debug('Using bandpass at BP bins (1000 bins per spw): %s', str(bins)) # depends on [control=['if'], data=[]]
def find(self, name=None, ns_uri=None, first_only=False):
    """
    Find :class:`Element` node descendants of this node, with optional
    constraints to limit the results.

    :param name: limit results to elements with this name.
        If *None* or ``'*'`` all element names are matched.
    :type name: string or None
    :param ns_uri: limit results to elements within this namespace URI.
        If *None* all elements are matched, regardless of namespace.
    :type ns_uri: string or None
    :param bool first_only: if *True* only return the first result node
        or *None* if there is no matching node.

    :returns: a list of :class:`Element` nodes matching any given
        constraints, or a single node if ``first_only=True``.
    """
    # None acts as a wildcard for both the element name and the namespace
    impl_nodelist = self.adapter.find_node_elements(
        self.impl_node,
        name='*' if name is None else name,
        ns_uri='*' if ns_uri is None else ns_uri)
    if not first_only:
        return self._convert_nodelist(impl_nodelist)
    if not impl_nodelist:
        return None
    return self.adapter.wrap_node(
        impl_nodelist[0], self.adapter.impl_document, self.adapter)
def function[find, parameter[self, name, ns_uri, first_only]]: constant[ Find :class:`Element` node descendants of this node, with optional constraints to limit the results. :param name: limit results to elements with this name. If *None* or ``'*'`` all element names are matched. :type name: string or None :param ns_uri: limit results to elements within this namespace URI. If *None* all elements are matched, regardless of namespace. :type ns_uri: string or None :param bool first_only: if *True* only return the first result node or *None* if there is no matching node. :returns: a list of :class:`Element` nodes matching any given constraints, or a single node if ``first_only=True``. ] if compare[name[name] is constant[None]] begin[:] variable[name] assign[=] constant[*] if compare[name[ns_uri] is constant[None]] begin[:] variable[ns_uri] assign[=] constant[*] variable[impl_nodelist] assign[=] call[name[self].adapter.find_node_elements, parameter[name[self].impl_node]] if name[first_only] begin[:] if name[impl_nodelist] begin[:] return[call[name[self].adapter.wrap_node, parameter[call[name[impl_nodelist]][constant[0]], name[self].adapter.impl_document, name[self].adapter]]] return[call[name[self]._convert_nodelist, parameter[name[impl_nodelist]]]]
keyword[def] identifier[find] ( identifier[self] , identifier[name] = keyword[None] , identifier[ns_uri] = keyword[None] , identifier[first_only] = keyword[False] ): literal[string] keyword[if] identifier[name] keyword[is] keyword[None] : identifier[name] = literal[string] keyword[if] identifier[ns_uri] keyword[is] keyword[None] : identifier[ns_uri] = literal[string] identifier[impl_nodelist] = identifier[self] . identifier[adapter] . identifier[find_node_elements] ( identifier[self] . identifier[impl_node] , identifier[name] = identifier[name] , identifier[ns_uri] = identifier[ns_uri] ) keyword[if] identifier[first_only] : keyword[if] identifier[impl_nodelist] : keyword[return] identifier[self] . identifier[adapter] . identifier[wrap_node] ( identifier[impl_nodelist] [ literal[int] ], identifier[self] . identifier[adapter] . identifier[impl_document] , identifier[self] . identifier[adapter] ) keyword[else] : keyword[return] keyword[None] keyword[return] identifier[self] . identifier[_convert_nodelist] ( identifier[impl_nodelist] )
def find(self, name=None, ns_uri=None, first_only=False): """ Find :class:`Element` node descendants of this node, with optional constraints to limit the results. :param name: limit results to elements with this name. If *None* or ``'*'`` all element names are matched. :type name: string or None :param ns_uri: limit results to elements within this namespace URI. If *None* all elements are matched, regardless of namespace. :type ns_uri: string or None :param bool first_only: if *True* only return the first result node or *None* if there is no matching node. :returns: a list of :class:`Element` nodes matching any given constraints, or a single node if ``first_only=True``. """ if name is None: name = '*' # Match all element names # depends on [control=['if'], data=['name']] if ns_uri is None: ns_uri = '*' # Match all namespaces # depends on [control=['if'], data=['ns_uri']] impl_nodelist = self.adapter.find_node_elements(self.impl_node, name=name, ns_uri=ns_uri) if first_only: if impl_nodelist: return self.adapter.wrap_node(impl_nodelist[0], self.adapter.impl_document, self.adapter) # depends on [control=['if'], data=[]] else: return None # depends on [control=['if'], data=[]] return self._convert_nodelist(impl_nodelist)
def update_unit(unit, **kwargs):
    """
    Update a unit in the DB.

    :param unit: object carrying the new unit values; read both by item
        access ('id', 'name', 'lf', 'cf', optional 'project_id') and by
        attribute access (abbreviation, description).
    :raises ResourceNotFoundError: if no unit with the given ID exists.
    :return: the updated DB unit wrapped in a JSONObject.
    """
    try:
        # NOTE(review): the trailing no-argument .filter() is a no-op -- confirm intended.
        db_unit = db.DBSession.query(Unit).join(Dimension).filter(Unit.id==unit["id"]).filter().one()
        db_unit.name = unit["name"]
        # Needed to uniform into to description
        # NOTE(review): mixed access styles below -- item access (unit["name"]) vs
        # attribute access (unit.abbreviation); presumably `unit` supports both.
        db_unit.abbreviation = unit.abbreviation
        db_unit.description = unit.description
        db_unit.lf = unit["lf"]
        db_unit.cf = unit["cf"]
        # Only overwrite project_id when a meaningful (non-None, non-empty) value was supplied.
        if "project_id" in unit and unit['project_id'] is not None and unit['project_id'] != "":
            db_unit.project_id = unit["project_id"]
    except NoResultFound:
        raise ResourceNotFoundError("Unit (ID=%s) does not exist"%(unit["id"]))
    # Push pending changes to the DB within the current transaction.
    db.DBSession.flush()
    return JSONObject(db_unit)
def function[update_unit, parameter[unit]]: constant[ Update a unit in the DB. Raises and exception if the unit does not exist ] <ast.Try object at 0x7da18bccaec0> call[name[db].DBSession.flush, parameter[]] return[call[name[JSONObject], parameter[name[db_unit]]]]
keyword[def] identifier[update_unit] ( identifier[unit] ,** identifier[kwargs] ): literal[string] keyword[try] : identifier[db_unit] = identifier[db] . identifier[DBSession] . identifier[query] ( identifier[Unit] ). identifier[join] ( identifier[Dimension] ). identifier[filter] ( identifier[Unit] . identifier[id] == identifier[unit] [ literal[string] ]). identifier[filter] (). identifier[one] () identifier[db_unit] . identifier[name] = identifier[unit] [ literal[string] ] identifier[db_unit] . identifier[abbreviation] = identifier[unit] . identifier[abbreviation] identifier[db_unit] . identifier[description] = identifier[unit] . identifier[description] identifier[db_unit] . identifier[lf] = identifier[unit] [ literal[string] ] identifier[db_unit] . identifier[cf] = identifier[unit] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[unit] keyword[and] identifier[unit] [ literal[string] ] keyword[is] keyword[not] keyword[None] keyword[and] identifier[unit] [ literal[string] ]!= literal[string] : identifier[db_unit] . identifier[project_id] = identifier[unit] [ literal[string] ] keyword[except] identifier[NoResultFound] : keyword[raise] identifier[ResourceNotFoundError] ( literal[string] %( identifier[unit] [ literal[string] ])) identifier[db] . identifier[DBSession] . identifier[flush] () keyword[return] identifier[JSONObject] ( identifier[db_unit] )
def update_unit(unit, **kwargs): """ Update a unit in the DB. Raises and exception if the unit does not exist """ try: db_unit = db.DBSession.query(Unit).join(Dimension).filter(Unit.id == unit['id']).filter().one() db_unit.name = unit['name'] # Needed to uniform into to description db_unit.abbreviation = unit.abbreviation db_unit.description = unit.description db_unit.lf = unit['lf'] db_unit.cf = unit['cf'] if 'project_id' in unit and unit['project_id'] is not None and (unit['project_id'] != ''): db_unit.project_id = unit['project_id'] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except NoResultFound: raise ResourceNotFoundError('Unit (ID=%s) does not exist' % unit['id']) # depends on [control=['except'], data=[]] db.DBSession.flush() return JSONObject(db_unit)
def t_QUOTED_STRING(t):
    r'"([^"\\]|\\["\\])*"'
    # Strip exactly one surrounding quote from each end, then decode the two
    # escape sequences the grammar permits (\" and \\).
    #
    # Bug fix: the previous .strip('"') removed *all* leading/trailing quote
    # characters, corrupting values whose content ends with an escaped quote
    # (e.g. the token "\"\"" lost part of its content).
    #
    # TODO: Add support for:
    # - An undefined escape sequence (such as "\a" in a context where "a"
    #   has no special meaning) is interpreted as if there were no backslash
    #   (in this case, "\a" is just "a"), though that may be changed by
    #   extensions.
    # - Non-printing characters such as tabs, CRLF, and control characters
    #   are permitted in quoted strings. Quoted strings MAY span multiple
    #   lines. An unencoded NUL (US-ASCII 0) is not allowed in strings.
    t.value = t.value[1:-1].replace(r'\"', '"').replace(r'\\', '\\')
    return t
def function[t_QUOTED_STRING, parameter[t]]: constant["([^"\\]|\\["\\])*"] name[t].value assign[=] call[call[call[name[t].value.strip, parameter[constant["]]].replace, parameter[constant[\"], constant["]]].replace, parameter[constant[\\], constant[\]]] return[name[t]]
keyword[def] identifier[t_QUOTED_STRING] ( identifier[t] ): literal[string] identifier[t] . identifier[value] = identifier[t] . identifier[value] . identifier[strip] ( literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ) keyword[return] identifier[t]
def t_QUOTED_STRING(t): '''"([^"\\\\]|\\\\["\\\\])*"''' # TODO: Add support for: # - An undefined escape sequence (such as "\a" in a context where "a" # has no special meaning) is interpreted as if there were no backslash # (in this case, "\a" is just "a"), though that may be changed by # extensions. # - Non-printing characters such as tabs, CRLF, and control characters # are permitted in quoted strings. Quoted strings MAY span multiple # lines. An unencoded NUL (US-ASCII 0) is not allowed in strings. t.value = t.value.strip('"').replace('\\"', '"').replace('\\\\', '\\') return t
def _osx_popup(title, message):
    """ Shows a popup dialog message via System Events daemon.

        `title`
            Notification title.
        `message`
            Notification message.
        """
    # escape embedded double quotes so both strings survive AppleScript quoting
    message = message.replace('"', '\\"')
    title = title.replace('"', '\\"')

    # build applescript
    # Bug fix: `title` was accepted and documented but never used; it is now
    # rendered as the dialog's window title via AppleScript's `with title`.
    script = """
        tell application "System Events"
            display dialog "{0}" with title "{1}"
        end tell""".format(message, title)

    # run it
    common.shell_process(['osascript', '-e', script])
def function[_osx_popup, parameter[title, message]]: constant[ Shows a popup dialog message via System Events daemon. `title` Notification title. `message` Notification message. ] variable[message] assign[=] call[name[message].replace, parameter[constant["], constant[\"]]] variable[script] assign[=] call[constant[ tell application "System Events" display dialog "{0}" end tell].format, parameter[name[message]]] call[name[common].shell_process, parameter[list[[<ast.Constant object at 0x7da1b15b1930>, <ast.Constant object at 0x7da1b15b3b50>, <ast.Name object at 0x7da1b15b2a40>]]]]
keyword[def] identifier[_osx_popup] ( identifier[title] , identifier[message] ): literal[string] identifier[message] = identifier[message] . identifier[replace] ( literal[string] , literal[string] ) identifier[script] = literal[string] . identifier[format] ( identifier[message] ) identifier[common] . identifier[shell_process] ([ literal[string] , literal[string] , identifier[script] ])
def _osx_popup(title, message): """ Shows a popup dialog message via System Events daemon. `title` Notification title. `message` Notification message. """ message = message.replace('"', '\\"') # escape message # build applescript script = '\n tell application "System Events"\n display dialog "{0}"\n end tell'.format(message) # run it common.shell_process(['osascript', '-e', script])
def update_fixed(self, kwargs_lens, kwargs_source, kwargs_lens_light, kwargs_ps, kwargs_cosmo,
                 lens_add_fixed=None, source_add_fixed=None, lens_light_add_fixed=None,
                 ps_add_fixed=None, cosmo_add_fixed=None,
                 lens_remove_fixed=None, source_remove_fixed=None, lens_light_remove_fixed=None,
                 ps_remove_fixed=None, cosmo_remove_fixed=None):
    """
    adds the values of the keyword arguments that are stated in the *_add_fixed lists to the
    existing fixed arguments, and removes the parameters named in the *_remove_fixed lists.

    :param kwargs_lens: current lens model keyword arguments
    :param kwargs_source: current source model keyword arguments
    :param kwargs_lens_light: current lens light model keyword arguments
    :param kwargs_ps: current point source keyword arguments
    :param kwargs_cosmo: current cosmography keyword arguments
    :param lens_add_fixed: parameter names to newly fix for the lens model (default [])
    :param source_add_fixed: parameter names to newly fix for the source model (default [])
    :param lens_light_add_fixed: parameter names to newly fix for the lens light model (default [])
    :param ps_add_fixed: parameter names to newly fix for the point sources (default [])
    :param cosmo_add_fixed: parameter names to newly fix for the cosmography (default [])
    :param lens_remove_fixed: parameter names to un-fix for the lens model (default [])
    :param source_remove_fixed: parameter names to un-fix for the source model (default [])
    :param lens_light_remove_fixed: parameter names to un-fix for the lens light model (default [])
    :param ps_remove_fixed: parameter names to un-fix for the point sources (default [])
    :param cosmo_remove_fixed: parameter names to un-fix for the cosmography (default [])
    :return: None; the internal fixed-parameter state is updated in place
    """
    # Bug fix: all list defaults were mutable ([]), a shared-state hazard;
    # use None sentinels and normalize here instead.
    lens_add_fixed = lens_add_fixed or []
    source_add_fixed = source_add_fixed or []
    lens_light_add_fixed = lens_light_add_fixed or []
    ps_add_fixed = ps_add_fixed or []
    cosmo_add_fixed = cosmo_add_fixed or []
    lens_remove_fixed = lens_remove_fixed or []
    source_remove_fixed = source_remove_fixed or []
    lens_light_remove_fixed = lens_light_remove_fixed or []
    ps_remove_fixed = ps_remove_fixed or []
    cosmo_remove_fixed = cosmo_remove_fixed or []

    lens_fixed = self._add_fixed(kwargs_lens, self._lens_fixed, lens_add_fixed)
    lens_fixed = self._remove_fixed(lens_fixed, lens_remove_fixed)
    source_fixed = self._add_fixed(kwargs_source, self._source_fixed, source_add_fixed)
    source_fixed = self._remove_fixed(source_fixed, source_remove_fixed)
    lens_light_fixed = self._add_fixed(kwargs_lens_light, self._lens_light_fixed, lens_light_add_fixed)
    lens_light_fixed = self._remove_fixed(lens_light_fixed, lens_light_remove_fixed)
    ps_fixed = self._add_fixed(kwargs_ps, self._ps_fixed, ps_add_fixed)
    ps_fixed = self._remove_fixed(ps_fixed, ps_remove_fixed)

    # cosmo has no _add_fixed/_remove_fixed helper; handle it inline
    cosmo_fixed = copy.deepcopy(self._cosmo_fixed)
    for param_name in cosmo_add_fixed:
        # an already-fixed value takes precedence over the current kwargs value
        if param_name not in cosmo_fixed:
            cosmo_fixed[param_name] = kwargs_cosmo[param_name]
    for param_name in cosmo_remove_fixed:
        if param_name in cosmo_fixed:
            del cosmo_fixed[param_name]

    self._lens_fixed = lens_fixed
    self._source_fixed = source_fixed
    self._lens_light_fixed = lens_light_fixed
    self._ps_fixed = ps_fixed
    self._cosmo_fixed = cosmo_fixed
def function[update_fixed, parameter[self, kwargs_lens, kwargs_source, kwargs_lens_light, kwargs_ps, kwargs_cosmo, lens_add_fixed, source_add_fixed, lens_light_add_fixed, ps_add_fixed, cosmo_add_fixed, lens_remove_fixed, source_remove_fixed, lens_light_remove_fixed, ps_remove_fixed, cosmo_remove_fixed]]: constant[ adds the values of the keyword arguments that are stated in the _add_fixed to the existing fixed arguments. :param kwargs_lens: :param kwargs_source: :param kwargs_lens_light: :param kwargs_ps: :param kwargs_cosmo: :param lens_add_fixed: :param source_add_fixed: :param lens_light_add_fixed: :param ps_add_fixed: :param cosmo_add_fixed: :return: updated kwargs fixed ] variable[lens_fixed] assign[=] call[name[self]._add_fixed, parameter[name[kwargs_lens], name[self]._lens_fixed, name[lens_add_fixed]]] variable[lens_fixed] assign[=] call[name[self]._remove_fixed, parameter[name[lens_fixed], name[lens_remove_fixed]]] variable[source_fixed] assign[=] call[name[self]._add_fixed, parameter[name[kwargs_source], name[self]._source_fixed, name[source_add_fixed]]] variable[source_fixed] assign[=] call[name[self]._remove_fixed, parameter[name[source_fixed], name[source_remove_fixed]]] variable[lens_light_fixed] assign[=] call[name[self]._add_fixed, parameter[name[kwargs_lens_light], name[self]._lens_light_fixed, name[lens_light_add_fixed]]] variable[lens_light_fixed] assign[=] call[name[self]._remove_fixed, parameter[name[lens_light_fixed], name[lens_light_remove_fixed]]] variable[ps_fixed] assign[=] call[name[self]._add_fixed, parameter[name[kwargs_ps], name[self]._ps_fixed, name[ps_add_fixed]]] variable[ps_fixed] assign[=] call[name[self]._remove_fixed, parameter[name[ps_fixed], name[ps_remove_fixed]]] variable[cosmo_fixed] assign[=] call[name[copy].deepcopy, parameter[name[self]._cosmo_fixed]] for taget[name[param_name]] in starred[name[cosmo_add_fixed]] begin[:] if compare[name[param_name] in name[cosmo_fixed]] begin[:] pass for taget[name[param_name]] in 
starred[name[cosmo_remove_fixed]] begin[:] if compare[name[param_name] in name[cosmo_fixed]] begin[:] <ast.Delete object at 0x7da20c6e70a0> <ast.Tuple object at 0x7da20c6e6d70> assign[=] tuple[[<ast.Name object at 0x7da20c6e7f10>, <ast.Name object at 0x7da20c6e7b20>, <ast.Name object at 0x7da20c6e5930>, <ast.Name object at 0x7da20c6e7a30>, <ast.Name object at 0x7da20c6e7f70>]]
keyword[def] identifier[update_fixed] ( identifier[self] , identifier[kwargs_lens] , identifier[kwargs_source] , identifier[kwargs_lens_light] , identifier[kwargs_ps] , identifier[kwargs_cosmo] , identifier[lens_add_fixed] =[], identifier[source_add_fixed] =[], identifier[lens_light_add_fixed] =[], identifier[ps_add_fixed] =[], identifier[cosmo_add_fixed] =[], identifier[lens_remove_fixed] =[], identifier[source_remove_fixed] =[], identifier[lens_light_remove_fixed] =[], identifier[ps_remove_fixed] =[], identifier[cosmo_remove_fixed] =[]): literal[string] identifier[lens_fixed] = identifier[self] . identifier[_add_fixed] ( identifier[kwargs_lens] , identifier[self] . identifier[_lens_fixed] , identifier[lens_add_fixed] ) identifier[lens_fixed] = identifier[self] . identifier[_remove_fixed] ( identifier[lens_fixed] , identifier[lens_remove_fixed] ) identifier[source_fixed] = identifier[self] . identifier[_add_fixed] ( identifier[kwargs_source] , identifier[self] . identifier[_source_fixed] , identifier[source_add_fixed] ) identifier[source_fixed] = identifier[self] . identifier[_remove_fixed] ( identifier[source_fixed] , identifier[source_remove_fixed] ) identifier[lens_light_fixed] = identifier[self] . identifier[_add_fixed] ( identifier[kwargs_lens_light] , identifier[self] . identifier[_lens_light_fixed] , identifier[lens_light_add_fixed] ) identifier[lens_light_fixed] = identifier[self] . identifier[_remove_fixed] ( identifier[lens_light_fixed] , identifier[lens_light_remove_fixed] ) identifier[ps_fixed] = identifier[self] . identifier[_add_fixed] ( identifier[kwargs_ps] , identifier[self] . identifier[_ps_fixed] , identifier[ps_add_fixed] ) identifier[ps_fixed] = identifier[self] . identifier[_remove_fixed] ( identifier[ps_fixed] , identifier[ps_remove_fixed] ) identifier[cosmo_fixed] = identifier[copy] . identifier[deepcopy] ( identifier[self] . 
identifier[_cosmo_fixed] ) keyword[for] identifier[param_name] keyword[in] identifier[cosmo_add_fixed] : keyword[if] identifier[param_name] keyword[in] identifier[cosmo_fixed] : keyword[pass] keyword[else] : identifier[cosmo_fixed] [ identifier[param_name] ]= identifier[kwargs_cosmo] [ identifier[param_name] ] keyword[for] identifier[param_name] keyword[in] identifier[cosmo_remove_fixed] : keyword[if] identifier[param_name] keyword[in] identifier[cosmo_fixed] : keyword[del] identifier[cosmo_fixed] [ identifier[param_name] ] identifier[self] . identifier[_lens_fixed] , identifier[self] . identifier[_source_fixed] , identifier[self] . identifier[_lens_light_fixed] , identifier[self] . identifier[_ps_fixed] , identifier[self] . identifier[_cosmo_fixed] = identifier[lens_fixed] , identifier[source_fixed] , identifier[lens_light_fixed] , identifier[ps_fixed] , identifier[cosmo_fixed]
def update_fixed(self, kwargs_lens, kwargs_source, kwargs_lens_light, kwargs_ps, kwargs_cosmo, lens_add_fixed=[], source_add_fixed=[], lens_light_add_fixed=[], ps_add_fixed=[], cosmo_add_fixed=[], lens_remove_fixed=[], source_remove_fixed=[], lens_light_remove_fixed=[], ps_remove_fixed=[], cosmo_remove_fixed=[]): """ adds the values of the keyword arguments that are stated in the _add_fixed to the existing fixed arguments. :param kwargs_lens: :param kwargs_source: :param kwargs_lens_light: :param kwargs_ps: :param kwargs_cosmo: :param lens_add_fixed: :param source_add_fixed: :param lens_light_add_fixed: :param ps_add_fixed: :param cosmo_add_fixed: :return: updated kwargs fixed """ lens_fixed = self._add_fixed(kwargs_lens, self._lens_fixed, lens_add_fixed) lens_fixed = self._remove_fixed(lens_fixed, lens_remove_fixed) source_fixed = self._add_fixed(kwargs_source, self._source_fixed, source_add_fixed) source_fixed = self._remove_fixed(source_fixed, source_remove_fixed) lens_light_fixed = self._add_fixed(kwargs_lens_light, self._lens_light_fixed, lens_light_add_fixed) lens_light_fixed = self._remove_fixed(lens_light_fixed, lens_light_remove_fixed) ps_fixed = self._add_fixed(kwargs_ps, self._ps_fixed, ps_add_fixed) ps_fixed = self._remove_fixed(ps_fixed, ps_remove_fixed) cosmo_fixed = copy.deepcopy(self._cosmo_fixed) for param_name in cosmo_add_fixed: if param_name in cosmo_fixed: pass # depends on [control=['if'], data=[]] else: cosmo_fixed[param_name] = kwargs_cosmo[param_name] # depends on [control=['for'], data=['param_name']] for param_name in cosmo_remove_fixed: if param_name in cosmo_fixed: del cosmo_fixed[param_name] (self._lens_fixed, self._source_fixed, self._lens_light_fixed, self._ps_fixed, self._cosmo_fixed) = (lens_fixed, source_fixed, lens_light_fixed, ps_fixed, cosmo_fixed) # depends on [control=['if'], data=['param_name', 'cosmo_fixed']] # depends on [control=['for'], data=['param_name']]
def register_new_suffix_tree(case_insensitive=False):
    """Factory method, returns new suffix tree object.
    """
    assert isinstance(case_insensitive, bool)
    # the root node exists before the tree and is registered with it below
    root = register_new_node()

    # create the domain event that describes the new (empty) suffix tree
    created = SuffixTree.Created(
        originator_id=uuid4(),
        root_node_id=root.id,
        case_insensitive=case_insensitive,
    )

    # build the entity from the event, then attach the root node
    tree = SuffixTree.mutate(event=created)
    assert isinstance(tree, SuffixTree)
    tree.nodes[root.id] = root

    # announce the creation to any subscribers
    publish(created)
    return tree
def function[register_new_suffix_tree, parameter[case_insensitive]]: constant[Factory method, returns new suffix tree object. ] assert[call[name[isinstance], parameter[name[case_insensitive], name[bool]]]] variable[root_node] assign[=] call[name[register_new_node], parameter[]] variable[suffix_tree_id] assign[=] call[name[uuid4], parameter[]] variable[event] assign[=] call[name[SuffixTree].Created, parameter[]] variable[entity] assign[=] call[name[SuffixTree].mutate, parameter[]] assert[call[name[isinstance], parameter[name[entity], name[SuffixTree]]]] call[name[entity].nodes][name[root_node].id] assign[=] name[root_node] call[name[publish], parameter[name[event]]] return[name[entity]]
keyword[def] identifier[register_new_suffix_tree] ( identifier[case_insensitive] = keyword[False] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[case_insensitive] , identifier[bool] ) identifier[root_node] = identifier[register_new_node] () identifier[suffix_tree_id] = identifier[uuid4] () identifier[event] = identifier[SuffixTree] . identifier[Created] ( identifier[originator_id] = identifier[suffix_tree_id] , identifier[root_node_id] = identifier[root_node] . identifier[id] , identifier[case_insensitive] = identifier[case_insensitive] , ) identifier[entity] = identifier[SuffixTree] . identifier[mutate] ( identifier[event] = identifier[event] ) keyword[assert] identifier[isinstance] ( identifier[entity] , identifier[SuffixTree] ) identifier[entity] . identifier[nodes] [ identifier[root_node] . identifier[id] ]= identifier[root_node] identifier[publish] ( identifier[event] ) keyword[return] identifier[entity]
def register_new_suffix_tree(case_insensitive=False): """Factory method, returns new suffix tree object. """ assert isinstance(case_insensitive, bool) root_node = register_new_node() suffix_tree_id = uuid4() event = SuffixTree.Created(originator_id=suffix_tree_id, root_node_id=root_node.id, case_insensitive=case_insensitive) entity = SuffixTree.mutate(event=event) assert isinstance(entity, SuffixTree) entity.nodes[root_node.id] = root_node publish(event) return entity
def get_parameters(self, regex_exp, parameters):
    """
    Given a regex expression and the string with the parameters,
    either return the regex match groups
    or raise an exception if the regex did not find a match
    :param regex_exp: regular expression the parameter string must match
    :param parameters: raw parameter string; equate names are substituted
        with their values before matching
    :return: tuple of the captured groups
    :raises iarm.exceptions.ParsingError: if ``parameters`` does not match
    """
    # TODO find a better way to do the equate replacement
    # Substitute longest equate names first so an equate whose name is a
    # substring of another (e.g. 'N' inside 'N1') cannot corrupt the longer
    # name during the plain-text replacement below.
    for rep in sorted(self.equates, key=len, reverse=True):
        parameters = parameters.replace(rep, str(self.equates[rep]))

    match = re.match(regex_exp, parameters)
    if not match:
        raise iarm.exceptions.ParsingError("Parameters are None, did you miss a comma?")
    return match.groups()
def function[get_parameters, parameter[self, regex_exp, parameters]]: constant[ Given a regex expression and the string with the paramers, either return a regex match object or raise an exception if the regex did not find a match :param regex_exp: :param parameters: :return: ] for taget[name[rep]] in starred[name[self].equates] begin[:] variable[parameters] assign[=] call[name[parameters].replace, parameter[name[rep], call[name[str], parameter[call[name[self].equates][name[rep]]]]]] variable[match] assign[=] call[name[re].match, parameter[name[regex_exp], name[parameters]]] if <ast.UnaryOp object at 0x7da1b0ff33d0> begin[:] <ast.Raise object at 0x7da1b0ff2ec0> return[call[name[match].groups, parameter[]]]
keyword[def] identifier[get_parameters] ( identifier[self] , identifier[regex_exp] , identifier[parameters] ): literal[string] keyword[for] identifier[rep] keyword[in] identifier[self] . identifier[equates] : identifier[parameters] = identifier[parameters] . identifier[replace] ( identifier[rep] , identifier[str] ( identifier[self] . identifier[equates] [ identifier[rep] ])) identifier[match] = identifier[re] . identifier[match] ( identifier[regex_exp] , identifier[parameters] ) keyword[if] keyword[not] identifier[match] : keyword[raise] identifier[iarm] . identifier[exceptions] . identifier[ParsingError] ( literal[string] ) keyword[return] identifier[match] . identifier[groups] ()
def get_parameters(self, regex_exp, parameters): """ Given a regex expression and the string with the paramers, either return a regex match object or raise an exception if the regex did not find a match :param regex_exp: :param parameters: :return: """ # TODO find a better way to do the equate replacement for rep in self.equates: parameters = parameters.replace(rep, str(self.equates[rep])) # depends on [control=['for'], data=['rep']] match = re.match(regex_exp, parameters) if not match: raise iarm.exceptions.ParsingError('Parameters are None, did you miss a comma?') # depends on [control=['if'], data=[]] return match.groups()
def touvw(self, v):
    """Calculates a uvw measure from a baseline.

    The baseline can consist of a vector of actual baseline positions.
    Note that the baseline does not have to be a proper baseline, but can
    be a series of positions (to call positions baselines see
    :meth:`asbaseline`) for speed reasons: operations are linear and can
    be done on positions, which are converted to baseline values at the
    end (with :meth:`expand`).  Whatever the reference code of the
    baseline, the returned uvw will be given in J2000.

    The returned measure contains a ``dot`` quantity array with the time
    derivative of the uvw (only the sidereal rate is taken into account;
    not precession, earth tides and similar much smaller variations), and
    an ``xyz`` quantity array with the uvw values themselves.

    uvw coordinates are calculated for a certain direction in the sky,
    hence the frame has to contain the direction for the calculation to
    work.  Since the baseline and the sky rotate with respect to each
    other, the time should be specified as well.

    Example::

        >>> dm.do_frame(dm.observatory('atca'))
        >>> dm.do_frame(dm.source('1934-638'))
        >>> dm.do_frame(dm.epoch('utc', 'today'))
        >>> b = dm.baseline('itrf', '10m', '20m', '30m')

    """
    # only proper baseline measures can be converted to uvw
    if not (is_measure(v) and v['type'] == 'baseline'):
        raise TypeError('Illegal Baseline specified')
    result = _measures.uvw(self, v)
    # wrap the raw arrays returned by the C layer as quantities
    for key in ('xyz', 'dot'):
        result[key] = dq.quantity(result[key])
    return result
def function[touvw, parameter[self, v]]: constant[Calculates a uvw measure from a baseline. The baseline can consist of a vector of actual baseline positions. Note that the baseline does not have to be a proper baseline, but can be a series of positions (to call positions baselines see :meth:`asbaseline` ) for speed reasons: operations are linear and can be done on positions, which are converted to baseline values at the end (with :meth:`expand` ). Whatever the reference code of the baseline, the returned uvw will be given in J2000. If the dot argument is given, that variable will be filled with a quantity array consisting of the time derivative of the uvw (note that only the sidereal rate is taken into account; not precession, earth tides and similar variations, which are much smaller). If the xyz variable is given, it will be filled with the quantity values of the uvw measure. The values of the input baselines can be given as a quantity vector per x, y or z value. uvw coordinates are calculated for a certain direction in the sky hence the frame has to contain the direction for the calculation to work. Since the baseline and the sky rotate with respect of each other, the time should be specified as well. Example:: >>> dm.do_frame(dm.observatory('atca')) >>> dm.do_frame(dm.source('1934-638')) >>> dm.do_frame(dm.epoch('utc', 'today')) >>> b = dm.baseline('itrf', '10m', '20m', '30m') ] if <ast.BoolOp object at 0x7da18dc042b0> begin[:] variable[m] assign[=] call[name[_measures].uvw, parameter[name[self], name[v]]] call[name[m]][constant[xyz]] assign[=] call[name[dq].quantity, parameter[call[name[m]][constant[xyz]]]] call[name[m]][constant[dot]] assign[=] call[name[dq].quantity, parameter[call[name[m]][constant[dot]]]] return[name[m]]
keyword[def] identifier[touvw] ( identifier[self] , identifier[v] ): literal[string] keyword[if] identifier[is_measure] ( identifier[v] ) keyword[and] identifier[v] [ literal[string] ]== literal[string] : identifier[m] = identifier[_measures] . identifier[uvw] ( identifier[self] , identifier[v] ) identifier[m] [ literal[string] ]= identifier[dq] . identifier[quantity] ( identifier[m] [ literal[string] ]) identifier[m] [ literal[string] ]= identifier[dq] . identifier[quantity] ( identifier[m] [ literal[string] ]) keyword[return] identifier[m] keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] )
def touvw(self, v): """Calculates a uvw measure from a baseline. The baseline can consist of a vector of actual baseline positions. Note that the baseline does not have to be a proper baseline, but can be a series of positions (to call positions baselines see :meth:`asbaseline` ) for speed reasons: operations are linear and can be done on positions, which are converted to baseline values at the end (with :meth:`expand` ). Whatever the reference code of the baseline, the returned uvw will be given in J2000. If the dot argument is given, that variable will be filled with a quantity array consisting of the time derivative of the uvw (note that only the sidereal rate is taken into account; not precession, earth tides and similar variations, which are much smaller). If the xyz variable is given, it will be filled with the quantity values of the uvw measure. The values of the input baselines can be given as a quantity vector per x, y or z value. uvw coordinates are calculated for a certain direction in the sky hence the frame has to contain the direction for the calculation to work. Since the baseline and the sky rotate with respect of each other, the time should be specified as well. Example:: >>> dm.do_frame(dm.observatory('atca')) >>> dm.do_frame(dm.source('1934-638')) >>> dm.do_frame(dm.epoch('utc', 'today')) >>> b = dm.baseline('itrf', '10m', '20m', '30m') """ if is_measure(v) and v['type'] == 'baseline': m = _measures.uvw(self, v) m['xyz'] = dq.quantity(m['xyz']) m['dot'] = dq.quantity(m['dot']) return m # depends on [control=['if'], data=[]] else: raise TypeError('Illegal Baseline specified')
def has_bom(self, f):
    """Check for UTF8, UTF16, and UTF32 BOMs.

    Reads the first 4 bytes of the (binary-mode) file object ``f`` and
    returns the matching codec name (``'utf-8-sig'``, ``'utf-16'`` or
    ``'utf-32'``), or ``None`` if no BOM is present.  The file position is
    left 4 bytes in, as before.
    """
    content = f.read(4)
    # Explicit prefix checks replace the RE_UTF_BOM regex; the UTF-32
    # signatures must be tested before UTF-16 because the UTF-32-LE BOM
    # (FF FE 00 00) starts with the UTF-16-LE BOM (FF FE).
    if content.startswith(b'\xef\xbb\xbf'):
        return 'utf-8-sig'
    if content.startswith((b'\x00\x00\xfe\xff', b'\xff\xfe\x00\x00')):
        return 'utf-32'
    if content.startswith((b'\xfe\xff', b'\xff\xfe')):
        return 'utf-16'
    return None
def function[has_bom, parameter[self, f]]: constant[Check for UTF8, UTF16, and UTF32 BOMs.] variable[content] assign[=] call[name[f].read, parameter[constant[4]]] variable[encoding] assign[=] constant[None] variable[m] assign[=] call[name[RE_UTF_BOM].match, parameter[name[content]]] if compare[name[m] is_not constant[None]] begin[:] if call[name[m].group, parameter[constant[1]]] begin[:] variable[encoding] assign[=] constant[utf-8-sig] return[name[encoding]]
keyword[def] identifier[has_bom] ( identifier[self] , identifier[f] ): literal[string] identifier[content] = identifier[f] . identifier[read] ( literal[int] ) identifier[encoding] = keyword[None] identifier[m] = identifier[RE_UTF_BOM] . identifier[match] ( identifier[content] ) keyword[if] identifier[m] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[m] . identifier[group] ( literal[int] ): identifier[encoding] = literal[string] keyword[elif] identifier[m] . identifier[group] ( literal[int] ): identifier[encoding] = literal[string] keyword[elif] identifier[m] . identifier[group] ( literal[int] ): identifier[encoding] = literal[string] keyword[elif] identifier[m] . identifier[group] ( literal[int] ): identifier[encoding] = literal[string] keyword[elif] identifier[m] . identifier[group] ( literal[int] ): identifier[encoding] = literal[string] keyword[return] identifier[encoding]
def has_bom(self, f): """Check for UTF8, UTF16, and UTF32 BOMs.""" content = f.read(4) encoding = None m = RE_UTF_BOM.match(content) if m is not None: if m.group(1): encoding = 'utf-8-sig' # depends on [control=['if'], data=[]] elif m.group(2): encoding = 'utf-32' # depends on [control=['if'], data=[]] elif m.group(3): encoding = 'utf-32' # depends on [control=['if'], data=[]] elif m.group(4): encoding = 'utf-16' # depends on [control=['if'], data=[]] elif m.group(5): encoding = 'utf-16' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['m']] return encoding
def validate(self):
    """Validate / fix up the current config"""
    if not self.get('api_key'):
        raise ValueError("api_key not found in config. Please see documentation.")
    configured = self.get('host')
    if not configured:
        configured = DEFAULT_CLOUD_HOST
    if configured:
        # Normalize the host: drop surrounding slashes, and on Python 2
        # force a byte string so httplib's `msg += message_body` does not
        # blow up when the body contains binary data and the url is unicode.
        # Remaining failure modes include at least:
        #   - passing bytes in python3 (we try to strip unicode '/' chars)
        #   - passing unicode code points in python2 (httplib does
        #     host.encode('ascii'))
        normalized = configured.strip('/')
        if not isinstance(normalized, str):
            normalized = normalized.encode('utf-8')
        self['host'] = normalized
    self.setdefault('autostart_notification_thread', True)
def function[validate, parameter[self]]: constant[Validate / fix up the current config] if <ast.UnaryOp object at 0x7da18dc9b490> begin[:] <ast.Raise object at 0x7da18dc988b0> variable[host] assign[=] <ast.BoolOp object at 0x7da18dc9a920> if name[host] begin[:] variable[host] assign[=] call[name[host].strip, parameter[constant[/]]] if <ast.UnaryOp object at 0x7da18dc9ac80> begin[:] variable[host] assign[=] call[name[host].encode, parameter[constant[utf-8]]] call[name[self]][constant[host]] assign[=] name[host] call[name[self].setdefault, parameter[constant[autostart_notification_thread], constant[True]]]
keyword[def] identifier[validate] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[get] ( literal[string] ): keyword[raise] identifier[ValueError] ( literal[string] ) identifier[host] = identifier[self] . identifier[get] ( literal[string] ) keyword[or] identifier[DEFAULT_CLOUD_HOST] keyword[if] identifier[host] : identifier[host] = identifier[host] . identifier[strip] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[host] , identifier[str] ): identifier[host] = identifier[host] . identifier[encode] ( literal[string] ) identifier[self] [ literal[string] ]= identifier[host] identifier[self] . identifier[setdefault] ( literal[string] , keyword[True] )
def validate(self): """Validate / fix up the current config""" if not self.get('api_key'): raise ValueError('api_key not found in config. Please see documentation.') # depends on [control=['if'], data=[]] host = self.get('host') or DEFAULT_CLOUD_HOST if host: # remove extraneous slashes and force to byte string # otherwise msg += message_body in httplib will fail in python2 # when message_body contains binary data, and url is unicode # remaining failure modes include at least: # passing bytes in python3 will fail as we try to strip unicode '/' characters # passing unicode code points in python2 will fail due to httplib host.encode('ascii') host = host.strip('/') if not isinstance(host, str): host = host.encode('utf-8') # depends on [control=['if'], data=[]] self['host'] = host # depends on [control=['if'], data=[]] self.setdefault('autostart_notification_thread', True)
def caption_mentions(self) -> List[str]:
    """List of all lowercased profiles that are mentioned in the Post's caption, without preceeding @."""
    caption = self.caption
    if not caption:
        return []
    # This regular expression is from jStassen, adjusted to use Python's \w to support Unicode
    # http://blog.jstassen.com/2016/03/code-regex-for-instagram-username-and-hashtags/
    pattern = re.compile(r"(?:@)(\w(?:(?:\w|(?:\.(?!\.))){0,28}(?:\w))?)")
    return pattern.findall(caption.lower())
def function[caption_mentions, parameter[self]]: constant[List of all lowercased profiles that are mentioned in the Post's caption, without preceeding @.] if <ast.UnaryOp object at 0x7da18f09d7e0> begin[:] return[list[[]]] variable[mention_regex] assign[=] call[name[re].compile, parameter[constant[(?:@)(\w(?:(?:\w|(?:\.(?!\.))){0,28}(?:\w))?)]]] return[call[name[re].findall, parameter[name[mention_regex], call[name[self].caption.lower, parameter[]]]]]
keyword[def] identifier[caption_mentions] ( identifier[self] )-> identifier[List] [ identifier[str] ]: literal[string] keyword[if] keyword[not] identifier[self] . identifier[caption] : keyword[return] [] identifier[mention_regex] = identifier[re] . identifier[compile] ( literal[string] ) keyword[return] identifier[re] . identifier[findall] ( identifier[mention_regex] , identifier[self] . identifier[caption] . identifier[lower] ())
def caption_mentions(self) -> List[str]: """List of all lowercased profiles that are mentioned in the Post's caption, without preceeding @.""" if not self.caption: return [] # depends on [control=['if'], data=[]] # This regular expression is from jStassen, adjusted to use Python's \w to support Unicode # http://blog.jstassen.com/2016/03/code-regex-for-instagram-username-and-hashtags/ mention_regex = re.compile('(?:@)(\\w(?:(?:\\w|(?:\\.(?!\\.))){0,28}(?:\\w))?)') return re.findall(mention_regex, self.caption.lower())
def print_seqs(seqs, id2name, name):
    """
    print fasta of introns and ORFs
    # seqs[id] = [gene, model, [[i-gene_pos, i-model_pos, i-length, iseq,
    #             [orfs], [introns], orfs?, introns?, [orf annotations]], ...]]

    :param seqs: insertion data per sequence, shaped as described above
    :param id2name: dict mapping sequence-id prefixes to display names
    :param name: output file name; its base (before the last '.') prefixes the
        three generated files: <base>.orfs.faa, <base>.introns.fa,
        <base>.insertions.fa
    """
    # compute the shared output prefix once instead of three times
    base = name.rsplit('.', 1)[0]
    # context managers guarantee the handles close even on error
    with open('%s.orfs.faa' % base, 'w') as orfs, \
            open('%s.introns.fa' % base, 'w') as introns, \
            open('%s.insertions.fa' % base, 'w') as insertions:
        for seq in seqs:
            for i, ins in enumerate(seqs[seq][2], 1):
                model_pos = ins[1]
                if ins[6] is True:
                    # orf(s) in ins[4]
                    _print_orfs(orfs, ins[4], id2name, i, model_pos)
                if ins[7] is True:
                    # intron(s) in ins[5]
                    _print_introns(introns, ins[5], id2name, i, model_pos)
                _print_insertion(insertions, ins[3], id2name)


def _print_orfs(out, orf_list, id2name, i, model_pos):
    """Write one fasta record per predicted ORF inside insertion number ``i``."""
    for orf in orf_list:
        orf_info = orf[0].split('>')[1].split()
        # header fields: 0 = id, 1 = annotation, 3/5 = coordinates, 7 = strand
        seq_name = id2name[orf_info[0].split('_', 1)[0]]
        annotation = orf_info[1]
        strand = '+' if orf_info[7] == '1' else '-'
        start, stop = sorted([int(orf_info[3]), int(orf_info[5])])
        header = '>%s insertion::seq=%s type=%s strand=%s gene-pos=%s-%s model-pos=%s'\
            % (seq_name, i, annotation, strand, start, stop, model_pos)
        print('\n'.join([header, orf[1]]), file=out)


def _print_introns(out, intron_list, id2name, i, model_pos):
    """Write one fasta record per predicted intron inside insertion number ``i``."""
    for intron in intron_list:
        # header fields after '>': id, intron type, strand, gene position
        seq_id, intron_type, strand, pos = intron[0].split('>', 1)[1].split()
        seq_name = id2name[seq_id.split('_')[0]]
        header = '>%s insertion::seq=%s type=%s strand=%s gene-pos=%s model-pos=%s'\
            % (seq_name, i, intron_type, strand, pos, model_pos)
        print('\n'.join([header, intron[1]]), file=out)


def _print_insertion(out, insertion, id2name):
    """Write the insertion sequence itself, renamed via ``id2name``."""
    seq_id, info = insertion[0].split('>')[1].split(' ', 1)
    seq_name = id2name[seq_id.split('_')[0]]
    header = '>%s %s' % (seq_name, info)
    print('\n'.join([header, insertion[1]]), file=out)
def function[print_seqs, parameter[seqs, id2name, name]]: constant[ print fasta of introns and ORFs # seqs[id] = [gene, model, [[i-gene_pos, i-model_pos, i-length, iseq, [orfs], [introns], orfs?, introns?, [orf annotations]], ...]] ] variable[orfs] assign[=] call[name[open], parameter[binary_operation[constant[%s.orfs.faa] <ast.Mod object at 0x7da2590d6920> call[call[name[name].rsplit, parameter[constant[.], constant[1]]]][constant[0]]], constant[w]]] variable[introns] assign[=] call[name[open], parameter[binary_operation[constant[%s.introns.fa] <ast.Mod object at 0x7da2590d6920> call[call[name[name].rsplit, parameter[constant[.], constant[1]]]][constant[0]]], constant[w]]] variable[insertions] assign[=] call[name[open], parameter[binary_operation[constant[%s.insertions.fa] <ast.Mod object at 0x7da2590d6920> call[call[name[name].rsplit, parameter[constant[.], constant[1]]]][constant[0]]], constant[w]]] for taget[name[seq]] in starred[name[seqs]] begin[:] for taget[tuple[[<ast.Name object at 0x7da20c6a9510>, <ast.Name object at 0x7da20c6aad40>]]] in starred[call[name[enumerate], parameter[call[call[name[seqs]][name[seq]]][constant[2]], constant[1]]]] begin[:] variable[model_pos] assign[=] call[name[ins]][constant[1]] if compare[call[name[ins]][constant[6]] is constant[True]] begin[:] for taget[name[orf]] in starred[call[name[ins]][constant[4]]] begin[:] variable[orf_info] assign[=] call[call[call[call[name[orf]][constant[0]].split, parameter[constant[>]]]][constant[1]].split, parameter[]] variable[id] assign[=] call[call[call[name[orf_info]][constant[0]].split, parameter[constant[_], constant[1]]]][constant[0]] variable[name] assign[=] call[name[id2name]][name[id]] variable[annotation] assign[=] call[name[orf_info]][constant[1]] variable[strand] assign[=] call[name[orf_info]][constant[7]] if compare[name[strand] equal[==] constant[1]] begin[:] variable[strand] assign[=] constant[+] <ast.Tuple object at 0x7da20e956920> assign[=] call[name[sorted], 
parameter[list[[<ast.Call object at 0x7da20e954280>, <ast.Call object at 0x7da20e956a10>]]]] variable[header] assign[=] binary_operation[constant[>%s insertion::seq=%s type=%s strand=%s gene-pos=%s-%s model-pos=%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20e956d70>, <ast.Name object at 0x7da20e956140>, <ast.Name object at 0x7da20e957880>, <ast.Name object at 0x7da20e954e50>, <ast.Name object at 0x7da20e957190>, <ast.Name object at 0x7da20e955bd0>, <ast.Name object at 0x7da20e957b80>]]] call[name[print], parameter[call[constant[ ].join, parameter[list[[<ast.Name object at 0x7da20e957c40>, <ast.Subscript object at 0x7da20e955120>]]]]]] if compare[call[name[ins]][constant[7]] is constant[True]] begin[:] for taget[name[intron]] in starred[call[name[ins]][constant[5]]] begin[:] <ast.Tuple object at 0x7da20e956e00> assign[=] call[call[call[call[name[intron]][constant[0]].split, parameter[constant[>], constant[1]]]][constant[1]].split, parameter[]] variable[name] assign[=] call[name[id2name]][call[call[name[id].split, parameter[constant[_]]]][constant[0]]] variable[header] assign[=] binary_operation[constant[>%s insertion::seq=%s type=%s strand=%s gene-pos=%s model-pos=%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20e956650>, <ast.Name object at 0x7da20e954580>, <ast.Name object at 0x7da20e956b30>, <ast.Name object at 0x7da20e9569b0>, <ast.Name object at 0x7da20e954eb0>, <ast.Name object at 0x7da20e9549a0>]]] call[name[print], parameter[call[constant[ ].join, parameter[list[[<ast.Name object at 0x7da20e9555d0>, <ast.Subscript object at 0x7da20e956110>]]]]]] variable[insertion] assign[=] call[name[ins]][constant[3]] <ast.Tuple object at 0x7da20e955840> assign[=] call[call[call[call[name[insertion]][constant[0]].split, parameter[constant[>]]]][constant[1]].split, parameter[constant[ ], constant[1]]] variable[name] assign[=] call[name[id2name]][call[call[name[id].split, parameter[constant[_]]]][constant[0]]] 
variable[header] assign[=] binary_operation[constant[>%s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20e955030>, <ast.Name object at 0x7da20e955a50>]]] call[name[print], parameter[call[constant[ ].join, parameter[list[[<ast.Name object at 0x7da20e956260>, <ast.Subscript object at 0x7da20e9579a0>]]]]]] call[name[insertions].close, parameter[]] call[name[orfs].close, parameter[]] call[name[introns].close, parameter[]]
keyword[def] identifier[print_seqs] ( identifier[seqs] , identifier[id2name] , identifier[name] ): literal[string] identifier[orfs] = identifier[open] ( literal[string] %( identifier[name] . identifier[rsplit] ( literal[string] , literal[int] )[ literal[int] ]), literal[string] ) identifier[introns] = identifier[open] ( literal[string] %( identifier[name] . identifier[rsplit] ( literal[string] , literal[int] )[ literal[int] ]), literal[string] ) identifier[insertions] = identifier[open] ( literal[string] %( identifier[name] . identifier[rsplit] ( literal[string] , literal[int] )[ literal[int] ]), literal[string] ) keyword[for] identifier[seq] keyword[in] identifier[seqs] : keyword[for] identifier[i] , identifier[ins] keyword[in] identifier[enumerate] ( identifier[seqs] [ identifier[seq] ][ literal[int] ], literal[int] ): identifier[model_pos] = identifier[ins] [ literal[int] ] keyword[if] identifier[ins] [ literal[int] ] keyword[is] keyword[True] : keyword[for] identifier[orf] keyword[in] identifier[ins] [ literal[int] ]: identifier[orf_info] = identifier[orf] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] () identifier[id] = identifier[orf_info] [ literal[int] ]. 
identifier[split] ( literal[string] , literal[int] )[ literal[int] ] identifier[name] = identifier[id2name] [ identifier[id] ] identifier[annotation] = identifier[orf_info] [ literal[int] ] identifier[strand] = identifier[orf_info] [ literal[int] ] keyword[if] identifier[strand] == literal[string] : identifier[strand] = literal[string] keyword[else] : identifier[strand] = literal[string] identifier[start] , identifier[stop] = identifier[sorted] ([ identifier[int] ( identifier[orf_info] [ literal[int] ]), identifier[int] ( identifier[orf_info] [ literal[int] ])]) identifier[header] = literal[string] %( identifier[name] , identifier[i] , identifier[annotation] , identifier[strand] , identifier[start] , identifier[stop] , identifier[model_pos] ) identifier[print] ( literal[string] . identifier[join] ([ identifier[header] , identifier[orf] [ literal[int] ]]), identifier[file] = identifier[orfs] ) keyword[if] identifier[ins] [ literal[int] ] keyword[is] keyword[True] : keyword[for] identifier[intron] keyword[in] identifier[ins] [ literal[int] ]: identifier[id] , identifier[type] , identifier[strand] , identifier[pos] = identifier[intron] [ literal[int] ]. identifier[split] ( literal[string] , literal[int] )[ literal[int] ]. identifier[split] () identifier[name] = identifier[id2name] [ identifier[id] . identifier[split] ( literal[string] )[ literal[int] ]] identifier[header] = literal[string] %( identifier[name] , identifier[i] , identifier[type] , identifier[strand] , identifier[pos] , identifier[model_pos] ) identifier[print] ( literal[string] . identifier[join] ([ identifier[header] , identifier[intron] [ literal[int] ]]), identifier[file] = identifier[introns] ) identifier[insertion] = identifier[ins] [ literal[int] ] identifier[id] , identifier[info] = identifier[insertion] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] ( literal[string] , literal[int] ) identifier[name] = identifier[id2name] [ identifier[id] . 
identifier[split] ( literal[string] )[ literal[int] ]] identifier[header] = literal[string] %( identifier[name] , identifier[info] ) identifier[print] ( literal[string] . identifier[join] ([ identifier[header] , identifier[insertion] [ literal[int] ]]), identifier[file] = identifier[insertions] ) identifier[insertions] . identifier[close] () identifier[orfs] . identifier[close] () identifier[introns] . identifier[close] ()
def print_seqs(seqs, id2name, name): """ print fasta of introns and ORFs # seqs[id] = [gene, model, [[i-gene_pos, i-model_pos, i-length, iseq, [orfs], [introns], orfs?, introns?, [orf annotations]], ...]] """ orfs = open('%s.orfs.faa' % name.rsplit('.', 1)[0], 'w') introns = open('%s.introns.fa' % name.rsplit('.', 1)[0], 'w') insertions = open('%s.insertions.fa' % name.rsplit('.', 1)[0], 'w') for seq in seqs: for (i, ins) in enumerate(seqs[seq][2], 1): model_pos = ins[1] if ins[6] is True: # orf(s) in ins[4] for orf in ins[4]: orf_info = orf[0].split('>')[1].split() id = orf_info[0].split('_', 1)[0] name = id2name[id] annotation = orf_info[1] strand = orf_info[7] if strand == '1': strand = '+' # depends on [control=['if'], data=['strand']] else: strand = '-' (start, stop) = sorted([int(orf_info[3]), int(orf_info[5])]) header = '>%s insertion::seq=%s type=%s strand=%s gene-pos=%s-%s model-pos=%s' % (name, i, annotation, strand, start, stop, model_pos) print('\n'.join([header, orf[1]]), file=orfs) # depends on [control=['for'], data=['orf']] # depends on [control=['if'], data=[]] if ins[7] is True: # intron(s) in ins[5] for intron in ins[5]: (id, type, strand, pos) = intron[0].split('>', 1)[1].split() name = id2name[id.split('_')[0]] header = '>%s insertion::seq=%s type=%s strand=%s gene-pos=%s model-pos=%s' % (name, i, type, strand, pos, model_pos) print('\n'.join([header, intron[1]]), file=introns) # depends on [control=['for'], data=['intron']] # depends on [control=['if'], data=[]] insertion = ins[3] (id, info) = insertion[0].split('>')[1].split(' ', 1) name = id2name[id.split('_')[0]] header = '>%s %s' % (name, info) print('\n'.join([header, insertion[1]]), file=insertions) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['seq']] insertions.close() orfs.close() introns.close()
def get_composition_lookup_session_for_repository(self, repository_id, proxy): """Gets the ``OsidSession`` associated with the composition lookup service for the given repository. arg: repository_id (osid.id.Id): the ``Id`` of the repository arg: proxy (osid.proxy.Proxy): a proxy return: (osid.repository.CompositionLookupSession) - the new ``CompositionLookupSession`` raise: NotFound - ``repository_id`` not found raise: NullArgument - ``repository_id`` or ``proxy`` is ``null`` raise: OperationFailed - ``unable to complete request`` raise: Unimplemented - ``supports_composition_lookup()`` or ``supports_visible_federation()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_composition_lookup()`` and ``supports_visible_federation()`` are ``true``.* """ if not self.supports_composition_lookup(): raise errors.Unimplemented() ## # Also include check to see if the catalog Id is found otherwise raise errors.NotFound ## # pylint: disable=no-member return sessions.CompositionLookupSession(repository_id, proxy, self._runtime)
def function[get_composition_lookup_session_for_repository, parameter[self, repository_id, proxy]]: constant[Gets the ``OsidSession`` associated with the composition lookup service for the given repository. arg: repository_id (osid.id.Id): the ``Id`` of the repository arg: proxy (osid.proxy.Proxy): a proxy return: (osid.repository.CompositionLookupSession) - the new ``CompositionLookupSession`` raise: NotFound - ``repository_id`` not found raise: NullArgument - ``repository_id`` or ``proxy`` is ``null`` raise: OperationFailed - ``unable to complete request`` raise: Unimplemented - ``supports_composition_lookup()`` or ``supports_visible_federation()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_composition_lookup()`` and ``supports_visible_federation()`` are ``true``.* ] if <ast.UnaryOp object at 0x7da18ede7220> begin[:] <ast.Raise object at 0x7da18ede65c0> return[call[name[sessions].CompositionLookupSession, parameter[name[repository_id], name[proxy], name[self]._runtime]]]
keyword[def] identifier[get_composition_lookup_session_for_repository] ( identifier[self] , identifier[repository_id] , identifier[proxy] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[supports_composition_lookup] (): keyword[raise] identifier[errors] . identifier[Unimplemented] () keyword[return] identifier[sessions] . identifier[CompositionLookupSession] ( identifier[repository_id] , identifier[proxy] , identifier[self] . identifier[_runtime] )
def get_composition_lookup_session_for_repository(self, repository_id, proxy): """Gets the ``OsidSession`` associated with the composition lookup service for the given repository. arg: repository_id (osid.id.Id): the ``Id`` of the repository arg: proxy (osid.proxy.Proxy): a proxy return: (osid.repository.CompositionLookupSession) - the new ``CompositionLookupSession`` raise: NotFound - ``repository_id`` not found raise: NullArgument - ``repository_id`` or ``proxy`` is ``null`` raise: OperationFailed - ``unable to complete request`` raise: Unimplemented - ``supports_composition_lookup()`` or ``supports_visible_federation()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_composition_lookup()`` and ``supports_visible_federation()`` are ``true``.* """ if not self.supports_composition_lookup(): raise errors.Unimplemented() # depends on [control=['if'], data=[]] ## # Also include check to see if the catalog Id is found otherwise raise errors.NotFound ## # pylint: disable=no-member return sessions.CompositionLookupSession(repository_id, proxy, self._runtime)
def save_file(path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: Data to save :type data: None | int | float | str | unicode | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file """ if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): save_yaml_file(f, data) except IOError: raise except Exception as e: raise IOError(e)
def function[save_file, parameter[path, data, readable]]: constant[ Save to file :param path: File path to save :type path: str | unicode :param data: Data to save :type data: None | int | float | str | unicode | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file ] if <ast.UnaryOp object at 0x7da1b10223e0> begin[:] call[name[IOError], parameter[constant[No path specified to save]]] <ast.Try object at 0x7da1b10200a0>
keyword[def] identifier[save_file] ( identifier[path] , identifier[data] , identifier[readable] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[path] : identifier[IOError] ( literal[string] ) keyword[try] : keyword[with] identifier[io] . identifier[open] ( identifier[path] , literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[f] : keyword[if] identifier[path] . identifier[endswith] ( literal[string] ): identifier[save_json_file] ( identifier[f] , identifier[data] , identifier[pretty] = identifier[readable] , identifier[compact] =( keyword[not] identifier[readable] ), identifier[sort] = keyword[True] ) keyword[elif] identifier[path] . identifier[endswith] ( literal[string] ) keyword[or] identifier[path] . identifier[endswith] ( literal[string] ): identifier[save_yaml_file] ( identifier[f] , identifier[data] ) keyword[except] identifier[IOError] : keyword[raise] keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[raise] identifier[IOError] ( identifier[e] )
def save_file(path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: Data to save :type data: None | int | float | str | unicode | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file """ if not path: IOError('No path specified to save') # depends on [control=['if'], data=[]] try: with io.open(path, 'w', encoding='utf-8') as f: if path.endswith('.json'): save_json_file(f, data, pretty=readable, compact=not readable, sort=True) # depends on [control=['if'], data=[]] elif path.endswith('.yaml') or path.endswith('.yml'): save_yaml_file(f, data) # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['f']] # depends on [control=['try'], data=[]] except IOError: raise # depends on [control=['except'], data=[]] except Exception as e: raise IOError(e) # depends on [control=['except'], data=['e']]
def D_baffle_holes(do=None, L_unsupported=None): r'''Determines the diameter of holes in baffles for tubes according to TEMA [1]_. Applies for all geometries. Parameters ---------- do : float Tube outer diameter, [m] L_unsupported : float Distance between tube supports, [m] Returns ------- dB : float Baffle hole diameter, [m] Notes ----- Examples -------- >>> D_baffle_holes(do=.0508, L_unsupported=0.75) 0.0516 >>> D_baffle_holes(do=0.01905, L_unsupported=0.3) 0.01985 >>> D_baffle_holes(do=0.01905, L_unsupported=1.5) 0.019450000000000002 References ---------- .. [1] Standards of the Tubular Exchanger Manufacturers Association, Ninth edition, 2007, TEMA, New York. ''' if do > 0.0318 or L_unsupported <= 0.914: # 1-1/4 inches and 36 inches extra = 0.0008 else: extra = 0.0004 d = do + extra return d
def function[D_baffle_holes, parameter[do, L_unsupported]]: constant[Determines the diameter of holes in baffles for tubes according to TEMA [1]_. Applies for all geometries. Parameters ---------- do : float Tube outer diameter, [m] L_unsupported : float Distance between tube supports, [m] Returns ------- dB : float Baffle hole diameter, [m] Notes ----- Examples -------- >>> D_baffle_holes(do=.0508, L_unsupported=0.75) 0.0516 >>> D_baffle_holes(do=0.01905, L_unsupported=0.3) 0.01985 >>> D_baffle_holes(do=0.01905, L_unsupported=1.5) 0.019450000000000002 References ---------- .. [1] Standards of the Tubular Exchanger Manufacturers Association, Ninth edition, 2007, TEMA, New York. ] if <ast.BoolOp object at 0x7da2047e82e0> begin[:] variable[extra] assign[=] constant[0.0008] variable[d] assign[=] binary_operation[name[do] + name[extra]] return[name[d]]
keyword[def] identifier[D_baffle_holes] ( identifier[do] = keyword[None] , identifier[L_unsupported] = keyword[None] ): literal[string] keyword[if] identifier[do] > literal[int] keyword[or] identifier[L_unsupported] <= literal[int] : identifier[extra] = literal[int] keyword[else] : identifier[extra] = literal[int] identifier[d] = identifier[do] + identifier[extra] keyword[return] identifier[d]
def D_baffle_holes(do=None, L_unsupported=None): """Determines the diameter of holes in baffles for tubes according to TEMA [1]_. Applies for all geometries. Parameters ---------- do : float Tube outer diameter, [m] L_unsupported : float Distance between tube supports, [m] Returns ------- dB : float Baffle hole diameter, [m] Notes ----- Examples -------- >>> D_baffle_holes(do=.0508, L_unsupported=0.75) 0.0516 >>> D_baffle_holes(do=0.01905, L_unsupported=0.3) 0.01985 >>> D_baffle_holes(do=0.01905, L_unsupported=1.5) 0.019450000000000002 References ---------- .. [1] Standards of the Tubular Exchanger Manufacturers Association, Ninth edition, 2007, TEMA, New York. """ if do > 0.0318 or L_unsupported <= 0.914: # 1-1/4 inches and 36 inches extra = 0.0008 # depends on [control=['if'], data=[]] else: extra = 0.0004 d = do + extra return d
def display(obj, skiphidden=True, **printargs): """Print a view of obj, where obj is either a ctypes-derived class or an instance of such a class. Any additional keyword arguments are passed directly to the print function. This is mostly useful to introspect structures from an interactive session. """ top = findnode(obj) #------------------------------------------------------------------- # Iterate through the entire structure turning all the nodes into # tuples of strings for display. maxhex = len(hex(ctypes.sizeof(top.type))) - 2 def addrformat(addr): if isinstance(addr, int): return "0x{0:0{1}X}".format(addr, maxhex) else: intpart = int(addr) fracbits = int((addr - intpart) * 8) return "0x{0:0{1}X}'{2}".format(intpart, maxhex, fracbits) def formatval(here): if isinstance(here, BoundSimpleNode): return "{0}({1})".format(here.type.__name__, here.value) else: return str(here.value) if isinstance(top, UnboundNode): headers = ['Path', 'Addr', 'Type'] results = [ ((' ' * n.depth) + n.name, addrformat(n.baseoffset), n.type.__name__) for n in walknode(top, skiphidden) ] else: headers = ['Path', 'Addr', 'Value'] results = [ ((' ' * n.depth) + n.name, addrformat(n.baseoffset), formatval(n)) for n in walknode(top, skiphidden) ] #------------------------------------------------------------------- # Determine the maximum width of the text in each column, make the # column always that wide. widths = [ max(max(len(d[col]) for d in results), len(h)) for col, h in enumerate(headers) ] #------------------------------------------------------------------- # Print out the tabular data. def lp(args): print(*args, **printargs) lp(d.center(w) for d, w in zip(headers, widths)) lp('-' * w for w in widths) for r in results: lp(d.ljust(w) for d, w in zip(r, widths))
def function[display, parameter[obj, skiphidden]]: constant[Print a view of obj, where obj is either a ctypes-derived class or an instance of such a class. Any additional keyword arguments are passed directly to the print function. This is mostly useful to introspect structures from an interactive session. ] variable[top] assign[=] call[name[findnode], parameter[name[obj]]] variable[maxhex] assign[=] binary_operation[call[name[len], parameter[call[name[hex], parameter[call[name[ctypes].sizeof, parameter[name[top].type]]]]]] - constant[2]] def function[addrformat, parameter[addr]]: if call[name[isinstance], parameter[name[addr], name[int]]] begin[:] return[call[constant[0x{0:0{1}X}].format, parameter[name[addr], name[maxhex]]]] def function[formatval, parameter[here]]: if call[name[isinstance], parameter[name[here], name[BoundSimpleNode]]] begin[:] return[call[constant[{0}({1})].format, parameter[name[here].type.__name__, name[here].value]]] if call[name[isinstance], parameter[name[top], name[UnboundNode]]] begin[:] variable[headers] assign[=] list[[<ast.Constant object at 0x7da1b2216b60>, <ast.Constant object at 0x7da1b22160b0>, <ast.Constant object at 0x7da1b22157e0>]] variable[results] assign[=] <ast.ListComp object at 0x7da1b2215d50> variable[widths] assign[=] <ast.ListComp object at 0x7da1b2215930> def function[lp, parameter[args]]: call[name[print], parameter[<ast.Starred object at 0x7da1b2363eb0>]] call[name[lp], parameter[<ast.GeneratorExp object at 0x7da1b2363d00>]] call[name[lp], parameter[<ast.GeneratorExp object at 0x7da1b2363940>]] for taget[name[r]] in starred[name[results]] begin[:] call[name[lp], parameter[<ast.GeneratorExp object at 0x7da1b23637c0>]]
keyword[def] identifier[display] ( identifier[obj] , identifier[skiphidden] = keyword[True] ,** identifier[printargs] ): literal[string] identifier[top] = identifier[findnode] ( identifier[obj] ) identifier[maxhex] = identifier[len] ( identifier[hex] ( identifier[ctypes] . identifier[sizeof] ( identifier[top] . identifier[type] )))- literal[int] keyword[def] identifier[addrformat] ( identifier[addr] ): keyword[if] identifier[isinstance] ( identifier[addr] , identifier[int] ): keyword[return] literal[string] . identifier[format] ( identifier[addr] , identifier[maxhex] ) keyword[else] : identifier[intpart] = identifier[int] ( identifier[addr] ) identifier[fracbits] = identifier[int] (( identifier[addr] - identifier[intpart] )* literal[int] ) keyword[return] literal[string] . identifier[format] ( identifier[intpart] , identifier[maxhex] , identifier[fracbits] ) keyword[def] identifier[formatval] ( identifier[here] ): keyword[if] identifier[isinstance] ( identifier[here] , identifier[BoundSimpleNode] ): keyword[return] literal[string] . identifier[format] ( identifier[here] . identifier[type] . identifier[__name__] , identifier[here] . identifier[value] ) keyword[else] : keyword[return] identifier[str] ( identifier[here] . identifier[value] ) keyword[if] identifier[isinstance] ( identifier[top] , identifier[UnboundNode] ): identifier[headers] =[ literal[string] , literal[string] , literal[string] ] identifier[results] =[ (( literal[string] * identifier[n] . identifier[depth] )+ identifier[n] . identifier[name] , identifier[addrformat] ( identifier[n] . identifier[baseoffset] ), identifier[n] . identifier[type] . identifier[__name__] ) keyword[for] identifier[n] keyword[in] identifier[walknode] ( identifier[top] , identifier[skiphidden] ) ] keyword[else] : identifier[headers] =[ literal[string] , literal[string] , literal[string] ] identifier[results] =[ (( literal[string] * identifier[n] . identifier[depth] )+ identifier[n] . 
identifier[name] , identifier[addrformat] ( identifier[n] . identifier[baseoffset] ), identifier[formatval] ( identifier[n] )) keyword[for] identifier[n] keyword[in] identifier[walknode] ( identifier[top] , identifier[skiphidden] ) ] identifier[widths] =[ identifier[max] ( identifier[max] ( identifier[len] ( identifier[d] [ identifier[col] ]) keyword[for] identifier[d] keyword[in] identifier[results] ), identifier[len] ( identifier[h] )) keyword[for] identifier[col] , identifier[h] keyword[in] identifier[enumerate] ( identifier[headers] ) ] keyword[def] identifier[lp] ( identifier[args] ): identifier[print] (* identifier[args] ,** identifier[printargs] ) identifier[lp] ( identifier[d] . identifier[center] ( identifier[w] ) keyword[for] identifier[d] , identifier[w] keyword[in] identifier[zip] ( identifier[headers] , identifier[widths] )) identifier[lp] ( literal[string] * identifier[w] keyword[for] identifier[w] keyword[in] identifier[widths] ) keyword[for] identifier[r] keyword[in] identifier[results] : identifier[lp] ( identifier[d] . identifier[ljust] ( identifier[w] ) keyword[for] identifier[d] , identifier[w] keyword[in] identifier[zip] ( identifier[r] , identifier[widths] ))
def display(obj, skiphidden=True, **printargs): """Print a view of obj, where obj is either a ctypes-derived class or an instance of such a class. Any additional keyword arguments are passed directly to the print function. This is mostly useful to introspect structures from an interactive session. """ top = findnode(obj) #------------------------------------------------------------------- # Iterate through the entire structure turning all the nodes into # tuples of strings for display. maxhex = len(hex(ctypes.sizeof(top.type))) - 2 def addrformat(addr): if isinstance(addr, int): return '0x{0:0{1}X}'.format(addr, maxhex) # depends on [control=['if'], data=[]] else: intpart = int(addr) fracbits = int((addr - intpart) * 8) return "0x{0:0{1}X}'{2}".format(intpart, maxhex, fracbits) def formatval(here): if isinstance(here, BoundSimpleNode): return '{0}({1})'.format(here.type.__name__, here.value) # depends on [control=['if'], data=[]] else: return str(here.value) if isinstance(top, UnboundNode): headers = ['Path', 'Addr', 'Type'] results = [(' ' * n.depth + n.name, addrformat(n.baseoffset), n.type.__name__) for n in walknode(top, skiphidden)] # depends on [control=['if'], data=[]] else: headers = ['Path', 'Addr', 'Value'] results = [(' ' * n.depth + n.name, addrformat(n.baseoffset), formatval(n)) for n in walknode(top, skiphidden)] #------------------------------------------------------------------- # Determine the maximum width of the text in each column, make the # column always that wide. widths = [max(max((len(d[col]) for d in results)), len(h)) for (col, h) in enumerate(headers)] #------------------------------------------------------------------- # Print out the tabular data. def lp(args): print(*args, **printargs) lp((d.center(w) for (d, w) in zip(headers, widths))) lp(('-' * w for w in widths)) for r in results: lp((d.ljust(w) for (d, w) in zip(r, widths))) # depends on [control=['for'], data=['r']]
def parse(line): "DEPRECATED; see :func:`~skyfield.data.hipparcos.load_dataframe() instead." # See ftp://cdsarc.u-strasbg.fr/cats/I/239/ReadMe star = Star( ra=Angle(degrees=float(line[51:63])), dec=Angle(degrees=float(line[64:76])), ra_mas_per_year=float(line[87:95]), dec_mas_per_year=float(line[96:104]), parallax_mas=float(line[79:86]), names=[('HIP', int(line[8:14]))], ) star._position_au += star._velocity_au_per_d * days distance, dec, ra = to_polar(star._position_au) star.ra = Angle(radians=ra, preference='hours') star.dec = Angle(radians=dec) return star
def function[parse, parameter[line]]: constant[DEPRECATED; see :func:`~skyfield.data.hipparcos.load_dataframe() instead.] variable[star] assign[=] call[name[Star], parameter[]] <ast.AugAssign object at 0x7da1b179a530> <ast.Tuple object at 0x7da1b179a440> assign[=] call[name[to_polar], parameter[name[star]._position_au]] name[star].ra assign[=] call[name[Angle], parameter[]] name[star].dec assign[=] call[name[Angle], parameter[]] return[name[star]]
keyword[def] identifier[parse] ( identifier[line] ): literal[string] identifier[star] = identifier[Star] ( identifier[ra] = identifier[Angle] ( identifier[degrees] = identifier[float] ( identifier[line] [ literal[int] : literal[int] ])), identifier[dec] = identifier[Angle] ( identifier[degrees] = identifier[float] ( identifier[line] [ literal[int] : literal[int] ])), identifier[ra_mas_per_year] = identifier[float] ( identifier[line] [ literal[int] : literal[int] ]), identifier[dec_mas_per_year] = identifier[float] ( identifier[line] [ literal[int] : literal[int] ]), identifier[parallax_mas] = identifier[float] ( identifier[line] [ literal[int] : literal[int] ]), identifier[names] =[( literal[string] , identifier[int] ( identifier[line] [ literal[int] : literal[int] ]))], ) identifier[star] . identifier[_position_au] += identifier[star] . identifier[_velocity_au_per_d] * identifier[days] identifier[distance] , identifier[dec] , identifier[ra] = identifier[to_polar] ( identifier[star] . identifier[_position_au] ) identifier[star] . identifier[ra] = identifier[Angle] ( identifier[radians] = identifier[ra] , identifier[preference] = literal[string] ) identifier[star] . identifier[dec] = identifier[Angle] ( identifier[radians] = identifier[dec] ) keyword[return] identifier[star]
def parse(line): """DEPRECATED; see :func:`~skyfield.data.hipparcos.load_dataframe() instead.""" # See ftp://cdsarc.u-strasbg.fr/cats/I/239/ReadMe star = Star(ra=Angle(degrees=float(line[51:63])), dec=Angle(degrees=float(line[64:76])), ra_mas_per_year=float(line[87:95]), dec_mas_per_year=float(line[96:104]), parallax_mas=float(line[79:86]), names=[('HIP', int(line[8:14]))]) star._position_au += star._velocity_au_per_d * days (distance, dec, ra) = to_polar(star._position_au) star.ra = Angle(radians=ra, preference='hours') star.dec = Angle(radians=dec) return star
def _init_multicast_socket(self): """ Init multicast socket :rtype: None """ self.debug("()") # Create a UDP socket self._multicast_socket = socket.socket( socket.AF_INET, socket.SOCK_DGRAM ) # Allow reuse of addresses self._multicast_socket.setsockopt( socket.SOL_SOCKET, socket.SO_REUSEADDR, 1 ) # Set multicast interface to local_ip self._multicast_socket.setsockopt( socket.IPPROTO_IP, socket.IP_MULTICAST_IF, socket.inet_aton(self._multicast_ip) ) # Set multicast time-to-live # Should keep our multicast packets from escaping the local network self._multicast_socket.setsockopt( socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, self._multicast_ttl ) self._add_membership_multicast_socket() # Bind socket if platform.system().lower() == "darwin": self._multicast_socket.bind(("0.0.0.0", self._multicast_bind_port)) else: self._multicast_socket.bind( (self._multicast_ip, self._multicast_bind_port) ) self._listening.append(self._multicast_socket)
def function[_init_multicast_socket, parameter[self]]: constant[ Init multicast socket :rtype: None ] call[name[self].debug, parameter[constant[()]]] name[self]._multicast_socket assign[=] call[name[socket].socket, parameter[name[socket].AF_INET, name[socket].SOCK_DGRAM]] call[name[self]._multicast_socket.setsockopt, parameter[name[socket].SOL_SOCKET, name[socket].SO_REUSEADDR, constant[1]]] call[name[self]._multicast_socket.setsockopt, parameter[name[socket].IPPROTO_IP, name[socket].IP_MULTICAST_IF, call[name[socket].inet_aton, parameter[name[self]._multicast_ip]]]] call[name[self]._multicast_socket.setsockopt, parameter[name[socket].IPPROTO_IP, name[socket].IP_MULTICAST_TTL, name[self]._multicast_ttl]] call[name[self]._add_membership_multicast_socket, parameter[]] if compare[call[call[name[platform].system, parameter[]].lower, parameter[]] equal[==] constant[darwin]] begin[:] call[name[self]._multicast_socket.bind, parameter[tuple[[<ast.Constant object at 0x7da1b168e260>, <ast.Attribute object at 0x7da1b168d5a0>]]]] call[name[self]._listening.append, parameter[name[self]._multicast_socket]]
keyword[def] identifier[_init_multicast_socket] ( identifier[self] ): literal[string] identifier[self] . identifier[debug] ( literal[string] ) identifier[self] . identifier[_multicast_socket] = identifier[socket] . identifier[socket] ( identifier[socket] . identifier[AF_INET] , identifier[socket] . identifier[SOCK_DGRAM] ) identifier[self] . identifier[_multicast_socket] . identifier[setsockopt] ( identifier[socket] . identifier[SOL_SOCKET] , identifier[socket] . identifier[SO_REUSEADDR] , literal[int] ) identifier[self] . identifier[_multicast_socket] . identifier[setsockopt] ( identifier[socket] . identifier[IPPROTO_IP] , identifier[socket] . identifier[IP_MULTICAST_IF] , identifier[socket] . identifier[inet_aton] ( identifier[self] . identifier[_multicast_ip] ) ) identifier[self] . identifier[_multicast_socket] . identifier[setsockopt] ( identifier[socket] . identifier[IPPROTO_IP] , identifier[socket] . identifier[IP_MULTICAST_TTL] , identifier[self] . identifier[_multicast_ttl] ) identifier[self] . identifier[_add_membership_multicast_socket] () keyword[if] identifier[platform] . identifier[system] (). identifier[lower] ()== literal[string] : identifier[self] . identifier[_multicast_socket] . identifier[bind] (( literal[string] , identifier[self] . identifier[_multicast_bind_port] )) keyword[else] : identifier[self] . identifier[_multicast_socket] . identifier[bind] ( ( identifier[self] . identifier[_multicast_ip] , identifier[self] . identifier[_multicast_bind_port] ) ) identifier[self] . identifier[_listening] . identifier[append] ( identifier[self] . identifier[_multicast_socket] )
def _init_multicast_socket(self): """ Init multicast socket :rtype: None """ self.debug('()') # Create a UDP socket self._multicast_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # Allow reuse of addresses self._multicast_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) # Set multicast interface to local_ip self._multicast_socket.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_IF, socket.inet_aton(self._multicast_ip)) # Set multicast time-to-live # Should keep our multicast packets from escaping the local network self._multicast_socket.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, self._multicast_ttl) self._add_membership_multicast_socket() # Bind socket if platform.system().lower() == 'darwin': self._multicast_socket.bind(('0.0.0.0', self._multicast_bind_port)) # depends on [control=['if'], data=[]] else: self._multicast_socket.bind((self._multicast_ip, self._multicast_bind_port)) self._listening.append(self._multicast_socket)
def RegisterCustomFieldCodec(encoder, decoder): """Register a custom encoder/decoder for this field.""" def Register(field): _CUSTOM_FIELD_CODECS[field] = _Codec(encoder=encoder, decoder=decoder) return field return Register
def function[RegisterCustomFieldCodec, parameter[encoder, decoder]]: constant[Register a custom encoder/decoder for this field.] def function[Register, parameter[field]]: call[name[_CUSTOM_FIELD_CODECS]][name[field]] assign[=] call[name[_Codec], parameter[]] return[name[field]] return[name[Register]]
keyword[def] identifier[RegisterCustomFieldCodec] ( identifier[encoder] , identifier[decoder] ): literal[string] keyword[def] identifier[Register] ( identifier[field] ): identifier[_CUSTOM_FIELD_CODECS] [ identifier[field] ]= identifier[_Codec] ( identifier[encoder] = identifier[encoder] , identifier[decoder] = identifier[decoder] ) keyword[return] identifier[field] keyword[return] identifier[Register]
def RegisterCustomFieldCodec(encoder, decoder): """Register a custom encoder/decoder for this field.""" def Register(field): _CUSTOM_FIELD_CODECS[field] = _Codec(encoder=encoder, decoder=decoder) return field return Register
def validate_endpoint_data(self, endpoints, admin_port, internal_port, public_port, expected, openstack_release=None): """Validate endpoint data. Pick the correct validator based on OpenStack release. Expected data should be in the v2 format: { 'id': id, 'region': region, 'adminurl': adminurl, 'internalurl': internalurl, 'publicurl': publicurl, 'service_id': service_id} """ validation_function = self.validate_v2_endpoint_data xenial_queens = OPENSTACK_RELEASES_PAIRS.index('xenial_queens') if openstack_release and openstack_release >= xenial_queens: validation_function = self.validate_v3_endpoint_data expected = { 'id': expected['id'], 'region': expected['region'], 'region_id': 'RegionOne', 'url': self.valid_url, 'interface': self.not_null, 'service_id': expected['service_id']} return validation_function(endpoints, admin_port, internal_port, public_port, expected)
def function[validate_endpoint_data, parameter[self, endpoints, admin_port, internal_port, public_port, expected, openstack_release]]: constant[Validate endpoint data. Pick the correct validator based on OpenStack release. Expected data should be in the v2 format: { 'id': id, 'region': region, 'adminurl': adminurl, 'internalurl': internalurl, 'publicurl': publicurl, 'service_id': service_id} ] variable[validation_function] assign[=] name[self].validate_v2_endpoint_data variable[xenial_queens] assign[=] call[name[OPENSTACK_RELEASES_PAIRS].index, parameter[constant[xenial_queens]]] if <ast.BoolOp object at 0x7da1b121b370> begin[:] variable[validation_function] assign[=] name[self].validate_v3_endpoint_data variable[expected] assign[=] dictionary[[<ast.Constant object at 0x7da1b121ab00>, <ast.Constant object at 0x7da1b121a8f0>, <ast.Constant object at 0x7da1b121a980>, <ast.Constant object at 0x7da1b121b160>, <ast.Constant object at 0x7da1b1218a60>, <ast.Constant object at 0x7da1b12184f0>], [<ast.Subscript object at 0x7da1b1218040>, <ast.Subscript object at 0x7da1b1219120>, <ast.Constant object at 0x7da1b121a860>, <ast.Attribute object at 0x7da1b12184c0>, <ast.Attribute object at 0x7da1b1219240>, <ast.Subscript object at 0x7da1b12185e0>]] return[call[name[validation_function], parameter[name[endpoints], name[admin_port], name[internal_port], name[public_port], name[expected]]]]
keyword[def] identifier[validate_endpoint_data] ( identifier[self] , identifier[endpoints] , identifier[admin_port] , identifier[internal_port] , identifier[public_port] , identifier[expected] , identifier[openstack_release] = keyword[None] ): literal[string] identifier[validation_function] = identifier[self] . identifier[validate_v2_endpoint_data] identifier[xenial_queens] = identifier[OPENSTACK_RELEASES_PAIRS] . identifier[index] ( literal[string] ) keyword[if] identifier[openstack_release] keyword[and] identifier[openstack_release] >= identifier[xenial_queens] : identifier[validation_function] = identifier[self] . identifier[validate_v3_endpoint_data] identifier[expected] ={ literal[string] : identifier[expected] [ literal[string] ], literal[string] : identifier[expected] [ literal[string] ], literal[string] : literal[string] , literal[string] : identifier[self] . identifier[valid_url] , literal[string] : identifier[self] . identifier[not_null] , literal[string] : identifier[expected] [ literal[string] ]} keyword[return] identifier[validation_function] ( identifier[endpoints] , identifier[admin_port] , identifier[internal_port] , identifier[public_port] , identifier[expected] )
def validate_endpoint_data(self, endpoints, admin_port, internal_port, public_port, expected, openstack_release=None): """Validate endpoint data. Pick the correct validator based on OpenStack release. Expected data should be in the v2 format: { 'id': id, 'region': region, 'adminurl': adminurl, 'internalurl': internalurl, 'publicurl': publicurl, 'service_id': service_id} """ validation_function = self.validate_v2_endpoint_data xenial_queens = OPENSTACK_RELEASES_PAIRS.index('xenial_queens') if openstack_release and openstack_release >= xenial_queens: validation_function = self.validate_v3_endpoint_data expected = {'id': expected['id'], 'region': expected['region'], 'region_id': 'RegionOne', 'url': self.valid_url, 'interface': self.not_null, 'service_id': expected['service_id']} # depends on [control=['if'], data=[]] return validation_function(endpoints, admin_port, internal_port, public_port, expected)
def fast_ordering(self, structure, num_remove_dict, num_to_return=1): """ This method uses the matrix form of ewaldsum to calculate the ewald sums of the potential structures. This is on the order of 4 orders of magnitude faster when there are large numbers of permutations to consider. There are further optimizations possible (doing a smarter search of permutations for example), but this wont make a difference until the number of permutations is on the order of 30,000. """ self.logger.debug("Performing fast ordering") starttime = time.time() self.logger.debug("Performing initial ewald sum...") ewaldmatrix = EwaldSummation(structure).total_energy_matrix self.logger.debug("Ewald sum took {} seconds." .format(time.time() - starttime)) starttime = time.time() m_list = [] for indices, num in num_remove_dict.items(): m_list.append([0, num, list(indices), None]) self.logger.debug("Calling EwaldMinimizer...") minimizer = EwaldMinimizer(ewaldmatrix, m_list, num_to_return, PartialRemoveSitesTransformation.ALGO_FAST) self.logger.debug("Minimizing Ewald took {} seconds." .format(time.time() - starttime)) all_structures = [] lowest_energy = minimizer.output_lists[0][0] num_atoms = sum(structure.composition.values()) for output in minimizer.output_lists: s = structure.copy() del_indices = [] for manipulation in output[1]: if manipulation[1] is None: del_indices.append(manipulation[0]) else: s.replace(manipulation[0], manipulation[1]) s.remove_sites(del_indices) struct = s.get_sorted_structure() all_structures.append( {"energy": output[0], "energy_above_minimum": (output[0] - lowest_energy) / num_atoms, "structure": struct}) return all_structures
def function[fast_ordering, parameter[self, structure, num_remove_dict, num_to_return]]: constant[ This method uses the matrix form of ewaldsum to calculate the ewald sums of the potential structures. This is on the order of 4 orders of magnitude faster when there are large numbers of permutations to consider. There are further optimizations possible (doing a smarter search of permutations for example), but this wont make a difference until the number of permutations is on the order of 30,000. ] call[name[self].logger.debug, parameter[constant[Performing fast ordering]]] variable[starttime] assign[=] call[name[time].time, parameter[]] call[name[self].logger.debug, parameter[constant[Performing initial ewald sum...]]] variable[ewaldmatrix] assign[=] call[name[EwaldSummation], parameter[name[structure]]].total_energy_matrix call[name[self].logger.debug, parameter[call[constant[Ewald sum took {} seconds.].format, parameter[binary_operation[call[name[time].time, parameter[]] - name[starttime]]]]]] variable[starttime] assign[=] call[name[time].time, parameter[]] variable[m_list] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da18c4ce140>, <ast.Name object at 0x7da18c4cf760>]]] in starred[call[name[num_remove_dict].items, parameter[]]] begin[:] call[name[m_list].append, parameter[list[[<ast.Constant object at 0x7da18f812ce0>, <ast.Name object at 0x7da18f810280>, <ast.Call object at 0x7da18f811f00>, <ast.Constant object at 0x7da18f812f50>]]]] call[name[self].logger.debug, parameter[constant[Calling EwaldMinimizer...]]] variable[minimizer] assign[=] call[name[EwaldMinimizer], parameter[name[ewaldmatrix], name[m_list], name[num_to_return], name[PartialRemoveSitesTransformation].ALGO_FAST]] call[name[self].logger.debug, parameter[call[constant[Minimizing Ewald took {} seconds.].format, parameter[binary_operation[call[name[time].time, parameter[]] - name[starttime]]]]]] variable[all_structures] assign[=] list[[]] variable[lowest_energy] assign[=] 
call[call[name[minimizer].output_lists][constant[0]]][constant[0]] variable[num_atoms] assign[=] call[name[sum], parameter[call[name[structure].composition.values, parameter[]]]] for taget[name[output]] in starred[name[minimizer].output_lists] begin[:] variable[s] assign[=] call[name[structure].copy, parameter[]] variable[del_indices] assign[=] list[[]] for taget[name[manipulation]] in starred[call[name[output]][constant[1]]] begin[:] if compare[call[name[manipulation]][constant[1]] is constant[None]] begin[:] call[name[del_indices].append, parameter[call[name[manipulation]][constant[0]]]] call[name[s].remove_sites, parameter[name[del_indices]]] variable[struct] assign[=] call[name[s].get_sorted_structure, parameter[]] call[name[all_structures].append, parameter[dictionary[[<ast.Constant object at 0x7da18f810f10>, <ast.Constant object at 0x7da18f8111b0>, <ast.Constant object at 0x7da18f810e20>], [<ast.Subscript object at 0x7da18f813cd0>, <ast.BinOp object at 0x7da18f8124d0>, <ast.Name object at 0x7da18f812b00>]]]] return[name[all_structures]]
keyword[def] identifier[fast_ordering] ( identifier[self] , identifier[structure] , identifier[num_remove_dict] , identifier[num_to_return] = literal[int] ): literal[string] identifier[self] . identifier[logger] . identifier[debug] ( literal[string] ) identifier[starttime] = identifier[time] . identifier[time] () identifier[self] . identifier[logger] . identifier[debug] ( literal[string] ) identifier[ewaldmatrix] = identifier[EwaldSummation] ( identifier[structure] ). identifier[total_energy_matrix] identifier[self] . identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[time] . identifier[time] ()- identifier[starttime] )) identifier[starttime] = identifier[time] . identifier[time] () identifier[m_list] =[] keyword[for] identifier[indices] , identifier[num] keyword[in] identifier[num_remove_dict] . identifier[items] (): identifier[m_list] . identifier[append] ([ literal[int] , identifier[num] , identifier[list] ( identifier[indices] ), keyword[None] ]) identifier[self] . identifier[logger] . identifier[debug] ( literal[string] ) identifier[minimizer] = identifier[EwaldMinimizer] ( identifier[ewaldmatrix] , identifier[m_list] , identifier[num_to_return] , identifier[PartialRemoveSitesTransformation] . identifier[ALGO_FAST] ) identifier[self] . identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[time] . identifier[time] ()- identifier[starttime] )) identifier[all_structures] =[] identifier[lowest_energy] = identifier[minimizer] . identifier[output_lists] [ literal[int] ][ literal[int] ] identifier[num_atoms] = identifier[sum] ( identifier[structure] . identifier[composition] . identifier[values] ()) keyword[for] identifier[output] keyword[in] identifier[minimizer] . identifier[output_lists] : identifier[s] = identifier[structure] . 
identifier[copy] () identifier[del_indices] =[] keyword[for] identifier[manipulation] keyword[in] identifier[output] [ literal[int] ]: keyword[if] identifier[manipulation] [ literal[int] ] keyword[is] keyword[None] : identifier[del_indices] . identifier[append] ( identifier[manipulation] [ literal[int] ]) keyword[else] : identifier[s] . identifier[replace] ( identifier[manipulation] [ literal[int] ], identifier[manipulation] [ literal[int] ]) identifier[s] . identifier[remove_sites] ( identifier[del_indices] ) identifier[struct] = identifier[s] . identifier[get_sorted_structure] () identifier[all_structures] . identifier[append] ( { literal[string] : identifier[output] [ literal[int] ], literal[string] :( identifier[output] [ literal[int] ]- identifier[lowest_energy] ) / identifier[num_atoms] , literal[string] : identifier[struct] }) keyword[return] identifier[all_structures]
def fast_ordering(self, structure, num_remove_dict, num_to_return=1): """ This method uses the matrix form of ewaldsum to calculate the ewald sums of the potential structures. This is on the order of 4 orders of magnitude faster when there are large numbers of permutations to consider. There are further optimizations possible (doing a smarter search of permutations for example), but this wont make a difference until the number of permutations is on the order of 30,000. """ self.logger.debug('Performing fast ordering') starttime = time.time() self.logger.debug('Performing initial ewald sum...') ewaldmatrix = EwaldSummation(structure).total_energy_matrix self.logger.debug('Ewald sum took {} seconds.'.format(time.time() - starttime)) starttime = time.time() m_list = [] for (indices, num) in num_remove_dict.items(): m_list.append([0, num, list(indices), None]) # depends on [control=['for'], data=[]] self.logger.debug('Calling EwaldMinimizer...') minimizer = EwaldMinimizer(ewaldmatrix, m_list, num_to_return, PartialRemoveSitesTransformation.ALGO_FAST) self.logger.debug('Minimizing Ewald took {} seconds.'.format(time.time() - starttime)) all_structures = [] lowest_energy = minimizer.output_lists[0][0] num_atoms = sum(structure.composition.values()) for output in minimizer.output_lists: s = structure.copy() del_indices = [] for manipulation in output[1]: if manipulation[1] is None: del_indices.append(manipulation[0]) # depends on [control=['if'], data=[]] else: s.replace(manipulation[0], manipulation[1]) # depends on [control=['for'], data=['manipulation']] s.remove_sites(del_indices) struct = s.get_sorted_structure() all_structures.append({'energy': output[0], 'energy_above_minimum': (output[0] - lowest_energy) / num_atoms, 'structure': struct}) # depends on [control=['for'], data=['output']] return all_structures
def str_capitalize(x): """Capitalize the first letter of a string sample. :returns: an expression containing the capitalized strings. Example: >>> import vaex >>> text = ['Something', 'very pretty', 'is coming', 'our', 'way.'] >>> df = vaex.from_arrays(text=text) >>> df # text 0 Something 1 very pretty 2 is coming 3 our 4 way. >>> df.text.str.capitalize() Expression = str_capitalize(text) Length: 5 dtype: str (expression) --------------------------------- 0 Something 1 Very pretty 2 Is coming 3 Our 4 Way. """ sl = _to_string_sequence(x).capitalize() return column.ColumnStringArrow(sl.bytes, sl.indices, sl.length, sl.offset, string_sequence=sl)
def function[str_capitalize, parameter[x]]: constant[Capitalize the first letter of a string sample. :returns: an expression containing the capitalized strings. Example: >>> import vaex >>> text = ['Something', 'very pretty', 'is coming', 'our', 'way.'] >>> df = vaex.from_arrays(text=text) >>> df # text 0 Something 1 very pretty 2 is coming 3 our 4 way. >>> df.text.str.capitalize() Expression = str_capitalize(text) Length: 5 dtype: str (expression) --------------------------------- 0 Something 1 Very pretty 2 Is coming 3 Our 4 Way. ] variable[sl] assign[=] call[call[name[_to_string_sequence], parameter[name[x]]].capitalize, parameter[]] return[call[name[column].ColumnStringArrow, parameter[name[sl].bytes, name[sl].indices, name[sl].length, name[sl].offset]]]
keyword[def] identifier[str_capitalize] ( identifier[x] ): literal[string] identifier[sl] = identifier[_to_string_sequence] ( identifier[x] ). identifier[capitalize] () keyword[return] identifier[column] . identifier[ColumnStringArrow] ( identifier[sl] . identifier[bytes] , identifier[sl] . identifier[indices] , identifier[sl] . identifier[length] , identifier[sl] . identifier[offset] , identifier[string_sequence] = identifier[sl] )
def str_capitalize(x): """Capitalize the first letter of a string sample. :returns: an expression containing the capitalized strings. Example: >>> import vaex >>> text = ['Something', 'very pretty', 'is coming', 'our', 'way.'] >>> df = vaex.from_arrays(text=text) >>> df # text 0 Something 1 very pretty 2 is coming 3 our 4 way. >>> df.text.str.capitalize() Expression = str_capitalize(text) Length: 5 dtype: str (expression) --------------------------------- 0 Something 1 Very pretty 2 Is coming 3 Our 4 Way. """ sl = _to_string_sequence(x).capitalize() return column.ColumnStringArrow(sl.bytes, sl.indices, sl.length, sl.offset, string_sequence=sl)
def control(self, on=[], off=[]): """ This method serves as the primary interaction point to the controls interface. - The 'on' and 'off' arguments can either be a list or a single string. This allows for both individual device control and batch controls. Note: Both the onlist and offlist are optional. If only one item is being managed, it can be passed as a string. Usage: - Turning off all devices: ctrlobj.control(off="all") - Turning on all devices: ctrlobj.control(on="all") - Turning on the light and fan ONLY (for example) ctrlobj.control(on=["light", "fan"]) - Turning on the light and turning off the fan (for example) ctrolobj.control(on="light", off="fan") """ controls = {"light", "valve", "fan", "pump"} def cast_arg(arg): if type(arg) is str: if arg == "all": return controls else: return {arg} & controls else: return set(arg) & controls # User has requested individual controls. for item in cast_arg(on): self.manage(item, "on") for item in cast_arg(off): self.manage(item, "off") sleep(.01) # Force delay to throttle requests return self.update()
def function[control, parameter[self, on, off]]: constant[ This method serves as the primary interaction point to the controls interface. - The 'on' and 'off' arguments can either be a list or a single string. This allows for both individual device control and batch controls. Note: Both the onlist and offlist are optional. If only one item is being managed, it can be passed as a string. Usage: - Turning off all devices: ctrlobj.control(off="all") - Turning on all devices: ctrlobj.control(on="all") - Turning on the light and fan ONLY (for example) ctrlobj.control(on=["light", "fan"]) - Turning on the light and turning off the fan (for example) ctrolobj.control(on="light", off="fan") ] variable[controls] assign[=] <ast.Set object at 0x7da1b0b39570> def function[cast_arg, parameter[arg]]: if compare[call[name[type], parameter[name[arg]]] is name[str]] begin[:] if compare[name[arg] equal[==] constant[all]] begin[:] return[name[controls]] for taget[name[item]] in starred[call[name[cast_arg], parameter[name[on]]]] begin[:] call[name[self].manage, parameter[name[item], constant[on]]] for taget[name[item]] in starred[call[name[cast_arg], parameter[name[off]]]] begin[:] call[name[self].manage, parameter[name[item], constant[off]]] call[name[sleep], parameter[constant[0.01]]] return[call[name[self].update, parameter[]]]
keyword[def] identifier[control] ( identifier[self] , identifier[on] =[], identifier[off] =[]): literal[string] identifier[controls] ={ literal[string] , literal[string] , literal[string] , literal[string] } keyword[def] identifier[cast_arg] ( identifier[arg] ): keyword[if] identifier[type] ( identifier[arg] ) keyword[is] identifier[str] : keyword[if] identifier[arg] == literal[string] : keyword[return] identifier[controls] keyword[else] : keyword[return] { identifier[arg] }& identifier[controls] keyword[else] : keyword[return] identifier[set] ( identifier[arg] )& identifier[controls] keyword[for] identifier[item] keyword[in] identifier[cast_arg] ( identifier[on] ): identifier[self] . identifier[manage] ( identifier[item] , literal[string] ) keyword[for] identifier[item] keyword[in] identifier[cast_arg] ( identifier[off] ): identifier[self] . identifier[manage] ( identifier[item] , literal[string] ) identifier[sleep] ( literal[int] ) keyword[return] identifier[self] . identifier[update] ()
def control(self, on=[], off=[]): """ This method serves as the primary interaction point to the controls interface. - The 'on' and 'off' arguments can either be a list or a single string. This allows for both individual device control and batch controls. Note: Both the onlist and offlist are optional. If only one item is being managed, it can be passed as a string. Usage: - Turning off all devices: ctrlobj.control(off="all") - Turning on all devices: ctrlobj.control(on="all") - Turning on the light and fan ONLY (for example) ctrlobj.control(on=["light", "fan"]) - Turning on the light and turning off the fan (for example) ctrolobj.control(on="light", off="fan") """ controls = {'light', 'valve', 'fan', 'pump'} def cast_arg(arg): if type(arg) is str: if arg == 'all': return controls # depends on [control=['if'], data=[]] else: return {arg} & controls # depends on [control=['if'], data=[]] else: return set(arg) & controls # User has requested individual controls. for item in cast_arg(on): self.manage(item, 'on') # depends on [control=['for'], data=['item']] for item in cast_arg(off): self.manage(item, 'off') # depends on [control=['for'], data=['item']] sleep(0.01) # Force delay to throttle requests return self.update()
def stop(self): """ Method for shutting down the watcher. All config file observers are stopped and their threads joined, along with the worker thread pool. """ self.shutdown.set() for monitor in self.observers: monitor.stop() self.wind_down() for monitor in self.observers: monitor.join() for thread in self.thread_pool.values(): thread.join() self.work_pool.shutdown()
def function[stop, parameter[self]]: constant[ Method for shutting down the watcher. All config file observers are stopped and their threads joined, along with the worker thread pool. ] call[name[self].shutdown.set, parameter[]] for taget[name[monitor]] in starred[name[self].observers] begin[:] call[name[monitor].stop, parameter[]] call[name[self].wind_down, parameter[]] for taget[name[monitor]] in starred[name[self].observers] begin[:] call[name[monitor].join, parameter[]] for taget[name[thread]] in starred[call[name[self].thread_pool.values, parameter[]]] begin[:] call[name[thread].join, parameter[]] call[name[self].work_pool.shutdown, parameter[]]
keyword[def] identifier[stop] ( identifier[self] ): literal[string] identifier[self] . identifier[shutdown] . identifier[set] () keyword[for] identifier[monitor] keyword[in] identifier[self] . identifier[observers] : identifier[monitor] . identifier[stop] () identifier[self] . identifier[wind_down] () keyword[for] identifier[monitor] keyword[in] identifier[self] . identifier[observers] : identifier[monitor] . identifier[join] () keyword[for] identifier[thread] keyword[in] identifier[self] . identifier[thread_pool] . identifier[values] (): identifier[thread] . identifier[join] () identifier[self] . identifier[work_pool] . identifier[shutdown] ()
def stop(self): """ Method for shutting down the watcher. All config file observers are stopped and their threads joined, along with the worker thread pool. """ self.shutdown.set() for monitor in self.observers: monitor.stop() # depends on [control=['for'], data=['monitor']] self.wind_down() for monitor in self.observers: monitor.join() # depends on [control=['for'], data=['monitor']] for thread in self.thread_pool.values(): thread.join() # depends on [control=['for'], data=['thread']] self.work_pool.shutdown()
def sortBy(self, col, *cols): """Sorts the output in each bucket by the given columns on the file system. :param col: a name of a column, or a list of names. :param cols: additional names (optional). If `col` is a list it should be empty. >>> (df.write.format('parquet') # doctest: +SKIP ... .bucketBy(100, 'year', 'month') ... .sortBy('day') ... .mode("overwrite") ... .saveAsTable('sorted_bucketed_table')) """ if isinstance(col, (list, tuple)): if cols: raise ValueError("col is a {0} but cols are not empty".format(type(col))) col, cols = col[0], col[1:] if not all(isinstance(c, basestring) for c in cols) or not(isinstance(col, basestring)): raise TypeError("all names should be `str`") self._jwrite = self._jwrite.sortBy(col, _to_seq(self._spark._sc, cols)) return self
def function[sortBy, parameter[self, col]]: constant[Sorts the output in each bucket by the given columns on the file system. :param col: a name of a column, or a list of names. :param cols: additional names (optional). If `col` is a list it should be empty. >>> (df.write.format('parquet') # doctest: +SKIP ... .bucketBy(100, 'year', 'month') ... .sortBy('day') ... .mode("overwrite") ... .saveAsTable('sorted_bucketed_table')) ] if call[name[isinstance], parameter[name[col], tuple[[<ast.Name object at 0x7da1b20b5930>, <ast.Name object at 0x7da1b20b5180>]]]] begin[:] if name[cols] begin[:] <ast.Raise object at 0x7da1b20b4d60> <ast.Tuple object at 0x7da1b20b5120> assign[=] tuple[[<ast.Subscript object at 0x7da1b20b49a0>, <ast.Subscript object at 0x7da1b20b56c0>]] if <ast.BoolOp object at 0x7da1b20b4f10> begin[:] <ast.Raise object at 0x7da20c992920> name[self]._jwrite assign[=] call[name[self]._jwrite.sortBy, parameter[name[col], call[name[_to_seq], parameter[name[self]._spark._sc, name[cols]]]]] return[name[self]]
keyword[def] identifier[sortBy] ( identifier[self] , identifier[col] ,* identifier[cols] ): literal[string] keyword[if] identifier[isinstance] ( identifier[col] ,( identifier[list] , identifier[tuple] )): keyword[if] identifier[cols] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[type] ( identifier[col] ))) identifier[col] , identifier[cols] = identifier[col] [ literal[int] ], identifier[col] [ literal[int] :] keyword[if] keyword[not] identifier[all] ( identifier[isinstance] ( identifier[c] , identifier[basestring] ) keyword[for] identifier[c] keyword[in] identifier[cols] ) keyword[or] keyword[not] ( identifier[isinstance] ( identifier[col] , identifier[basestring] )): keyword[raise] identifier[TypeError] ( literal[string] ) identifier[self] . identifier[_jwrite] = identifier[self] . identifier[_jwrite] . identifier[sortBy] ( identifier[col] , identifier[_to_seq] ( identifier[self] . identifier[_spark] . identifier[_sc] , identifier[cols] )) keyword[return] identifier[self]
def sortBy(self, col, *cols): """Sorts the output in each bucket by the given columns on the file system. :param col: a name of a column, or a list of names. :param cols: additional names (optional). If `col` is a list it should be empty. >>> (df.write.format('parquet') # doctest: +SKIP ... .bucketBy(100, 'year', 'month') ... .sortBy('day') ... .mode("overwrite") ... .saveAsTable('sorted_bucketed_table')) """ if isinstance(col, (list, tuple)): if cols: raise ValueError('col is a {0} but cols are not empty'.format(type(col))) # depends on [control=['if'], data=[]] (col, cols) = (col[0], col[1:]) # depends on [control=['if'], data=[]] if not all((isinstance(c, basestring) for c in cols)) or not isinstance(col, basestring): raise TypeError('all names should be `str`') # depends on [control=['if'], data=[]] self._jwrite = self._jwrite.sortBy(col, _to_seq(self._spark._sc, cols)) return self
def repair_central_directory(zipFile, is_file_instance): # source: https://bitbucket.org/openpyxl/openpyxl/src/93604327bce7aac5e8270674579af76d390e09c0/openpyxl/reader/excel.py?at=default&fileviewer=file-view-default ''' trims trailing data from the central directory code taken from http://stackoverflow.com/a/7457686/570216, courtesy of Uri Cohen ''' f = zipFile if is_file_instance else open(zipFile, 'rb+') data = f.read() pos = data.find(CENTRAL_DIRECTORY_SIGNATURE) # End of central directory signature if (pos > 0): sio = BytesIO(data) sio.seek(pos + 22) # size of 'ZIP end of central directory record' sio.truncate() sio.seek(0) return sio f.seek(0) return f
def function[repair_central_directory, parameter[zipFile, is_file_instance]]: constant[ trims trailing data from the central directory code taken from http://stackoverflow.com/a/7457686/570216, courtesy of Uri Cohen ] variable[f] assign[=] <ast.IfExp object at 0x7da2047e9120> variable[data] assign[=] call[name[f].read, parameter[]] variable[pos] assign[=] call[name[data].find, parameter[name[CENTRAL_DIRECTORY_SIGNATURE]]] if compare[name[pos] greater[>] constant[0]] begin[:] variable[sio] assign[=] call[name[BytesIO], parameter[name[data]]] call[name[sio].seek, parameter[binary_operation[name[pos] + constant[22]]]] call[name[sio].truncate, parameter[]] call[name[sio].seek, parameter[constant[0]]] return[name[sio]] call[name[f].seek, parameter[constant[0]]] return[name[f]]
keyword[def] identifier[repair_central_directory] ( identifier[zipFile] , identifier[is_file_instance] ): literal[string] identifier[f] = identifier[zipFile] keyword[if] identifier[is_file_instance] keyword[else] identifier[open] ( identifier[zipFile] , literal[string] ) identifier[data] = identifier[f] . identifier[read] () identifier[pos] = identifier[data] . identifier[find] ( identifier[CENTRAL_DIRECTORY_SIGNATURE] ) keyword[if] ( identifier[pos] > literal[int] ): identifier[sio] = identifier[BytesIO] ( identifier[data] ) identifier[sio] . identifier[seek] ( identifier[pos] + literal[int] ) identifier[sio] . identifier[truncate] () identifier[sio] . identifier[seek] ( literal[int] ) keyword[return] identifier[sio] identifier[f] . identifier[seek] ( literal[int] ) keyword[return] identifier[f]
def repair_central_directory(zipFile, is_file_instance): # source: https://bitbucket.org/openpyxl/openpyxl/src/93604327bce7aac5e8270674579af76d390e09c0/openpyxl/reader/excel.py?at=default&fileviewer=file-view-default ' trims trailing data from the central directory\n code taken from http://stackoverflow.com/a/7457686/570216, courtesy of Uri Cohen\n ' f = zipFile if is_file_instance else open(zipFile, 'rb+') data = f.read() pos = data.find(CENTRAL_DIRECTORY_SIGNATURE) # End of central directory signature if pos > 0: sio = BytesIO(data) sio.seek(pos + 22) # size of 'ZIP end of central directory record' sio.truncate() sio.seek(0) return sio # depends on [control=['if'], data=['pos']] f.seek(0) return f
def notify_slaves(self): """Checks to see if slaves should be notified, and notifies them if needed""" if self.disable_slave_notify is not None: LOGGER.debug('Slave notifications disabled') return False if self.zone_data()['kind'] == 'Master': response_code = self._put('/zones/' + self.domain + '/notify').status_code if response_code == 200: LOGGER.debug('Slave(s) notified') return True LOGGER.debug('Slave notification failed with code %i', response_code) else: LOGGER.debug('Zone type should be \'Master\' for slave notifications') return False
def function[notify_slaves, parameter[self]]: constant[Checks to see if slaves should be notified, and notifies them if needed] if compare[name[self].disable_slave_notify is_not constant[None]] begin[:] call[name[LOGGER].debug, parameter[constant[Slave notifications disabled]]] return[constant[False]] if compare[call[call[name[self].zone_data, parameter[]]][constant[kind]] equal[==] constant[Master]] begin[:] variable[response_code] assign[=] call[name[self]._put, parameter[binary_operation[binary_operation[constant[/zones/] + name[self].domain] + constant[/notify]]]].status_code if compare[name[response_code] equal[==] constant[200]] begin[:] call[name[LOGGER].debug, parameter[constant[Slave(s) notified]]] return[constant[True]] call[name[LOGGER].debug, parameter[constant[Slave notification failed with code %i], name[response_code]]] return[constant[False]]
keyword[def] identifier[notify_slaves] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[disable_slave_notify] keyword[is] keyword[not] keyword[None] : identifier[LOGGER] . identifier[debug] ( literal[string] ) keyword[return] keyword[False] keyword[if] identifier[self] . identifier[zone_data] ()[ literal[string] ]== literal[string] : identifier[response_code] = identifier[self] . identifier[_put] ( literal[string] + identifier[self] . identifier[domain] + literal[string] ). identifier[status_code] keyword[if] identifier[response_code] == literal[int] : identifier[LOGGER] . identifier[debug] ( literal[string] ) keyword[return] keyword[True] identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[response_code] ) keyword[else] : identifier[LOGGER] . identifier[debug] ( literal[string] ) keyword[return] keyword[False]
def notify_slaves(self): """Checks to see if slaves should be notified, and notifies them if needed""" if self.disable_slave_notify is not None: LOGGER.debug('Slave notifications disabled') return False # depends on [control=['if'], data=[]] if self.zone_data()['kind'] == 'Master': response_code = self._put('/zones/' + self.domain + '/notify').status_code if response_code == 200: LOGGER.debug('Slave(s) notified') return True # depends on [control=['if'], data=[]] LOGGER.debug('Slave notification failed with code %i', response_code) # depends on [control=['if'], data=[]] else: LOGGER.debug("Zone type should be 'Master' for slave notifications") return False
def show_tricky_tasks(self, verbose=0): """ Print list of tricky tasks i.e. tasks that have been restarted or launched more than once or tasks with corrections. Args: verbose: Verbosity level. If > 0, task history and corrections (if any) are printed. """ nids, tasks = [], [] for task in self.iflat_tasks(): if task.num_launches > 1 or any(n > 0 for n in (task.num_restarts, task.num_corrections)): nids.append(task.node_id) tasks.append(task) if not nids: cprint("Everything's fine, no tricky tasks found", color="green") else: self.show_status(nids=nids) if not verbose: print("Use --verbose to print task history.") return for nid, task in zip(nids, tasks): cprint(repr(task), **task.status.color_opts) self.show_history(nids=[nid], full_history=False, metadata=False) #if task.num_restarts: # self.show_restarts(nids=[nid]) if task.num_corrections: self.show_corrections(nids=[nid])
def function[show_tricky_tasks, parameter[self, verbose]]: constant[ Print list of tricky tasks i.e. tasks that have been restarted or launched more than once or tasks with corrections. Args: verbose: Verbosity level. If > 0, task history and corrections (if any) are printed. ] <ast.Tuple object at 0x7da18ede7280> assign[=] tuple[[<ast.List object at 0x7da18ede4c10>, <ast.List object at 0x7da18ede6260>]] for taget[name[task]] in starred[call[name[self].iflat_tasks, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da18ede4460> begin[:] call[name[nids].append, parameter[name[task].node_id]] call[name[tasks].append, parameter[name[task]]] if <ast.UnaryOp object at 0x7da2043456c0> begin[:] call[name[cprint], parameter[constant[Everything's fine, no tricky tasks found]]]
keyword[def] identifier[show_tricky_tasks] ( identifier[self] , identifier[verbose] = literal[int] ): literal[string] identifier[nids] , identifier[tasks] =[],[] keyword[for] identifier[task] keyword[in] identifier[self] . identifier[iflat_tasks] (): keyword[if] identifier[task] . identifier[num_launches] > literal[int] keyword[or] identifier[any] ( identifier[n] > literal[int] keyword[for] identifier[n] keyword[in] ( identifier[task] . identifier[num_restarts] , identifier[task] . identifier[num_corrections] )): identifier[nids] . identifier[append] ( identifier[task] . identifier[node_id] ) identifier[tasks] . identifier[append] ( identifier[task] ) keyword[if] keyword[not] identifier[nids] : identifier[cprint] ( literal[string] , identifier[color] = literal[string] ) keyword[else] : identifier[self] . identifier[show_status] ( identifier[nids] = identifier[nids] ) keyword[if] keyword[not] identifier[verbose] : identifier[print] ( literal[string] ) keyword[return] keyword[for] identifier[nid] , identifier[task] keyword[in] identifier[zip] ( identifier[nids] , identifier[tasks] ): identifier[cprint] ( identifier[repr] ( identifier[task] ),** identifier[task] . identifier[status] . identifier[color_opts] ) identifier[self] . identifier[show_history] ( identifier[nids] =[ identifier[nid] ], identifier[full_history] = keyword[False] , identifier[metadata] = keyword[False] ) keyword[if] identifier[task] . identifier[num_corrections] : identifier[self] . identifier[show_corrections] ( identifier[nids] =[ identifier[nid] ])
def show_tricky_tasks(self, verbose=0): """ Print list of tricky tasks i.e. tasks that have been restarted or launched more than once or tasks with corrections. Args: verbose: Verbosity level. If > 0, task history and corrections (if any) are printed. """ (nids, tasks) = ([], []) for task in self.iflat_tasks(): if task.num_launches > 1 or any((n > 0 for n in (task.num_restarts, task.num_corrections))): nids.append(task.node_id) tasks.append(task) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['task']] if not nids: cprint("Everything's fine, no tricky tasks found", color='green') # depends on [control=['if'], data=[]] else: self.show_status(nids=nids) if not verbose: print('Use --verbose to print task history.') return # depends on [control=['if'], data=[]] for (nid, task) in zip(nids, tasks): cprint(repr(task), **task.status.color_opts) self.show_history(nids=[nid], full_history=False, metadata=False) #if task.num_restarts: # self.show_restarts(nids=[nid]) if task.num_corrections: self.show_corrections(nids=[nid]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
def Run(self, arg):
    """Does the actual work.

    Applies a configuration-update request to the client's config. The
    request is first validated against the whitelist of updatable fields,
    and (on non-Windows platforms) the resulting config is written to a
    temporary file and re-loaded as a canary before the real config is
    touched.

    Args:
      arg: The update request; a mapping of config field names to new
        values. NOTE(review): it is iterated with ``iteritems(arg)``, so it
        is presumably a dict or protobuf-backed mapping — confirm against
        the caller.

    Raises:
      ValueError: If the client is Fleetspeak enabled, or if the request
        touches any field outside ``UpdateConfiguration.UPDATABLE_FIELDS``.
    """
    try:
      if self.grr_worker.client.FleetspeakEnabled():
        raise ValueError("Not supported on Fleetspeak enabled clients.")
    # Clients without a FleetspeakEnabled() accessor are treated as not
    # Fleetspeak enabled: the missing attribute is deliberately swallowed.
    except AttributeError:
      pass

    # Normalize keys to plain str so membership tests against
    # UPDATABLE_FIELDS behave consistently.
    smart_arg = {str(field): value for field, value in iteritems(arg)}

    disallowed_fields = [
        field for field in smart_arg
        if field not in UpdateConfiguration.UPDATABLE_FIELDS
    ]

    # Reject the whole request if it mentions any restricted field; no
    # partial application of the allowed subset.
    if disallowed_fields:
      raise ValueError("Received an update request for restricted field(s) %s." %
                       ",".join(disallowed_fields))

    if platform.system() != "Windows":
      # Check config validity before really applying the changes. This isn't
      # implemented for our Windows clients though, whose configs are stored in
      # the registry, as opposed to in the filesystem.
      canary_config = config.CONFIG.CopyConfig()

      # Prepare a temporary file we'll write changes to.
      with tempfiles.CreateGRRTempFile(mode="w+") as temp_fd:
        temp_filename = temp_fd.name

      # Write canary_config changes to temp_filename.
      canary_config.SetWriteBack(temp_filename)
      self._UpdateConfig(smart_arg, canary_config)

      try:
        # Assert temp_filename is usable by loading it.
        canary_config.SetWriteBack(temp_filename)
      # Wide exception handling passed here from config_lib.py...
      except Exception:  # pylint: disable=broad-except
        logging.warning("Updated config file %s is not usable.", temp_filename)
        raise

      # If temp_filename works, remove it (if not, it's useful for debugging).
      os.unlink(temp_filename)

    # The changes seem to work, so push them to the real config.
    self._UpdateConfig(smart_arg, config.CONFIG)
def function[Run, parameter[self, arg]]: constant[Does the actual work.] <ast.Try object at 0x7da1b1d93640> variable[smart_arg] assign[=] <ast.DictComp object at 0x7da1b1b6de70> variable[disallowed_fields] assign[=] <ast.ListComp object at 0x7da1b1b6d8a0> if name[disallowed_fields] begin[:] <ast.Raise object at 0x7da1b1b6c160> if compare[call[name[platform].system, parameter[]] not_equal[!=] constant[Windows]] begin[:] variable[canary_config] assign[=] call[name[config].CONFIG.CopyConfig, parameter[]] with call[name[tempfiles].CreateGRRTempFile, parameter[]] begin[:] variable[temp_filename] assign[=] name[temp_fd].name call[name[canary_config].SetWriteBack, parameter[name[temp_filename]]] call[name[self]._UpdateConfig, parameter[name[smart_arg], name[canary_config]]] <ast.Try object at 0x7da1b1cc0460> call[name[os].unlink, parameter[name[temp_filename]]] call[name[self]._UpdateConfig, parameter[name[smart_arg], name[config].CONFIG]]
keyword[def] identifier[Run] ( identifier[self] , identifier[arg] ): literal[string] keyword[try] : keyword[if] identifier[self] . identifier[grr_worker] . identifier[client] . identifier[FleetspeakEnabled] (): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[except] identifier[AttributeError] : keyword[pass] identifier[smart_arg] ={ identifier[str] ( identifier[field] ): identifier[value] keyword[for] identifier[field] , identifier[value] keyword[in] identifier[iteritems] ( identifier[arg] )} identifier[disallowed_fields] =[ identifier[field] keyword[for] identifier[field] keyword[in] identifier[smart_arg] keyword[if] identifier[field] keyword[not] keyword[in] identifier[UpdateConfiguration] . identifier[UPDATABLE_FIELDS] ] keyword[if] identifier[disallowed_fields] : keyword[raise] identifier[ValueError] ( literal[string] % literal[string] . identifier[join] ( identifier[disallowed_fields] )) keyword[if] identifier[platform] . identifier[system] ()!= literal[string] : identifier[canary_config] = identifier[config] . identifier[CONFIG] . identifier[CopyConfig] () keyword[with] identifier[tempfiles] . identifier[CreateGRRTempFile] ( identifier[mode] = literal[string] ) keyword[as] identifier[temp_fd] : identifier[temp_filename] = identifier[temp_fd] . identifier[name] identifier[canary_config] . identifier[SetWriteBack] ( identifier[temp_filename] ) identifier[self] . identifier[_UpdateConfig] ( identifier[smart_arg] , identifier[canary_config] ) keyword[try] : identifier[canary_config] . identifier[SetWriteBack] ( identifier[temp_filename] ) keyword[except] identifier[Exception] : identifier[logging] . identifier[warning] ( literal[string] , identifier[temp_filename] ) keyword[raise] identifier[os] . identifier[unlink] ( identifier[temp_filename] ) identifier[self] . identifier[_UpdateConfig] ( identifier[smart_arg] , identifier[config] . identifier[CONFIG] )
def Run(self, arg): """Does the actual work.""" try: if self.grr_worker.client.FleetspeakEnabled(): raise ValueError('Not supported on Fleetspeak enabled clients.') # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except AttributeError: pass # depends on [control=['except'], data=[]] smart_arg = {str(field): value for (field, value) in iteritems(arg)} disallowed_fields = [field for field in smart_arg if field not in UpdateConfiguration.UPDATABLE_FIELDS] if disallowed_fields: raise ValueError('Received an update request for restricted field(s) %s.' % ','.join(disallowed_fields)) # depends on [control=['if'], data=[]] if platform.system() != 'Windows': # Check config validity before really applying the changes. This isn't # implemented for our Windows clients though, whose configs are stored in # the registry, as opposed to in the filesystem. canary_config = config.CONFIG.CopyConfig() # Prepare a temporary file we'll write changes to. with tempfiles.CreateGRRTempFile(mode='w+') as temp_fd: temp_filename = temp_fd.name # depends on [control=['with'], data=['temp_fd']] # Write canary_config changes to temp_filename. canary_config.SetWriteBack(temp_filename) self._UpdateConfig(smart_arg, canary_config) try: # Assert temp_filename is usable by loading it. canary_config.SetWriteBack(temp_filename) # depends on [control=['try'], data=[]] # Wide exception handling passed here from config_lib.py... except Exception: # pylint: disable=broad-except logging.warning('Updated config file %s is not usable.', temp_filename) raise # depends on [control=['except'], data=[]] # If temp_filename works, remove it (if not, it's useful for debugging). os.unlink(temp_filename) # depends on [control=['if'], data=[]] # The changes seem to work, so push them to the real config. self._UpdateConfig(smart_arg, config.CONFIG)
def draw(self):
    '''
    Draws samples from the `true` distribution.

    Pulls a single batch of observed data from the feature generator and
    normalizes it according to ``self.__norm_mode`` (``z_score``,
    ``min_max`` or ``tanh``); any other mode returns the data unscaled.

    Returns:
        `np.ndarray` of samples.
    '''
    # Take only the first batch the generator yields; the remaining
    # batches are left untouched for later calls.
    observed_arr = None
    for batch_tuple in self.__feature_generator.generate():
        observed_arr = batch_tuple[0]
        break

    observed_arr = observed_arr.astype(float)

    norm_mode = self.__norm_mode
    if norm_mode == "z_score":
        deviation = observed_arr.std()
        if deviation != 0:
            observed_arr = (observed_arr - observed_arr.mean()) / deviation
    elif norm_mode == "min_max":
        spread = observed_arr.max() - observed_arr.min()
        if spread != 0:
            observed_arr = (observed_arr - observed_arr.min()) / spread
    elif norm_mode == "tanh":
        observed_arr = np.tanh(observed_arr)

    return observed_arr
def function[draw, parameter[self]]: constant[ Draws samples from the `true` distribution. Returns: `np.ndarray` of samples. ] variable[observed_arr] assign[=] constant[None] for taget[name[result_tuple]] in starred[call[name[self].__feature_generator.generate, parameter[]]] begin[:] variable[observed_arr] assign[=] call[name[result_tuple]][constant[0]] break variable[observed_arr] assign[=] call[name[observed_arr].astype, parameter[name[float]]] if compare[name[self].__norm_mode equal[==] constant[z_score]] begin[:] if compare[call[name[observed_arr].std, parameter[]] not_equal[!=] constant[0]] begin[:] variable[observed_arr] assign[=] binary_operation[binary_operation[name[observed_arr] - call[name[observed_arr].mean, parameter[]]] / call[name[observed_arr].std, parameter[]]] return[name[observed_arr]]
keyword[def] identifier[draw] ( identifier[self] ): literal[string] identifier[observed_arr] = keyword[None] keyword[for] identifier[result_tuple] keyword[in] identifier[self] . identifier[__feature_generator] . identifier[generate] (): identifier[observed_arr] = identifier[result_tuple] [ literal[int] ] keyword[break] identifier[observed_arr] = identifier[observed_arr] . identifier[astype] ( identifier[float] ) keyword[if] identifier[self] . identifier[__norm_mode] == literal[string] : keyword[if] identifier[observed_arr] . identifier[std] ()!= literal[int] : identifier[observed_arr] =( identifier[observed_arr] - identifier[observed_arr] . identifier[mean] ())/ identifier[observed_arr] . identifier[std] () keyword[elif] identifier[self] . identifier[__norm_mode] == literal[string] : keyword[if] ( identifier[observed_arr] . identifier[max] ()- identifier[observed_arr] . identifier[min] ())!= literal[int] : identifier[observed_arr] =( identifier[observed_arr] - identifier[observed_arr] . identifier[min] ())/( identifier[observed_arr] . identifier[max] ()- identifier[observed_arr] . identifier[min] ()) keyword[elif] identifier[self] . identifier[__norm_mode] == literal[string] : identifier[observed_arr] = identifier[np] . identifier[tanh] ( identifier[observed_arr] ) keyword[return] identifier[observed_arr]
def draw(self): """ Draws samples from the `true` distribution. Returns: `np.ndarray` of samples. """ observed_arr = None for result_tuple in self.__feature_generator.generate(): observed_arr = result_tuple[0] break # depends on [control=['for'], data=['result_tuple']] observed_arr = observed_arr.astype(float) if self.__norm_mode == 'z_score': if observed_arr.std() != 0: observed_arr = (observed_arr - observed_arr.mean()) / observed_arr.std() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif self.__norm_mode == 'min_max': if observed_arr.max() - observed_arr.min() != 0: observed_arr = (observed_arr - observed_arr.min()) / (observed_arr.max() - observed_arr.min()) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif self.__norm_mode == 'tanh': observed_arr = np.tanh(observed_arr) # depends on [control=['if'], data=[]] return observed_arr
def _setupHttp(self): """ Setup an HTTP session authorized by OAuth2. """ if self._http == None: http = httplib2.Http() self._http = self._credentials.authorize(http)
def function[_setupHttp, parameter[self]]: constant[ Setup an HTTP session authorized by OAuth2. ] if compare[name[self]._http equal[==] constant[None]] begin[:] variable[http] assign[=] call[name[httplib2].Http, parameter[]] name[self]._http assign[=] call[name[self]._credentials.authorize, parameter[name[http]]]
keyword[def] identifier[_setupHttp] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_http] == keyword[None] : identifier[http] = identifier[httplib2] . identifier[Http] () identifier[self] . identifier[_http] = identifier[self] . identifier[_credentials] . identifier[authorize] ( identifier[http] )
def _setupHttp(self): """ Setup an HTTP session authorized by OAuth2. """ if self._http == None: http = httplib2.Http() self._http = self._credentials.authorize(http) # depends on [control=['if'], data=[]]
async def _read_messages(self):
    """Process messages received on the WebSocket connection.

    Iteration terminates when the client is stopped or it disconnects.

    NOTE(review): if the inner ``async for`` finishes (e.g. the peer
    closes cleanly) while ``_stopped`` is still False and ``_websocket``
    has not been cleared elsewhere, the outer ``while`` re-enters the
    inner loop immediately — presumably another task resets those
    attributes on disconnect; confirm against the connection lifecycle.
    """
    while not self._stopped and self._websocket is not None:
        async for message in self._websocket:
            if message.type == aiohttp.WSMsgType.TEXT:
                # Decode the JSON payload; the "type" key (defaulting to
                # "Unknown" when absent) names the event, and the
                # remaining keys are forwarded as the event's data.
                payload = message.json()
                event = payload.pop("type", "Unknown")
                self._dispatch_event(event, data=payload)
            elif message.type == aiohttp.WSMsgType.ERROR:
                # Protocol-level error frame: stop reading this socket.
                break
<ast.AsyncFunctionDef object at 0x7da1b1b36ec0>
keyword[async] keyword[def] identifier[_read_messages] ( identifier[self] ): literal[string] keyword[while] keyword[not] identifier[self] . identifier[_stopped] keyword[and] identifier[self] . identifier[_websocket] keyword[is] keyword[not] keyword[None] : keyword[async] keyword[for] identifier[message] keyword[in] identifier[self] . identifier[_websocket] : keyword[if] identifier[message] . identifier[type] == identifier[aiohttp] . identifier[WSMsgType] . identifier[TEXT] : identifier[payload] = identifier[message] . identifier[json] () identifier[event] = identifier[payload] . identifier[pop] ( literal[string] , literal[string] ) identifier[self] . identifier[_dispatch_event] ( identifier[event] , identifier[data] = identifier[payload] ) keyword[elif] identifier[message] . identifier[type] == identifier[aiohttp] . identifier[WSMsgType] . identifier[ERROR] : keyword[break]
async def _read_messages(self): """Process messages received on the WebSocket connection. Iteration terminates when the client is stopped or it disconnects. """ while not self._stopped and self._websocket is not None: async for message in self._websocket: if message.type == aiohttp.WSMsgType.TEXT: payload = message.json() event = payload.pop('type', 'Unknown') self._dispatch_event(event, data=payload) # depends on [control=['if'], data=[]] elif message.type == aiohttp.WSMsgType.ERROR: break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
def init_app(self, app):
        """Flask application initialization.

        The initialization will:

        * Set default values for the configuration variables.
        * Load translations from paths specified in
          ``I18N_TRANSLATIONS_PATHS``.
        * Load translations from ``<app.root_path>/translations`` if it
          exists.
        * Load translations from a specified entry point.
        * Add ``toutc`` and ``tousertimezone`` template filters.
        * Install a custom JSON encoder on app.

        :param app: The Flask application to initialize.
        """
        self.init_config(app)

        # Initialize Flask-BabelEx
        self.babel.init_app(app)
        # Fall back to the module-level selectors when no custom ones were
        # supplied at extension construction time.
        self.babel.localeselector(self.localeselector or get_locale)
        self.babel.timezoneselector(self.timezoneselector or get_timezone)

        # Translations are gathered from three places, in this order:
        # 1. Paths listed in I18N_TRANSLATIONS_PATHS
        for p in app.config.get('I18N_TRANSLATIONS_PATHS', []):
            self.domain.add_path(p)
        # 2. <app.root_path>/translations
        app_translations = os.path.join(app.root_path, 'translations')
        if os.path.exists(app_translations):
            self.domain.add_path(app_translations)
        # 3. Entrypoints
        if self.entry_point_group:
            self.domain.add_entrypoint(self.entry_point_group)

        # Register default routes if URL is set.
        register_default_routes = app.config['I18N_SET_LANGUAGE_URL'] \
            and app.config['I18N_LANGUAGES']
        app.register_blueprint(
            create_blueprint(register_default_routes=register_default_routes),
            url_prefix=app.config['I18N_SET_LANGUAGE_URL']
        )

        # Register Jinja2 template filters for date formatting (Flask-Babel
        # already installs other filters).
        app.add_template_filter(filter_to_utc, name='toutc')
        app.add_template_filter(filter_to_user_timezone, name='tousertimezone')
        app.add_template_filter(filter_language_name, name='language_name')
        app.add_template_filter(
            filter_language_name_local, name='language_name_local')
        app.add_template_global(current_i18n, name='current_i18n')

        # Lazy string aware JSON encoder.
        app.json_encoder = get_lazystring_encoder(app)

        app.extensions['invenio-i18n'] = self
def function[init_app, parameter[self, app]]: constant[Flask application initialization. The initialization will: * Set default values for the configuration variables. * Load translations from paths specified in ``I18N_TRANSLATIONS_PATHS``. * Load translations from ``app.root_path>/translations`` if it exists. * Load translations from a specified entry point. * Add ``toutc`` and ``tousertimezone`` template filters. * Install a custom JSON encoder on app. ] call[name[self].init_config, parameter[name[app]]] call[name[self].babel.init_app, parameter[name[app]]] call[name[self].babel.localeselector, parameter[<ast.BoolOp object at 0x7da1b26ad5d0>]] call[name[self].babel.timezoneselector, parameter[<ast.BoolOp object at 0x7da1b26accd0>]] for taget[name[p]] in starred[call[name[app].config.get, parameter[constant[I18N_TRANSLATIONS_PATHS], list[[]]]]] begin[:] call[name[self].domain.add_path, parameter[name[p]]] variable[app_translations] assign[=] call[name[os].path.join, parameter[name[app].root_path, constant[translations]]] if call[name[os].path.exists, parameter[name[app_translations]]] begin[:] call[name[self].domain.add_path, parameter[name[app_translations]]] if name[self].entry_point_group begin[:] call[name[self].domain.add_entrypoint, parameter[name[self].entry_point_group]] variable[register_default_routes] assign[=] <ast.BoolOp object at 0x7da20c76f040> call[name[app].register_blueprint, parameter[call[name[create_blueprint], parameter[]]]] call[name[app].add_template_filter, parameter[name[filter_to_utc]]] call[name[app].add_template_filter, parameter[name[filter_to_user_timezone]]] call[name[app].add_template_filter, parameter[name[filter_language_name]]] call[name[app].add_template_filter, parameter[name[filter_language_name_local]]] call[name[app].add_template_global, parameter[name[current_i18n]]] name[app].json_encoder assign[=] call[name[get_lazystring_encoder], parameter[name[app]]] call[name[app].extensions][constant[invenio-i18n]] assign[=] 
name[self]
keyword[def] identifier[init_app] ( identifier[self] , identifier[app] ): literal[string] identifier[self] . identifier[init_config] ( identifier[app] ) identifier[self] . identifier[babel] . identifier[init_app] ( identifier[app] ) identifier[self] . identifier[babel] . identifier[localeselector] ( identifier[self] . identifier[localeselector] keyword[or] identifier[get_locale] ) identifier[self] . identifier[babel] . identifier[timezoneselector] ( identifier[self] . identifier[timezoneselector] keyword[or] identifier[get_timezone] ) keyword[for] identifier[p] keyword[in] identifier[app] . identifier[config] . identifier[get] ( literal[string] ,[]): identifier[self] . identifier[domain] . identifier[add_path] ( identifier[p] ) identifier[app_translations] = identifier[os] . identifier[path] . identifier[join] ( identifier[app] . identifier[root_path] , literal[string] ) keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[app_translations] ): identifier[self] . identifier[domain] . identifier[add_path] ( identifier[app_translations] ) keyword[if] identifier[self] . identifier[entry_point_group] : identifier[self] . identifier[domain] . identifier[add_entrypoint] ( identifier[self] . identifier[entry_point_group] ) identifier[register_default_routes] = identifier[app] . identifier[config] [ literal[string] ] keyword[and] identifier[app] . identifier[config] [ literal[string] ] identifier[app] . identifier[register_blueprint] ( identifier[create_blueprint] ( identifier[register_default_routes] = identifier[register_default_routes] ), identifier[url_prefix] = identifier[app] . identifier[config] [ literal[string] ] ) identifier[app] . identifier[add_template_filter] ( identifier[filter_to_utc] , identifier[name] = literal[string] ) identifier[app] . identifier[add_template_filter] ( identifier[filter_to_user_timezone] , identifier[name] = literal[string] ) identifier[app] . 
identifier[add_template_filter] ( identifier[filter_language_name] , identifier[name] = literal[string] ) identifier[app] . identifier[add_template_filter] ( identifier[filter_language_name_local] , identifier[name] = literal[string] ) identifier[app] . identifier[add_template_global] ( identifier[current_i18n] , identifier[name] = literal[string] ) identifier[app] . identifier[json_encoder] = identifier[get_lazystring_encoder] ( identifier[app] ) identifier[app] . identifier[extensions] [ literal[string] ]= identifier[self]
def init_app(self, app): """Flask application initialization. The initialization will: * Set default values for the configuration variables. * Load translations from paths specified in ``I18N_TRANSLATIONS_PATHS``. * Load translations from ``app.root_path>/translations`` if it exists. * Load translations from a specified entry point. * Add ``toutc`` and ``tousertimezone`` template filters. * Install a custom JSON encoder on app. """ self.init_config(app) # Initialize Flask-BabelEx self.babel.init_app(app) self.babel.localeselector(self.localeselector or get_locale) self.babel.timezoneselector(self.timezoneselector or get_timezone) # 1. Paths listed in I18N_TRANSLATIONS_PATHS for p in app.config.get('I18N_TRANSLATIONS_PATHS', []): self.domain.add_path(p) # depends on [control=['for'], data=['p']] # 2. <app.root_path>/translations app_translations = os.path.join(app.root_path, 'translations') if os.path.exists(app_translations): self.domain.add_path(app_translations) # depends on [control=['if'], data=[]] # 3. Entrypoints if self.entry_point_group: self.domain.add_entrypoint(self.entry_point_group) # depends on [control=['if'], data=[]] # Register default routes if URL is set. register_default_routes = app.config['I18N_SET_LANGUAGE_URL'] and app.config['I18N_LANGUAGES'] app.register_blueprint(create_blueprint(register_default_routes=register_default_routes), url_prefix=app.config['I18N_SET_LANGUAGE_URL']) # Register Jinja2 template filters for date formatting (Flask-Babel # already installs other filters). app.add_template_filter(filter_to_utc, name='toutc') app.add_template_filter(filter_to_user_timezone, name='tousertimezone') app.add_template_filter(filter_language_name, name='language_name') app.add_template_filter(filter_language_name_local, name='language_name_local') app.add_template_global(current_i18n, name='current_i18n') # Lazy string aware JSON encoder. app.json_encoder = get_lazystring_encoder(app) app.extensions['invenio-i18n'] = self
def verify(token, key, algorithms, verify=True):
    """Verifies a JWS string's signature and returns its payload.

    Args:
        token (str): A signed JWS to be verified.
        key (str or dict): A key to attempt to verify the payload with.
            Can be an individual JWK or a JWK set.
        algorithms (str or list): Valid algorithms that should be used to
            verify the JWS.
        verify (bool): Whether to check the signature at all; when False
            the payload is returned without any verification.

    Returns:
        str: The str representation of the payload, assuming the signature
        is valid.

    Raises:
        JWSError: If there is an exception verifying a token.

    Examples:

        >>> token = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhIjoiYiJ9.jiMyrsmD8AoHWeQgmxZ5yq8z0lXS67_QGs52AzC8Ru8'
        >>> jws.verify(token, 'secret', algorithms='HS256')

    """
    header, payload, signing_input, signature = _load(token)
    # Guard clause: callers may explicitly opt out of verification.
    if not verify:
        return payload
    _verify_signature(signing_input, header, signature, key, algorithms)
    return payload
def function[verify, parameter[token, key, algorithms, verify]]: constant[Verifies a JWS string's signature. Args: token (str): A signed JWS to be verified. key (str or dict): A key to attempt to verify the payload with. Can be individual JWK or JWK set. algorithms (str or list): Valid algorithms that should be used to verify the JWS. Returns: str: The str representation of the payload, assuming the signature is valid. Raises: JWSError: If there is an exception verifying a token. Examples: >>> token = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhIjoiYiJ9.jiMyrsmD8AoHWeQgmxZ5yq8z0lXS67_QGs52AzC8Ru8' >>> jws.verify(token, 'secret', algorithms='HS256') ] <ast.Tuple object at 0x7da18c4ccc70> assign[=] call[name[_load], parameter[name[token]]] if name[verify] begin[:] call[name[_verify_signature], parameter[name[signing_input], name[header], name[signature], name[key], name[algorithms]]] return[name[payload]]
keyword[def] identifier[verify] ( identifier[token] , identifier[key] , identifier[algorithms] , identifier[verify] = keyword[True] ): literal[string] identifier[header] , identifier[payload] , identifier[signing_input] , identifier[signature] = identifier[_load] ( identifier[token] ) keyword[if] identifier[verify] : identifier[_verify_signature] ( identifier[signing_input] , identifier[header] , identifier[signature] , identifier[key] , identifier[algorithms] ) keyword[return] identifier[payload]
def verify(token, key, algorithms, verify=True): """Verifies a JWS string's signature. Args: token (str): A signed JWS to be verified. key (str or dict): A key to attempt to verify the payload with. Can be individual JWK or JWK set. algorithms (str or list): Valid algorithms that should be used to verify the JWS. Returns: str: The str representation of the payload, assuming the signature is valid. Raises: JWSError: If there is an exception verifying a token. Examples: >>> token = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhIjoiYiJ9.jiMyrsmD8AoHWeQgmxZ5yq8z0lXS67_QGs52AzC8Ru8' >>> jws.verify(token, 'secret', algorithms='HS256') """ (header, payload, signing_input, signature) = _load(token) if verify: _verify_signature(signing_input, header, signature, key, algorithms) # depends on [control=['if'], data=[]] return payload
def parse_request_uri_response(self, uri, state=None, scope=None):
    """Parse the access token delivered in the redirection URI fragment.

    When the resource owner grants the access request, the authorization
    server issues an access token and delivers it to the client by adding
    parameters to the fragment component of the redirection URI using the
    "application/x-www-form-urlencoded" format.

    A successful response always contains **access_token** and
    **token_type** (commonly ``Bearer``, as described in `Section 7.1`_).
    If ``state`` was provided in the authorization phase, the provider is
    required to echo that exact value back. The response may also include
    **expires_in** (the token lifetime in seconds) and **scope** (required
    only when it has changed since the authorization request, see
    `Section 3.3`_). A mismatching or missing ``state`` raises an error,
    and a changed ``scope`` is reported through
    ``oauthlib.signals.scope_changed``.

    :param uri: The callback URI that resulted from the user being
                redirected back from the provider to you, the client.
    :param state: The state provided in the authorization request.
    :param scope: The scopes provided in the authorization request.
    :return: Dictionary of token parameters.
    :raises: OAuth2Error if response is invalid.

    .. _`Section 7.1`: https://tools.ietf.org/html/rfc6749#section-7.1
    .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3
    """
    parsed = parse_implicit_response(uri, state=state, scope=scope)
    # Keep the parsed token on the client and hydrate the convenience
    # attributes (access_token, token_type, ...) before returning it.
    self.token = parsed
    self.populate_token_attributes(parsed)
    return parsed
def function[parse_request_uri_response, parameter[self, uri, state, scope]]: constant[Parse the response URI fragment. If the resource owner grants the access request, the authorization server issues an access token and delivers it to the client by adding the following parameters to the fragment component of the redirection URI using the "application/x-www-form-urlencoded" format: :param uri: The callback URI that resulted from the user being redirected back from the provider to you, the client. :param state: The state provided in the authorization request. :param scope: The scopes provided in the authorization request. :return: Dictionary of token parameters. :raises: OAuth2Error if response is invalid. A successful response should always contain **access_token** The access token issued by the authorization server. Often a random string. **token_type** The type of the token issued as described in `Section 7.1`_. Commonly ``Bearer``. **state** If you provided the state parameter in the authorization phase, then the provider is required to include that exact state value in the response. While it is not mandated it is recommended that the provider include **expires_in** The lifetime in seconds of the access token. For example, the value "3600" denotes that the access token will expire in one hour from the time the response was generated. If omitted, the authorization server SHOULD provide the expiration time via other means or document the default value. **scope** Providers may supply this in all responses but are required to only if it has changed since the authorization request. 
A few example responses can be seen below:: >>> response_uri = 'https://example.com/callback#access_token=sdlfkj452&state=ss345asyht&token_type=Bearer&scope=hello+world' >>> from oauthlib.oauth2 import MobileApplicationClient >>> client = MobileApplicationClient('your_id') >>> client.parse_request_uri_response(response_uri) { 'access_token': 'sdlfkj452', 'token_type': 'Bearer', 'state': 'ss345asyht', 'scope': [u'hello', u'world'] } >>> client.parse_request_uri_response(response_uri, state='other') Traceback (most recent call last): File "<stdin>", line 1, in <module> File "oauthlib/oauth2/rfc6749/__init__.py", line 598, in parse_request_uri_response **scope** File "oauthlib/oauth2/rfc6749/parameters.py", line 197, in parse_implicit_response raise ValueError("Mismatching or missing state in params.") ValueError: Mismatching or missing state in params. >>> def alert_scope_changed(message, old, new): ... print(message, old, new) ... >>> oauthlib.signals.scope_changed.connect(alert_scope_changed) >>> client.parse_request_body_response(response_body, scope=['other']) ('Scope has changed from "other" to "hello world".', ['other'], ['hello', 'world']) .. _`Section 7.1`: https://tools.ietf.org/html/rfc6749#section-7.1 .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3 ] name[self].token assign[=] call[name[parse_implicit_response], parameter[name[uri]]] call[name[self].populate_token_attributes, parameter[name[self].token]] return[name[self].token]
keyword[def] identifier[parse_request_uri_response] ( identifier[self] , identifier[uri] , identifier[state] = keyword[None] , identifier[scope] = keyword[None] ): literal[string] identifier[self] . identifier[token] = identifier[parse_implicit_response] ( identifier[uri] , identifier[state] = identifier[state] , identifier[scope] = identifier[scope] ) identifier[self] . identifier[populate_token_attributes] ( identifier[self] . identifier[token] ) keyword[return] identifier[self] . identifier[token]
def parse_request_uri_response(self, uri, state=None, scope=None): """Parse the response URI fragment. If the resource owner grants the access request, the authorization server issues an access token and delivers it to the client by adding the following parameters to the fragment component of the redirection URI using the "application/x-www-form-urlencoded" format: :param uri: The callback URI that resulted from the user being redirected back from the provider to you, the client. :param state: The state provided in the authorization request. :param scope: The scopes provided in the authorization request. :return: Dictionary of token parameters. :raises: OAuth2Error if response is invalid. A successful response should always contain **access_token** The access token issued by the authorization server. Often a random string. **token_type** The type of the token issued as described in `Section 7.1`_. Commonly ``Bearer``. **state** If you provided the state parameter in the authorization phase, then the provider is required to include that exact state value in the response. While it is not mandated it is recommended that the provider include **expires_in** The lifetime in seconds of the access token. For example, the value "3600" denotes that the access token will expire in one hour from the time the response was generated. If omitted, the authorization server SHOULD provide the expiration time via other means or document the default value. **scope** Providers may supply this in all responses but are required to only if it has changed since the authorization request. 
A few example responses can be seen below:: >>> response_uri = 'https://example.com/callback#access_token=sdlfkj452&state=ss345asyht&token_type=Bearer&scope=hello+world' >>> from oauthlib.oauth2 import MobileApplicationClient >>> client = MobileApplicationClient('your_id') >>> client.parse_request_uri_response(response_uri) { 'access_token': 'sdlfkj452', 'token_type': 'Bearer', 'state': 'ss345asyht', 'scope': [u'hello', u'world'] } >>> client.parse_request_uri_response(response_uri, state='other') Traceback (most recent call last): File "<stdin>", line 1, in <module> File "oauthlib/oauth2/rfc6749/__init__.py", line 598, in parse_request_uri_response **scope** File "oauthlib/oauth2/rfc6749/parameters.py", line 197, in parse_implicit_response raise ValueError("Mismatching or missing state in params.") ValueError: Mismatching or missing state in params. >>> def alert_scope_changed(message, old, new): ... print(message, old, new) ... >>> oauthlib.signals.scope_changed.connect(alert_scope_changed) >>> client.parse_request_body_response(response_body, scope=['other']) ('Scope has changed from "other" to "hello world".', ['other'], ['hello', 'world']) .. _`Section 7.1`: https://tools.ietf.org/html/rfc6749#section-7.1 .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3 """ self.token = parse_implicit_response(uri, state=state, scope=scope) self.populate_token_attributes(self.token) return self.token
def tags(self, val: str) -> None: """ Accessor for record tags (metadata). :param val: record tags """ if not StorageRecord.ok_tags(val): LOGGER.debug('StorageRecord.__init__ <!< Tags %s must map strings to strings', val) raise BadRecord('Tags {} must map strings to strings'.format(val)) self._tags = val or {}
def function[tags, parameter[self, val]]: constant[ Accessor for record tags (metadata). :param val: record tags ] if <ast.UnaryOp object at 0x7da2041dbb50> begin[:] call[name[LOGGER].debug, parameter[constant[StorageRecord.__init__ <!< Tags %s must map strings to strings], name[val]]] <ast.Raise object at 0x7da1b26aff70> name[self]._tags assign[=] <ast.BoolOp object at 0x7da1b26adf30>
keyword[def] identifier[tags] ( identifier[self] , identifier[val] : identifier[str] )-> keyword[None] : literal[string] keyword[if] keyword[not] identifier[StorageRecord] . identifier[ok_tags] ( identifier[val] ): identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[val] ) keyword[raise] identifier[BadRecord] ( literal[string] . identifier[format] ( identifier[val] )) identifier[self] . identifier[_tags] = identifier[val] keyword[or] {}
def tags(self, val: str) -> None: """ Accessor for record tags (metadata). :param val: record tags """ if not StorageRecord.ok_tags(val): LOGGER.debug('StorageRecord.__init__ <!< Tags %s must map strings to strings', val) raise BadRecord('Tags {} must map strings to strings'.format(val)) # depends on [control=['if'], data=[]] self._tags = val or {}
def _iterate(self, url, params, api_entity): """ Args: url: params: api_entity: Return: """ params['resultLimit'] = self.result_limit should_iterate = True result_start = 0 while should_iterate: # params['resultOffset'] = result_offset params['resultStart'] = result_start r = self.tcex.session.get(url, params=params) if not self.success(r): err = r.text or r.reason self.tcex.handle_error(950, [r.status_code, err, r.url]) data = r.json().get('data').get(api_entity) if len(data) < self.result_limit: should_iterate = False result_start += self.result_limit for result in data: yield result
def function[_iterate, parameter[self, url, params, api_entity]]: constant[ Args: url: params: api_entity: Return: ] call[name[params]][constant[resultLimit]] assign[=] name[self].result_limit variable[should_iterate] assign[=] constant[True] variable[result_start] assign[=] constant[0] while name[should_iterate] begin[:] call[name[params]][constant[resultStart]] assign[=] name[result_start] variable[r] assign[=] call[name[self].tcex.session.get, parameter[name[url]]] if <ast.UnaryOp object at 0x7da207f98550> begin[:] variable[err] assign[=] <ast.BoolOp object at 0x7da207f99780> call[name[self].tcex.handle_error, parameter[constant[950], list[[<ast.Attribute object at 0x7da2041d9c90>, <ast.Name object at 0x7da18f810ca0>, <ast.Attribute object at 0x7da18f812b90>]]]] variable[data] assign[=] call[call[call[name[r].json, parameter[]].get, parameter[constant[data]]].get, parameter[name[api_entity]]] if compare[call[name[len], parameter[name[data]]] less[<] name[self].result_limit] begin[:] variable[should_iterate] assign[=] constant[False] <ast.AugAssign object at 0x7da18f8138e0> for taget[name[result]] in starred[name[data]] begin[:] <ast.Yield object at 0x7da18f8139a0>
keyword[def] identifier[_iterate] ( identifier[self] , identifier[url] , identifier[params] , identifier[api_entity] ): literal[string] identifier[params] [ literal[string] ]= identifier[self] . identifier[result_limit] identifier[should_iterate] = keyword[True] identifier[result_start] = literal[int] keyword[while] identifier[should_iterate] : identifier[params] [ literal[string] ]= identifier[result_start] identifier[r] = identifier[self] . identifier[tcex] . identifier[session] . identifier[get] ( identifier[url] , identifier[params] = identifier[params] ) keyword[if] keyword[not] identifier[self] . identifier[success] ( identifier[r] ): identifier[err] = identifier[r] . identifier[text] keyword[or] identifier[r] . identifier[reason] identifier[self] . identifier[tcex] . identifier[handle_error] ( literal[int] ,[ identifier[r] . identifier[status_code] , identifier[err] , identifier[r] . identifier[url] ]) identifier[data] = identifier[r] . identifier[json] (). identifier[get] ( literal[string] ). identifier[get] ( identifier[api_entity] ) keyword[if] identifier[len] ( identifier[data] )< identifier[self] . identifier[result_limit] : identifier[should_iterate] = keyword[False] identifier[result_start] += identifier[self] . identifier[result_limit] keyword[for] identifier[result] keyword[in] identifier[data] : keyword[yield] identifier[result]
def _iterate(self, url, params, api_entity): """ Args: url: params: api_entity: Return: """ params['resultLimit'] = self.result_limit should_iterate = True result_start = 0 while should_iterate: # params['resultOffset'] = result_offset params['resultStart'] = result_start r = self.tcex.session.get(url, params=params) if not self.success(r): err = r.text or r.reason self.tcex.handle_error(950, [r.status_code, err, r.url]) # depends on [control=['if'], data=[]] data = r.json().get('data').get(api_entity) if len(data) < self.result_limit: should_iterate = False # depends on [control=['if'], data=[]] result_start += self.result_limit for result in data: yield result # depends on [control=['for'], data=['result']] # depends on [control=['while'], data=[]]
def iter_user_teams(self, number=-1, etag=None): """Gets the authenticated user's teams across all of organizations. List all of the teams across all of the organizations to which the authenticated user belongs. This method requires user or repo scope when authenticating via OAuth. :returns: generator of :class:`Team <github3.orgs.Team>` objects """ url = self._build_url('user', 'teams') return self._iter(int(number), url, Team, etag=etag)
def function[iter_user_teams, parameter[self, number, etag]]: constant[Gets the authenticated user's teams across all of organizations. List all of the teams across all of the organizations to which the authenticated user belongs. This method requires user or repo scope when authenticating via OAuth. :returns: generator of :class:`Team <github3.orgs.Team>` objects ] variable[url] assign[=] call[name[self]._build_url, parameter[constant[user], constant[teams]]] return[call[name[self]._iter, parameter[call[name[int], parameter[name[number]]], name[url], name[Team]]]]
keyword[def] identifier[iter_user_teams] ( identifier[self] , identifier[number] =- literal[int] , identifier[etag] = keyword[None] ): literal[string] identifier[url] = identifier[self] . identifier[_build_url] ( literal[string] , literal[string] ) keyword[return] identifier[self] . identifier[_iter] ( identifier[int] ( identifier[number] ), identifier[url] , identifier[Team] , identifier[etag] = identifier[etag] )
def iter_user_teams(self, number=-1, etag=None): """Gets the authenticated user's teams across all of organizations. List all of the teams across all of the organizations to which the authenticated user belongs. This method requires user or repo scope when authenticating via OAuth. :returns: generator of :class:`Team <github3.orgs.Team>` objects """ url = self._build_url('user', 'teams') return self._iter(int(number), url, Team, etag=etag)
def add(self, val): """Add `val` to the current value if it's positive. Return without adding if `val` is not positive. :type val: int :param val: Value to add. """ if not isinstance(val, six.integer_types): raise ValueError("CumulativePointLong only supports integer types") if val > 0: super(CumulativePointLong, self).add(val)
def function[add, parameter[self, val]]: constant[Add `val` to the current value if it's positive. Return without adding if `val` is not positive. :type val: int :param val: Value to add. ] if <ast.UnaryOp object at 0x7da20c9900d0> begin[:] <ast.Raise object at 0x7da20c990ac0> if compare[name[val] greater[>] constant[0]] begin[:] call[call[name[super], parameter[name[CumulativePointLong], name[self]]].add, parameter[name[val]]]
keyword[def] identifier[add] ( identifier[self] , identifier[val] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[val] , identifier[six] . identifier[integer_types] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[val] > literal[int] : identifier[super] ( identifier[CumulativePointLong] , identifier[self] ). identifier[add] ( identifier[val] )
def add(self, val): """Add `val` to the current value if it's positive. Return without adding if `val` is not positive. :type val: int :param val: Value to add. """ if not isinstance(val, six.integer_types): raise ValueError('CumulativePointLong only supports integer types') # depends on [control=['if'], data=[]] if val > 0: super(CumulativePointLong, self).add(val) # depends on [control=['if'], data=['val']]
def _compress_content(self, content): """Gzip a given string.""" zbuf = StringIO() zfile = GzipFile(mode='wb', compresslevel=6, fileobj=zbuf) zfile.write(content.read()) zfile.close() content.file = zbuf return content
def function[_compress_content, parameter[self, content]]: constant[Gzip a given string.] variable[zbuf] assign[=] call[name[StringIO], parameter[]] variable[zfile] assign[=] call[name[GzipFile], parameter[]] call[name[zfile].write, parameter[call[name[content].read, parameter[]]]] call[name[zfile].close, parameter[]] name[content].file assign[=] name[zbuf] return[name[content]]
keyword[def] identifier[_compress_content] ( identifier[self] , identifier[content] ): literal[string] identifier[zbuf] = identifier[StringIO] () identifier[zfile] = identifier[GzipFile] ( identifier[mode] = literal[string] , identifier[compresslevel] = literal[int] , identifier[fileobj] = identifier[zbuf] ) identifier[zfile] . identifier[write] ( identifier[content] . identifier[read] ()) identifier[zfile] . identifier[close] () identifier[content] . identifier[file] = identifier[zbuf] keyword[return] identifier[content]
def _compress_content(self, content): """Gzip a given string.""" zbuf = StringIO() zfile = GzipFile(mode='wb', compresslevel=6, fileobj=zbuf) zfile.write(content.read()) zfile.close() content.file = zbuf return content
def get_config(self, key, default=MISSING): """Get the value of a persistent config key from the registry If no default is specified and the key is not found ArgumentError is raised. Args: key (string): The key name to fetch default (string): an optional value to be returned if key cannot be found Returns: string: the key's value """ keyname = "config:" + key try: return self.kvstore.get(keyname) except KeyError: if default is MISSING: raise ArgumentError("No config value found for key", key=key) return default
def function[get_config, parameter[self, key, default]]: constant[Get the value of a persistent config key from the registry If no default is specified and the key is not found ArgumentError is raised. Args: key (string): The key name to fetch default (string): an optional value to be returned if key cannot be found Returns: string: the key's value ] variable[keyname] assign[=] binary_operation[constant[config:] + name[key]] <ast.Try object at 0x7da20c6c4fa0>
keyword[def] identifier[get_config] ( identifier[self] , identifier[key] , identifier[default] = identifier[MISSING] ): literal[string] identifier[keyname] = literal[string] + identifier[key] keyword[try] : keyword[return] identifier[self] . identifier[kvstore] . identifier[get] ( identifier[keyname] ) keyword[except] identifier[KeyError] : keyword[if] identifier[default] keyword[is] identifier[MISSING] : keyword[raise] identifier[ArgumentError] ( literal[string] , identifier[key] = identifier[key] ) keyword[return] identifier[default]
def get_config(self, key, default=MISSING): """Get the value of a persistent config key from the registry If no default is specified and the key is not found ArgumentError is raised. Args: key (string): The key name to fetch default (string): an optional value to be returned if key cannot be found Returns: string: the key's value """ keyname = 'config:' + key try: return self.kvstore.get(keyname) # depends on [control=['try'], data=[]] except KeyError: if default is MISSING: raise ArgumentError('No config value found for key', key=key) # depends on [control=['if'], data=[]] return default # depends on [control=['except'], data=[]]
def dep_save(self, ): """Save the current department :returns: None :rtype: None :raises: None """ if not self.cur_dep: return ordervalue = self.dep_ordervalue_sb.value() desc = self.dep_desc_pte.toPlainText() self.cur_dep.ordervalue = ordervalue self.cur_dep.description = desc self.cur_dep.save()
def function[dep_save, parameter[self]]: constant[Save the current department :returns: None :rtype: None :raises: None ] if <ast.UnaryOp object at 0x7da1b16d4610> begin[:] return[None] variable[ordervalue] assign[=] call[name[self].dep_ordervalue_sb.value, parameter[]] variable[desc] assign[=] call[name[self].dep_desc_pte.toPlainText, parameter[]] name[self].cur_dep.ordervalue assign[=] name[ordervalue] name[self].cur_dep.description assign[=] name[desc] call[name[self].cur_dep.save, parameter[]]
keyword[def] identifier[dep_save] ( identifier[self] ,): literal[string] keyword[if] keyword[not] identifier[self] . identifier[cur_dep] : keyword[return] identifier[ordervalue] = identifier[self] . identifier[dep_ordervalue_sb] . identifier[value] () identifier[desc] = identifier[self] . identifier[dep_desc_pte] . identifier[toPlainText] () identifier[self] . identifier[cur_dep] . identifier[ordervalue] = identifier[ordervalue] identifier[self] . identifier[cur_dep] . identifier[description] = identifier[desc] identifier[self] . identifier[cur_dep] . identifier[save] ()
def dep_save(self): """Save the current department :returns: None :rtype: None :raises: None """ if not self.cur_dep: return # depends on [control=['if'], data=[]] ordervalue = self.dep_ordervalue_sb.value() desc = self.dep_desc_pte.toPlainText() self.cur_dep.ordervalue = ordervalue self.cur_dep.description = desc self.cur_dep.save()
def renew_service(request, pk): """ renew an existing service :param request object :param pk: the primary key of the service to renew :type pk: int """ default_provider.load_services() service = get_object_or_404(ServicesActivated, pk=pk) service_name = str(service.name) service_object = default_provider.get_service(service_name) lets_auth = getattr(service_object, 'auth') getattr(service_object, 'reset_failed')(pk=pk) return redirect(lets_auth(request))
def function[renew_service, parameter[request, pk]]: constant[ renew an existing service :param request object :param pk: the primary key of the service to renew :type pk: int ] call[name[default_provider].load_services, parameter[]] variable[service] assign[=] call[name[get_object_or_404], parameter[name[ServicesActivated]]] variable[service_name] assign[=] call[name[str], parameter[name[service].name]] variable[service_object] assign[=] call[name[default_provider].get_service, parameter[name[service_name]]] variable[lets_auth] assign[=] call[name[getattr], parameter[name[service_object], constant[auth]]] call[call[name[getattr], parameter[name[service_object], constant[reset_failed]]], parameter[]] return[call[name[redirect], parameter[call[name[lets_auth], parameter[name[request]]]]]]
keyword[def] identifier[renew_service] ( identifier[request] , identifier[pk] ): literal[string] identifier[default_provider] . identifier[load_services] () identifier[service] = identifier[get_object_or_404] ( identifier[ServicesActivated] , identifier[pk] = identifier[pk] ) identifier[service_name] = identifier[str] ( identifier[service] . identifier[name] ) identifier[service_object] = identifier[default_provider] . identifier[get_service] ( identifier[service_name] ) identifier[lets_auth] = identifier[getattr] ( identifier[service_object] , literal[string] ) identifier[getattr] ( identifier[service_object] , literal[string] )( identifier[pk] = identifier[pk] ) keyword[return] identifier[redirect] ( identifier[lets_auth] ( identifier[request] ))
def renew_service(request, pk): """ renew an existing service :param request object :param pk: the primary key of the service to renew :type pk: int """ default_provider.load_services() service = get_object_or_404(ServicesActivated, pk=pk) service_name = str(service.name) service_object = default_provider.get_service(service_name) lets_auth = getattr(service_object, 'auth') getattr(service_object, 'reset_failed')(pk=pk) return redirect(lets_auth(request))
def trigeiro(T,N,factor): """ Data generator for the multi-item lot-sizing problem it uses a simular algorithm for generating the standard benchmarks in: "Capacitated Lot Sizing with Setup Times" by William W. Trigeiro, L. Joseph Thomas, John O. McClain MANAGEMENT SCIENCE Vol. 35, No. 3, March 1989, pp. 353-366 Parameters: - T: number of periods - N: number of products - factor: value for controlling constraining factor of capacity: - 0.75: lightly-constrained instances - 1.10: constrained instances """ P = range(1,N+1) f,g,c,d,h,M = {},{},{},{},{},{} sumT = 0 for t in range(1,T+1): for p in P: # capacity used per unit production: 1, except for # except for specific instances with random value in [0.5, 1.5] # (not tackled in our model) # setup times g[t,p] = 10 * random.randint(1,5) # 10, 50: trigeiro's values # set-up costs f[t,p] = 100 * random.randint(1,10) # checked from Wolsey's instances c[t,p] = 0 # variable costs # demands d[t,p] = 100+random.randint(-25,25) # checked from Wolsey's instances if t <= 4: if random.random() < .25: # trigeiro's parameter d[t,p] = 0 sumT += g[t,p] + d[t,p] # sumT is the total capacity usage in the lot-for-lot solution h[t,p] = random.randint(1,5) # holding costs; checked from Wolsey's instances for t in range(1,T+1): M[t] = int(float(sumT)/float(T)/factor) return P,f,g,c,d,h,M
def function[trigeiro, parameter[T, N, factor]]: constant[ Data generator for the multi-item lot-sizing problem it uses a simular algorithm for generating the standard benchmarks in: "Capacitated Lot Sizing with Setup Times" by William W. Trigeiro, L. Joseph Thomas, John O. McClain MANAGEMENT SCIENCE Vol. 35, No. 3, March 1989, pp. 353-366 Parameters: - T: number of periods - N: number of products - factor: value for controlling constraining factor of capacity: - 0.75: lightly-constrained instances - 1.10: constrained instances ] variable[P] assign[=] call[name[range], parameter[constant[1], binary_operation[name[N] + constant[1]]]] <ast.Tuple object at 0x7da1b18c0070> assign[=] tuple[[<ast.Dict object at 0x7da1b18c0ee0>, <ast.Dict object at 0x7da1b18c0b80>, <ast.Dict object at 0x7da1b18c0af0>, <ast.Dict object at 0x7da1b18c14e0>, <ast.Dict object at 0x7da1b18dd030>, <ast.Dict object at 0x7da1b18de530>]] variable[sumT] assign[=] constant[0] for taget[name[t]] in starred[call[name[range], parameter[constant[1], binary_operation[name[T] + constant[1]]]]] begin[:] for taget[name[p]] in starred[name[P]] begin[:] call[name[g]][tuple[[<ast.Name object at 0x7da1b17f40a0>, <ast.Name object at 0x7da1b17f51b0>]]] assign[=] binary_operation[constant[10] * call[name[random].randint, parameter[constant[1], constant[5]]]] call[name[f]][tuple[[<ast.Name object at 0x7da1b17f5570>, <ast.Name object at 0x7da1b17f7b80>]]] assign[=] binary_operation[constant[100] * call[name[random].randint, parameter[constant[1], constant[10]]]] call[name[c]][tuple[[<ast.Name object at 0x7da1b17f5030>, <ast.Name object at 0x7da1b17f50c0>]]] assign[=] constant[0] call[name[d]][tuple[[<ast.Name object at 0x7da1b17f5ea0>, <ast.Name object at 0x7da1b17f6c50>]]] assign[=] binary_operation[constant[100] + call[name[random].randint, parameter[<ast.UnaryOp object at 0x7da1b17f6fb0>, constant[25]]]] if compare[name[t] less_or_equal[<=] constant[4]] begin[:] if compare[call[name[random].random, parameter[]] 
less[<] constant[0.25]] begin[:] call[name[d]][tuple[[<ast.Name object at 0x7da1b17f62f0>, <ast.Name object at 0x7da1b17f48e0>]]] assign[=] constant[0] <ast.AugAssign object at 0x7da1b17f7250> call[name[h]][tuple[[<ast.Name object at 0x7da1b17f5ae0>, <ast.Name object at 0x7da1b17f58d0>]]] assign[=] call[name[random].randint, parameter[constant[1], constant[5]]] for taget[name[t]] in starred[call[name[range], parameter[constant[1], binary_operation[name[T] + constant[1]]]]] begin[:] call[name[M]][name[t]] assign[=] call[name[int], parameter[binary_operation[binary_operation[call[name[float], parameter[name[sumT]]] / call[name[float], parameter[name[T]]]] / name[factor]]]] return[tuple[[<ast.Name object at 0x7da1b17f4e20>, <ast.Name object at 0x7da1b17f5f60>, <ast.Name object at 0x7da1b17f69b0>, <ast.Name object at 0x7da1b17f7a90>, <ast.Name object at 0x7da1b1702d10>, <ast.Name object at 0x7da1b1700100>, <ast.Name object at 0x7da1b1701030>]]]
keyword[def] identifier[trigeiro] ( identifier[T] , identifier[N] , identifier[factor] ): literal[string] identifier[P] = identifier[range] ( literal[int] , identifier[N] + literal[int] ) identifier[f] , identifier[g] , identifier[c] , identifier[d] , identifier[h] , identifier[M] ={},{},{},{},{},{} identifier[sumT] = literal[int] keyword[for] identifier[t] keyword[in] identifier[range] ( literal[int] , identifier[T] + literal[int] ): keyword[for] identifier[p] keyword[in] identifier[P] : identifier[g] [ identifier[t] , identifier[p] ]= literal[int] * identifier[random] . identifier[randint] ( literal[int] , literal[int] ) identifier[f] [ identifier[t] , identifier[p] ]= literal[int] * identifier[random] . identifier[randint] ( literal[int] , literal[int] ) identifier[c] [ identifier[t] , identifier[p] ]= literal[int] identifier[d] [ identifier[t] , identifier[p] ]= literal[int] + identifier[random] . identifier[randint] (- literal[int] , literal[int] ) keyword[if] identifier[t] <= literal[int] : keyword[if] identifier[random] . identifier[random] ()< literal[int] : identifier[d] [ identifier[t] , identifier[p] ]= literal[int] identifier[sumT] += identifier[g] [ identifier[t] , identifier[p] ]+ identifier[d] [ identifier[t] , identifier[p] ] identifier[h] [ identifier[t] , identifier[p] ]= identifier[random] . identifier[randint] ( literal[int] , literal[int] ) keyword[for] identifier[t] keyword[in] identifier[range] ( literal[int] , identifier[T] + literal[int] ): identifier[M] [ identifier[t] ]= identifier[int] ( identifier[float] ( identifier[sumT] )/ identifier[float] ( identifier[T] )/ identifier[factor] ) keyword[return] identifier[P] , identifier[f] , identifier[g] , identifier[c] , identifier[d] , identifier[h] , identifier[M]
def trigeiro(T, N, factor): """ Data generator for the multi-item lot-sizing problem it uses a simular algorithm for generating the standard benchmarks in: "Capacitated Lot Sizing with Setup Times" by William W. Trigeiro, L. Joseph Thomas, John O. McClain MANAGEMENT SCIENCE Vol. 35, No. 3, March 1989, pp. 353-366 Parameters: - T: number of periods - N: number of products - factor: value for controlling constraining factor of capacity: - 0.75: lightly-constrained instances - 1.10: constrained instances """ P = range(1, N + 1) (f, g, c, d, h, M) = ({}, {}, {}, {}, {}, {}) sumT = 0 for t in range(1, T + 1): for p in P: # capacity used per unit production: 1, except for # except for specific instances with random value in [0.5, 1.5] # (not tackled in our model) # setup times g[t, p] = 10 * random.randint(1, 5) # 10, 50: trigeiro's values # set-up costs f[t, p] = 100 * random.randint(1, 10) # checked from Wolsey's instances c[t, p] = 0 # variable costs # demands d[t, p] = 100 + random.randint(-25, 25) # checked from Wolsey's instances if t <= 4: if random.random() < 0.25: # trigeiro's parameter d[t, p] = 0 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['t']] sumT += g[t, p] + d[t, p] # sumT is the total capacity usage in the lot-for-lot solution h[t, p] = random.randint(1, 5) # holding costs; checked from Wolsey's instances # depends on [control=['for'], data=['p']] # depends on [control=['for'], data=['t']] for t in range(1, T + 1): M[t] = int(float(sumT) / float(T) / factor) # depends on [control=['for'], data=['t']] return (P, f, g, c, d, h, M)
def projection_constants(self): """ Returns the (x, y) projection constants for the current projection. :return: x, y tuple projection constants """ return self.far / (self.far - self.near), (self.far * self.near) / (self.near - self.far)
def function[projection_constants, parameter[self]]: constant[ Returns the (x, y) projection constants for the current projection. :return: x, y tuple projection constants ] return[tuple[[<ast.BinOp object at 0x7da18f58d7b0>, <ast.BinOp object at 0x7da2041d9210>]]]
keyword[def] identifier[projection_constants] ( identifier[self] ): literal[string] keyword[return] identifier[self] . identifier[far] /( identifier[self] . identifier[far] - identifier[self] . identifier[near] ),( identifier[self] . identifier[far] * identifier[self] . identifier[near] )/( identifier[self] . identifier[near] - identifier[self] . identifier[far] )
def projection_constants(self): """ Returns the (x, y) projection constants for the current projection. :return: x, y tuple projection constants """ return (self.far / (self.far - self.near), self.far * self.near / (self.near - self.far))
def update_connector_resource(name, server=None, **kwargs): ''' Update a connection resource ''' # You're not supposed to update jndiName, if you do so, it will crash, silently if 'jndiName' in kwargs: del kwargs['jndiName'] return _update_element(name, 'resources/connector-resource', kwargs, server)
def function[update_connector_resource, parameter[name, server]]: constant[ Update a connection resource ] if compare[constant[jndiName] in name[kwargs]] begin[:] <ast.Delete object at 0x7da1b1c341f0> return[call[name[_update_element], parameter[name[name], constant[resources/connector-resource], name[kwargs], name[server]]]]
keyword[def] identifier[update_connector_resource] ( identifier[name] , identifier[server] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[if] literal[string] keyword[in] identifier[kwargs] : keyword[del] identifier[kwargs] [ literal[string] ] keyword[return] identifier[_update_element] ( identifier[name] , literal[string] , identifier[kwargs] , identifier[server] )
def update_connector_resource(name, server=None, **kwargs): """ Update a connection resource """ # You're not supposed to update jndiName, if you do so, it will crash, silently if 'jndiName' in kwargs: del kwargs['jndiName'] # depends on [control=['if'], data=['kwargs']] return _update_element(name, 'resources/connector-resource', kwargs, server)
def insert(self, parent, position, row=None): """insert(parent, position, row=None) :param parent: A valid :obj:`Gtk.TreeIter`, or :obj:`None` :type parent: :obj:`Gtk.TreeIter` or :obj:`None` :param position: position to insert the new row, or -1 for last :type position: :obj:`int` :param row: a list of values to apply to the newly inserted row or :obj:`None` :type row: [:obj:`object`] or :obj:`None` :returns: a :obj:`Gtk.TreeIter` pointing to the new row :rtype: :obj:`Gtk.TreeIter` Creates a new row at `position`. If parent is not :obj:`None`, then the row will be made a child of `parent`. Otherwise, the row will be created at the toplevel. If `position` is -1 or is larger than the number of rows at that level, then the new row will be inserted to the end of the list. The returned iterator will point to the newly inserted row. The row will be empty after this function is called if `row` is :obj:`None`. To fill in values, you need to call :obj:`Gtk.TreeStore.set`\\() or :obj:`Gtk.TreeStore.set_value`\\(). If `row` isn't :obj:`None` it has to be a list of values which will be used to fill the row. """ return self._do_insert(parent, position, row)
def function[insert, parameter[self, parent, position, row]]: constant[insert(parent, position, row=None) :param parent: A valid :obj:`Gtk.TreeIter`, or :obj:`None` :type parent: :obj:`Gtk.TreeIter` or :obj:`None` :param position: position to insert the new row, or -1 for last :type position: :obj:`int` :param row: a list of values to apply to the newly inserted row or :obj:`None` :type row: [:obj:`object`] or :obj:`None` :returns: a :obj:`Gtk.TreeIter` pointing to the new row :rtype: :obj:`Gtk.TreeIter` Creates a new row at `position`. If parent is not :obj:`None`, then the row will be made a child of `parent`. Otherwise, the row will be created at the toplevel. If `position` is -1 or is larger than the number of rows at that level, then the new row will be inserted to the end of the list. The returned iterator will point to the newly inserted row. The row will be empty after this function is called if `row` is :obj:`None`. To fill in values, you need to call :obj:`Gtk.TreeStore.set`\() or :obj:`Gtk.TreeStore.set_value`\(). If `row` isn't :obj:`None` it has to be a list of values which will be used to fill the row. ] return[call[name[self]._do_insert, parameter[name[parent], name[position], name[row]]]]
keyword[def] identifier[insert] ( identifier[self] , identifier[parent] , identifier[position] , identifier[row] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[_do_insert] ( identifier[parent] , identifier[position] , identifier[row] )
def insert(self, parent, position, row=None): """insert(parent, position, row=None) :param parent: A valid :obj:`Gtk.TreeIter`, or :obj:`None` :type parent: :obj:`Gtk.TreeIter` or :obj:`None` :param position: position to insert the new row, or -1 for last :type position: :obj:`int` :param row: a list of values to apply to the newly inserted row or :obj:`None` :type row: [:obj:`object`] or :obj:`None` :returns: a :obj:`Gtk.TreeIter` pointing to the new row :rtype: :obj:`Gtk.TreeIter` Creates a new row at `position`. If parent is not :obj:`None`, then the row will be made a child of `parent`. Otherwise, the row will be created at the toplevel. If `position` is -1 or is larger than the number of rows at that level, then the new row will be inserted to the end of the list. The returned iterator will point to the newly inserted row. The row will be empty after this function is called if `row` is :obj:`None`. To fill in values, you need to call :obj:`Gtk.TreeStore.set`\\() or :obj:`Gtk.TreeStore.set_value`\\(). If `row` isn't :obj:`None` it has to be a list of values which will be used to fill the row. """ return self._do_insert(parent, position, row)
def serv(args):
    """Serve a rueckenwind application"""
    # Auto-reload on source changes unless debugging was disabled.
    if not args.no_debug:
        tornado.autoreload.start()

    if sys.stdout.isatty():
        # set terminal title so the served app is identifiable
        title = ' '.join(sys.argv[2:])
        sys.stdout.write('\x1b]2;rw: {}\x07'.format(title))

    # Optional extra configuration file, resolved to an absolute path.
    extra = [os.path.abspath(args.cfg)] if args.cfg else []

    listen = (int(args.port), args.address)
    loop = tornado.ioloop.IOLoop.instance()
    setup_app(app=args.MODULE,
              extra_configs=extra,
              ioloop=loop,
              listen=listen)
    loop.start()
def function[serv, parameter[args]]: constant[Serve a rueckenwind application] if <ast.UnaryOp object at 0x7da18dc05300> begin[:] call[name[tornado].autoreload.start, parameter[]] variable[extra] assign[=] list[[]] if call[name[sys].stdout.isatty, parameter[]] begin[:] call[name[sys].stdout.write, parameter[call[constant[]2;rw: {}].format, parameter[call[constant[ ].join, parameter[call[name[sys].argv][<ast.Slice object at 0x7da18ede7eb0>]]]]]]] if name[args].cfg begin[:] call[name[extra].append, parameter[call[name[os].path.abspath, parameter[name[args].cfg]]]] variable[listen] assign[=] tuple[[<ast.Call object at 0x7da18ede7340>, <ast.Attribute object at 0x7da18ede4ee0>]] variable[ioloop] assign[=] call[name[tornado].ioloop.IOLoop.instance, parameter[]] call[name[setup_app], parameter[]] call[name[ioloop].start, parameter[]]
keyword[def] identifier[serv] ( identifier[args] ): literal[string] keyword[if] keyword[not] identifier[args] . identifier[no_debug] : identifier[tornado] . identifier[autoreload] . identifier[start] () identifier[extra] =[] keyword[if] identifier[sys] . identifier[stdout] . identifier[isatty] (): identifier[sys] . identifier[stdout] . identifier[write] ( literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[sys] . identifier[argv] [ literal[int] :]))) keyword[if] identifier[args] . identifier[cfg] : identifier[extra] . identifier[append] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[args] . identifier[cfg] )) identifier[listen] =( identifier[int] ( identifier[args] . identifier[port] ), identifier[args] . identifier[address] ) identifier[ioloop] = identifier[tornado] . identifier[ioloop] . identifier[IOLoop] . identifier[instance] () identifier[setup_app] ( identifier[app] = identifier[args] . identifier[MODULE] , identifier[extra_configs] = identifier[extra] , identifier[ioloop] = identifier[ioloop] , identifier[listen] = identifier[listen] ) identifier[ioloop] . identifier[start] ()
def serv(args): """Serve a rueckenwind application""" if not args.no_debug: tornado.autoreload.start() # depends on [control=['if'], data=[]] extra = [] if sys.stdout.isatty(): # set terminal title sys.stdout.write('\x1b]2;rw: {}\x07'.format(' '.join(sys.argv[2:]))) # depends on [control=['if'], data=[]] if args.cfg: extra.append(os.path.abspath(args.cfg)) # depends on [control=['if'], data=[]] listen = (int(args.port), args.address) ioloop = tornado.ioloop.IOLoop.instance() setup_app(app=args.MODULE, extra_configs=extra, ioloop=ioloop, listen=listen) ioloop.start()
def query(self):
    """
    Returns the query this widget is representing from the tree widget.

    :return     <Query> || <QueryCompound> || None
    """
    # Combine the per-item queries; non-query rows carry the join
    # operator used for the entries that follow them.
    result = Q()
    join_op = 'and'
    tree = self.uiQueryTREE
    for index in range(tree.topLevelItemCount()):
        node = tree.topLevelItem(index)
        if not isinstance(node, XQueryItem):
            join_op = nativestring(node.text(0))
            continue
        if join_op == 'and':
            result &= node.query()
        else:
            result |= node.query()
    return result
def function[query, parameter[self]]: constant[ Returns the query this widget is representing from the tree widget. :return <Query> || <QueryCompound> || None ] variable[q] assign[=] call[name[Q], parameter[]] variable[operator] assign[=] constant[and] for taget[name[i]] in starred[call[name[range], parameter[call[name[self].uiQueryTREE.topLevelItemCount, parameter[]]]]] begin[:] variable[item] assign[=] call[name[self].uiQueryTREE.topLevelItem, parameter[name[i]]] if call[name[isinstance], parameter[name[item], name[XQueryItem]]] begin[:] if compare[name[operator] equal[==] constant[and]] begin[:] <ast.AugAssign object at 0x7da20e9563e0> return[name[q]]
keyword[def] identifier[query] ( identifier[self] ): literal[string] identifier[q] = identifier[Q] () identifier[operator] = literal[string] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[uiQueryTREE] . identifier[topLevelItemCount] ()): identifier[item] = identifier[self] . identifier[uiQueryTREE] . identifier[topLevelItem] ( identifier[i] ) keyword[if] ( identifier[isinstance] ( identifier[item] , identifier[XQueryItem] )): keyword[if] ( identifier[operator] == literal[string] ): identifier[q] &= identifier[item] . identifier[query] () keyword[else] : identifier[q] |= identifier[item] . identifier[query] () keyword[else] : identifier[operator] = identifier[nativestring] ( identifier[item] . identifier[text] ( literal[int] )) keyword[return] identifier[q]
def query(self): """ Returns the query this widget is representing from the tree widget. :return <Query> || <QueryCompound> || None """ # build a query if not searching all q = Q() operator = 'and' for i in range(self.uiQueryTREE.topLevelItemCount()): item = self.uiQueryTREE.topLevelItem(i) if isinstance(item, XQueryItem): if operator == 'and': q &= item.query() # depends on [control=['if'], data=[]] else: q |= item.query() # depends on [control=['if'], data=[]] else: operator = nativestring(item.text(0)) # depends on [control=['for'], data=['i']] return q
def list_buckets(self):
    """
    List all buckets.

    Returns a list of all the buckets owned by the authenticated sender
    of the request.
    """
    # Build the GET request for the service root and parse the XML
    # bucket listing when the response arrives.
    details = self._details(method=b"GET", url_context=self._url_context())
    deferred = self._submit(self._query_factory(details))
    deferred.addCallback(self._parse_list_buckets)
    return deferred
def function[list_buckets, parameter[self]]: constant[ List all buckets. Returns a list of all the buckets owned by the authenticated sender of the request. ] variable[details] assign[=] call[name[self]._details, parameter[]] variable[query] assign[=] call[name[self]._query_factory, parameter[name[details]]] variable[d] assign[=] call[name[self]._submit, parameter[name[query]]] call[name[d].addCallback, parameter[name[self]._parse_list_buckets]] return[name[d]]
keyword[def] identifier[list_buckets] ( identifier[self] ): literal[string] identifier[details] = identifier[self] . identifier[_details] ( identifier[method] = literal[string] , identifier[url_context] = identifier[self] . identifier[_url_context] (), ) identifier[query] = identifier[self] . identifier[_query_factory] ( identifier[details] ) identifier[d] = identifier[self] . identifier[_submit] ( identifier[query] ) identifier[d] . identifier[addCallback] ( identifier[self] . identifier[_parse_list_buckets] ) keyword[return] identifier[d]
def list_buckets(self): """ List all buckets. Returns a list of all the buckets owned by the authenticated sender of the request. """ details = self._details(method=b'GET', url_context=self._url_context()) query = self._query_factory(details) d = self._submit(query) d.addCallback(self._parse_list_buckets) return d
def windowed_r_squared(pos, gn, size=None, start=None, stop=None, step=None,
                       windows=None, fill=np.nan, percentile=50):
    """Summarise linkage disequilibrium in windows over a single
    chromosome/contig.

    Parameters
    ----------
    pos : array_like, int, shape (n_items,)
        The item positions in ascending order, using 1-based coordinates.
    gn : array_like, int8, shape (n_variants, n_samples)
        Diploid genotypes at biallelic variants, coded as the number of
        alternate alleles per call (i.e., 0 = hom ref, 1 = het,
        2 = hom alt).
    size : int, optional
        The window size (number of bases).
    start : int, optional
        The position at which to start (1-based).
    stop : int, optional
        The position at which to stop (1-based).
    step : int, optional
        The distance between start positions of windows. If not given,
        defaults to the window size, i.e., non-overlapping windows.
    windows : array_like, int, shape (n_windows, 2), optional
        Manually specify the windows to use as a sequence of
        (window_start, window_stop) positions, using 1-based coordinates.
        Overrides the size/start/stop/step parameters.
    fill : object, optional
        The value to use where a window is empty, i.e., contains no items.
    percentile : int or sequence of ints, optional
        The percentile or percentiles to calculate within each window.

    Returns
    -------
    out : ndarray, shape (n_windows,)
        The value of the statistic for each window.
    windows : ndarray, int, shape (n_windows, 2)
        The windows used, as an array of (window_start, window_stop)
        positions, using 1-based coordinates.
    counts : ndarray, int, shape (n_windows,)
        The number of items in each window.

    Notes
    -----
    Linkage disequilibrium (r**2) is calculated using the method of
    Rogers and Huff (2008).

    See Also
    --------
    allel.stats.window.windowed_statistic

    """
    # When several percentiles are requested the per-window statistic is
    # a list, so the fill value must be a matching list as well.
    multiple = isinstance(percentile, (list, tuple))
    if multiple:
        fill = [fill for _ in percentile]

    def statistic(gnw):
        r_squared = rogers_huff_r(gnw) ** 2
        if multiple:
            return [np.percentile(r_squared, p) for p in percentile]
        return np.percentile(r_squared, percentile)

    return windowed_statistic(pos, gn, statistic, size, start=start,
                              stop=stop, step=step, windows=windows,
                              fill=fill)
def function[windowed_r_squared, parameter[pos, gn, size, start, stop, step, windows, fill, percentile]]: constant[Summarise linkage disequilibrium in windows over a single chromosome/contig. Parameters ---------- pos : array_like, int, shape (n_items,) The item positions in ascending order, using 1-based coordinates.. gn : array_like, int8, shape (n_variants, n_samples) Diploid genotypes at biallelic variants, coded as the number of alternate alleles per call (i.e., 0 = hom ref, 1 = het, 2 = hom alt). size : int, optional The window size (number of bases). start : int, optional The position at which to start (1-based). stop : int, optional The position at which to stop (1-based). step : int, optional The distance between start positions of windows. If not given, defaults to the window size, i.e., non-overlapping windows. windows : array_like, int, shape (n_windows, 2), optional Manually specify the windows to use as a sequence of (window_start, window_stop) positions, using 1-based coordinates. Overrides the size/start/stop/step parameters. fill : object, optional The value to use where a window is empty, i.e., contains no items. percentile : int or sequence of ints, optional The percentile or percentiles to calculate within each window. Returns ------- out : ndarray, shape (n_windows,) The value of the statistic for each window. windows : ndarray, int, shape (n_windows, 2) The windows used, as an array of (window_start, window_stop) positions, using 1-based coordinates. counts : ndarray, int, shape (n_windows,) The number of items in each window. Notes ----- Linkage disequilibrium (r**2) is calculated using the method of Rogers and Huff (2008). 
See Also -------- allel.stats.window.windowed_statistic ] if call[name[isinstance], parameter[name[percentile], tuple[[<ast.Name object at 0x7da20c795ea0>, <ast.Name object at 0x7da20c794520>]]]] begin[:] variable[fill] assign[=] <ast.ListComp object at 0x7da20c794490> def function[statistic, parameter[gnw]]: variable[r_squared] assign[=] binary_operation[call[name[rogers_huff_r], parameter[name[gnw]]] ** constant[2]] return[<ast.ListComp object at 0x7da2043449d0>] return[call[name[windowed_statistic], parameter[name[pos], name[gn], name[statistic], name[size]]]]
keyword[def] identifier[windowed_r_squared] ( identifier[pos] , identifier[gn] , identifier[size] = keyword[None] , identifier[start] = keyword[None] , identifier[stop] = keyword[None] , identifier[step] = keyword[None] , identifier[windows] = keyword[None] , identifier[fill] = identifier[np] . identifier[nan] , identifier[percentile] = literal[int] ): literal[string] keyword[if] identifier[isinstance] ( identifier[percentile] ,( identifier[list] , identifier[tuple] )): identifier[fill] =[ identifier[fill] keyword[for] identifier[_] keyword[in] identifier[percentile] ] keyword[def] identifier[statistic] ( identifier[gnw] ): identifier[r_squared] = identifier[rogers_huff_r] ( identifier[gnw] )** literal[int] keyword[return] [ identifier[np] . identifier[percentile] ( identifier[r_squared] , identifier[p] ) keyword[for] identifier[p] keyword[in] identifier[percentile] ] keyword[else] : keyword[def] identifier[statistic] ( identifier[gnw] ): identifier[r_squared] = identifier[rogers_huff_r] ( identifier[gnw] )** literal[int] keyword[return] identifier[np] . identifier[percentile] ( identifier[r_squared] , identifier[percentile] ) keyword[return] identifier[windowed_statistic] ( identifier[pos] , identifier[gn] , identifier[statistic] , identifier[size] , identifier[start] = identifier[start] , identifier[stop] = identifier[stop] , identifier[step] = identifier[step] , identifier[windows] = identifier[windows] , identifier[fill] = identifier[fill] )
def windowed_r_squared(pos, gn, size=None, start=None, stop=None, step=None, windows=None, fill=np.nan, percentile=50): """Summarise linkage disequilibrium in windows over a single chromosome/contig. Parameters ---------- pos : array_like, int, shape (n_items,) The item positions in ascending order, using 1-based coordinates.. gn : array_like, int8, shape (n_variants, n_samples) Diploid genotypes at biallelic variants, coded as the number of alternate alleles per call (i.e., 0 = hom ref, 1 = het, 2 = hom alt). size : int, optional The window size (number of bases). start : int, optional The position at which to start (1-based). stop : int, optional The position at which to stop (1-based). step : int, optional The distance between start positions of windows. If not given, defaults to the window size, i.e., non-overlapping windows. windows : array_like, int, shape (n_windows, 2), optional Manually specify the windows to use as a sequence of (window_start, window_stop) positions, using 1-based coordinates. Overrides the size/start/stop/step parameters. fill : object, optional The value to use where a window is empty, i.e., contains no items. percentile : int or sequence of ints, optional The percentile or percentiles to calculate within each window. Returns ------- out : ndarray, shape (n_windows,) The value of the statistic for each window. windows : ndarray, int, shape (n_windows, 2) The windows used, as an array of (window_start, window_stop) positions, using 1-based coordinates. counts : ndarray, int, shape (n_windows,) The number of items in each window. Notes ----- Linkage disequilibrium (r**2) is calculated using the method of Rogers and Huff (2008). 
See Also -------- allel.stats.window.windowed_statistic """ # define the statistic function if isinstance(percentile, (list, tuple)): fill = [fill for _ in percentile] def statistic(gnw): r_squared = rogers_huff_r(gnw) ** 2 return [np.percentile(r_squared, p) for p in percentile] # depends on [control=['if'], data=[]] else: def statistic(gnw): r_squared = rogers_huff_r(gnw) ** 2 return np.percentile(r_squared, percentile) return windowed_statistic(pos, gn, statistic, size, start=start, stop=stop, step=step, windows=windows, fill=fill)
def _string_remove_slice(input_str, start, end): """ Removes portions of a string :param input_str: str, input string :param start: int, end search index :param end: int, start search index :return: str, the cut string """ if 0 <= start < end <= len(input_str): return input_str[:start] + input_str[end:] return input_str
def function[_string_remove_slice, parameter[input_str, start, end]]: constant[ Removes portions of a string :param input_str: str, input string :param start: int, end search index :param end: int, start search index :return: str, the cut string ] if compare[constant[0] less_or_equal[<=] name[start]] begin[:] return[binary_operation[call[name[input_str]][<ast.Slice object at 0x7da18ede76a0>] + call[name[input_str]][<ast.Slice object at 0x7da18ede77c0>]]] return[name[input_str]]
keyword[def] identifier[_string_remove_slice] ( identifier[input_str] , identifier[start] , identifier[end] ): literal[string] keyword[if] literal[int] <= identifier[start] < identifier[end] <= identifier[len] ( identifier[input_str] ): keyword[return] identifier[input_str] [: identifier[start] ]+ identifier[input_str] [ identifier[end] :] keyword[return] identifier[input_str]
def _string_remove_slice(input_str, start, end): """ Removes portions of a string :param input_str: str, input string :param start: int, end search index :param end: int, start search index :return: str, the cut string """ if 0 <= start < end <= len(input_str): return input_str[:start] + input_str[end:] # depends on [control=['if'], data=['start']] return input_str
def cache_get(cache_dir, cache_key, default=None):
    """
    Return the content of a cache item or the given default.

    :param cache_dir: directory holding the cache files
    :param cache_key: file name of the cache entry inside ``cache_dir``
    :param default: value returned when the entry does not exist
    :return: the cached text content as a string, or ``default``
    """
    filename = os.path.join(cache_dir, cache_key)
    # EAFP: open directly instead of checking os.path.isfile() first —
    # the check-then-open pattern was subject to a TOCTOU race if the
    # file was removed between the check and the read. A missing entry
    # or a directory at the path yields the default, matching the old
    # isfile() semantics; other errors (e.g. permissions) still raise.
    try:
        with open(filename, 'r') as f:
            return f.read()
    except (FileNotFoundError, IsADirectoryError):
        return default
def function[cache_get, parameter[cache_dir, cache_key, default]]: constant[ Returns the content of a cache item or the given default ] variable[filename] assign[=] call[name[os].path.join, parameter[name[cache_dir], name[cache_key]]] if call[name[os].path.isfile, parameter[name[filename]]] begin[:] with call[name[open], parameter[name[filename], constant[r]]] begin[:] return[call[name[f].read, parameter[]]] return[name[default]]
keyword[def] identifier[cache_get] ( identifier[cache_dir] , identifier[cache_key] , identifier[default] = keyword[None] ): literal[string] identifier[filename] = identifier[os] . identifier[path] . identifier[join] ( identifier[cache_dir] , identifier[cache_key] ) keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[filename] ): keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[f] : keyword[return] identifier[f] . identifier[read] () keyword[return] identifier[default]
def cache_get(cache_dir, cache_key, default=None): """ Returns the content of a cache item or the given default """ filename = os.path.join(cache_dir, cache_key) if os.path.isfile(filename): with open(filename, 'r') as f: return f.read() # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]] return default
def set_scanner_alert_threshold(self, scanner_ids, alert_threshold):
    """Set the alert threshold for the given policies.

    Applies ``alert_threshold`` to each scanner ID in turn and raises
    ZAPError as soon as the ZAP API reports anything other than 'OK'.
    """
    for scanner_id in scanner_ids:
        self.logger.debug('Setting alert threshold for scanner {0} to {1}'.format(scanner_id, alert_threshold))
        outcome = self.zap.ascan.set_scanner_alert_threshold(scanner_id, alert_threshold)
        if outcome == 'OK':
            continue
        raise ZAPError('Error setting alert threshold for scanner with ID {0}: {1}'.format(scanner_id, outcome))
def function[set_scanner_alert_threshold, parameter[self, scanner_ids, alert_threshold]]: constant[Set the alert theshold for the given policies.] for taget[name[scanner_id]] in starred[name[scanner_ids]] begin[:] call[name[self].logger.debug, parameter[call[constant[Setting alert threshold for scanner {0} to {1}].format, parameter[name[scanner_id], name[alert_threshold]]]]] variable[result] assign[=] call[name[self].zap.ascan.set_scanner_alert_threshold, parameter[name[scanner_id], name[alert_threshold]]] if compare[name[result] not_equal[!=] constant[OK]] begin[:] <ast.Raise object at 0x7da18ede6590>
keyword[def] identifier[set_scanner_alert_threshold] ( identifier[self] , identifier[scanner_ids] , identifier[alert_threshold] ): literal[string] keyword[for] identifier[scanner_id] keyword[in] identifier[scanner_ids] : identifier[self] . identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[scanner_id] , identifier[alert_threshold] )) identifier[result] = identifier[self] . identifier[zap] . identifier[ascan] . identifier[set_scanner_alert_threshold] ( identifier[scanner_id] , identifier[alert_threshold] ) keyword[if] identifier[result] != literal[string] : keyword[raise] identifier[ZAPError] ( literal[string] . identifier[format] ( identifier[scanner_id] , identifier[result] ))
def set_scanner_alert_threshold(self, scanner_ids, alert_threshold): """Set the alert theshold for the given policies.""" for scanner_id in scanner_ids: self.logger.debug('Setting alert threshold for scanner {0} to {1}'.format(scanner_id, alert_threshold)) result = self.zap.ascan.set_scanner_alert_threshold(scanner_id, alert_threshold) if result != 'OK': raise ZAPError('Error setting alert threshold for scanner with ID {0}: {1}'.format(scanner_id, result)) # depends on [control=['if'], data=['result']] # depends on [control=['for'], data=['scanner_id']]
def camel_to_underscore(string):
    """Convert camelcase to lowercase and underscore.

    Recipe from http://stackoverflow.com/a/1176023

    Args:
        string (str): The string to convert.

    Returns:
        str: The converted string.
    """
    # Two passes: first split a capital followed by lowercase letters,
    # then split any remaining lower/upper boundaries, and lowercase.
    partially_converted = FIRST_CAP_RE.sub(r'\1_\2', string)
    fully_converted = ALL_CAP_RE.sub(r'\1_\2', partially_converted)
    return fully_converted.lower()
def function[camel_to_underscore, parameter[string]]: constant[Convert camelcase to lowercase and underscore. Recipe from http://stackoverflow.com/a/1176023 Args: string (str): The string to convert. Returns: str: The converted string. ] variable[string] assign[=] call[name[FIRST_CAP_RE].sub, parameter[constant[\1_\2], name[string]]] return[call[call[name[ALL_CAP_RE].sub, parameter[constant[\1_\2], name[string]]].lower, parameter[]]]
keyword[def] identifier[camel_to_underscore] ( identifier[string] ): literal[string] identifier[string] = identifier[FIRST_CAP_RE] . identifier[sub] ( literal[string] , identifier[string] ) keyword[return] identifier[ALL_CAP_RE] . identifier[sub] ( literal[string] , identifier[string] ). identifier[lower] ()
def camel_to_underscore(string): """Convert camelcase to lowercase and underscore. Recipe from http://stackoverflow.com/a/1176023 Args: string (str): The string to convert. Returns: str: The converted string. """ string = FIRST_CAP_RE.sub('\\1_\\2', string) return ALL_CAP_RE.sub('\\1_\\2', string).lower()
def initial_distribution_samples(self):
    r""" Samples of the initial distribution """
    # Collect the stationary distribution of every sampled HMM into one
    # (nsamples, nstates) array.
    shape = (self.nsamples, self.nstates)
    samples = np.empty(shape, dtype=config.dtype)
    for sample_index in range(self.nsamples):
        hmm = self._sampled_hmms[sample_index]
        samples[sample_index, :] = hmm.stationary_distribution
    return samples
def function[initial_distribution_samples, parameter[self]]: constant[ Samples of the initial distribution ] variable[res] assign[=] call[name[np].empty, parameter[tuple[[<ast.Attribute object at 0x7da20c6c5390>, <ast.Attribute object at 0x7da20c6c44f0>]]]] for taget[name[i]] in starred[call[name[range], parameter[name[self].nsamples]]] begin[:] call[name[res]][tuple[[<ast.Name object at 0x7da20c6c52d0>, <ast.Slice object at 0x7da20c6c7910>]]] assign[=] call[name[self]._sampled_hmms][name[i]].stationary_distribution return[name[res]]
keyword[def] identifier[initial_distribution_samples] ( identifier[self] ): literal[string] identifier[res] = identifier[np] . identifier[empty] (( identifier[self] . identifier[nsamples] , identifier[self] . identifier[nstates] ), identifier[dtype] = identifier[config] . identifier[dtype] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[nsamples] ): identifier[res] [ identifier[i] ,:]= identifier[self] . identifier[_sampled_hmms] [ identifier[i] ]. identifier[stationary_distribution] keyword[return] identifier[res]
def initial_distribution_samples(self): """ Samples of the initial distribution """ res = np.empty((self.nsamples, self.nstates), dtype=config.dtype) for i in range(self.nsamples): res[i, :] = self._sampled_hmms[i].stationary_distribution # depends on [control=['for'], data=['i']] return res
def timetree(params):
    """
    Implement the TreeTime time-tree workflow.

    Reads dates, alignment/VCF and tree from ``params`` (an
    argparse-style namespace of CLI options), runs TreeTime date
    inference, and writes the resulting trees, plots and tables to the
    output directory.

    :param params: parsed command-line arguments
    :return: 0 on success, 1 on any setup or run failure
    """
    # Translate the --relax CLI option into the relaxed-clock parameter
    # expected by TreeTime: None (off), True (defaults), or a dict.
    # NOTE(review): a --relax list of any length other than 0 or 2 leaves
    # relaxed_clock_params unassigned, causing a NameError at the run()
    # call below — consider an explicit error for that case.
    if params.relax is None:
        relaxed_clock_params = None
    elif params.relax==[]:
        relaxed_clock_params=True
    elif len(params.relax)==2:
        relaxed_clock_params={'slack':params.relax[0], 'coupling':params.relax[1]}

    dates = utils.parse_dates(params.dates)
    if len(dates)==0:
        print("No valid dates -- exiting.")
        return 1

    # assure_tree builds a tree if none was supplied; truthy return means failure.
    if assure_tree(params, tmp_dir='timetree_tmp'):
        print("No tree -- exiting.")
        return 1

    outdir = get_outdir(params, '_treetime')

    gtr = create_gtr(params)
    infer_gtr = params.gtr=='infer'

    ###########################################################################
    ### READ IN VCF
    ###########################################################################
    #sets ref and fixed_pi to None if not VCF
    aln, ref, fixed_pi = read_if_vcf(params)
    is_vcf = True if ref is not None else False
    branch_length_mode = params.branch_length_mode
    #variable-site-only trees can have big branch lengths, the auto setting won't work.
    if is_vcf or (params.aln and params.sequence_length):
        if branch_length_mode == 'auto':
            branch_length_mode = 'joint'

    ###########################################################################
    ### SET-UP and RUN
    ###########################################################################
    if params.aln is None and params.sequence_length is None:
        print("one of arguments '--aln' and '--sequence-length' is required.", file=sys.stderr)
        return 1
    myTree = TreeTime(dates=dates, tree=params.tree, ref=ref,
                      aln=aln, gtr=gtr, seq_len=params.sequence_length,
                      verbose=params.verbose)
    myTree.tip_slack=params.tip_slack
    if not myTree.one_mutation:
        print("TreeTime setup failed, exiting")
        return 1

    # coalescent model options
    # --coalescent is either a float (Tc in substitution units, ignored
    # when tiny relative to one mutation) or one of the named models.
    # NOTE(review): the bare except below swallows *any* error from
    # float(), not just ValueError/TypeError — consider narrowing.
    try:
        coalescent = float(params.coalescent)
        if coalescent<10*myTree.one_mutation:
            coalescent = None
    except:
        if params.coalescent in ['opt', 'const', 'skyline']:
            coalescent = params.coalescent
        else:
            print("unknown coalescent model specification, has to be either "
                  "a float, 'opt', 'const' or 'skyline' -- exiting")
            return 1

    # determine whether confidence intervals are to be computed and how the
    # uncertainty in the rate estimate should be treated
    calc_confidence = params.confidence
    if params.clock_std_dev:
        vary_rate = params.clock_std_dev if calc_confidence else False
    elif params.confidence and params.covariation:
        vary_rate = True
    elif params.confidence:
        # Confidence without a rate std-dev or covariance mode is not
        # supported; warn and fall back to a point estimate.
        print("\nOutside of covariance aware mode TreeTime cannot estimate confidence intervals "
              "without specified standard deviation of the clock rate Please specify '--clock-std-dev' "
              "or rerun with '--covariance'. Will proceed without confidence estimation")
        vary_rate = False
        calc_confidence = False
    else:
        vary_rate = False

    # RUN
    root = None if params.keep_root else params.reroot
    success = myTree.run(root=root, relaxed_clock=relaxed_clock_params,
               resolve_polytomies=(not params.keep_polytomies),
               Tc=coalescent, max_iter=params.max_iter,
               fixed_clock_rate=params.clock_rate,
               n_iqd=params.clock_filter,
               time_marginal="assign" if calc_confidence else False,
               vary_rate = vary_rate,
               branch_length_mode = branch_length_mode,
               fixed_pi=fixed_pi,
               use_covariation = params.covariation)
    if success==ttconf.ERROR: # if TreeTime.run failed, exit
        print("\nTreeTime run FAILED: please check above for errors and/or rerun with --verbose 4.\n")
        return 1

    ###########################################################################
    ### OUTPUT and saving of results
    ###########################################################################
    if infer_gtr:
        print('\nInferred GTR model:')
        print(myTree.gtr)

    print(myTree.date2dist)
    basename = get_basename(params, outdir)
    if coalescent in ['skyline', 'opt', 'const']:
        print("Inferred coalescent model")
        if coalescent=='skyline':
            print_save_plot_skyline(myTree, plot=basename+'skyline.pdf', save=basename+'skyline.tsv', screen=True)
        else:
            # Report the optimized coalescent timescale in substitution
            # units, in years, and as an effective population size
            # (assumes 50 generations/year).
            Tc = myTree.merger_model.Tc.y[0]
            print(" --T_c: \t %1.2e \toptimized inverse merger rate in units of substitutions"%Tc)
            print(" --T_c: \t %1.2e \toptimized inverse merger rate in years"%(Tc/myTree.date2dist.clock_rate))
            print(" --N_e: \t %1.2e \tcorresponding 'effective population size' assuming 50 gen/year\n"%(Tc/myTree.date2dist.clock_rate*50))

    # plot
    # Local import keeps matplotlib optional until plotting is needed.
    import matplotlib.pyplot as plt
    from .treetime import plot_vs_years
    leaf_count = myTree.tree.count_terminals()
    # Label tips only for small trees (<30 leaves) unless overridden by
    # the --tip-labels / --no-tip-labels flags.
    label_func = lambda x: (x.name if x.is_terminal() and ((leaf_count<30
                                        and (not params.no_tip_labels)) or params.tip_labels) else '')

    plot_vs_years(myTree, show_confidence=False, label_func=label_func,
                  confidence=0.9 if params.confidence else None)
    tree_fname = (outdir + params.plot_tree)
    plt.savefig(tree_fname)
    print("--- saved tree as \n\t %s\n"%tree_fname)

    plot_rtt(myTree, outdir + params.plot_rtt)
    if params.relax:
        # Dump the branch-specific rate multipliers (gamma) as a TSV.
        fname = outdir+'substitution_rates.tsv'
        print("--- wrote branch specific rates to\n\t %s\n"%fname)
        with open(fname, 'w') as fh:
            fh.write("#node\tclock_length\tmutation_length\trate\tfold_change\n")
            for n in myTree.tree.find_clades(order="preorder"):
                if n==myTree.tree.root:
                    continue
                g = n.branch_length_interpolator.gamma
                fh.write("%s\t%1.3e\t%1.3e\t%1.3e\t%1.2f\n"%(n.name, n.clock_length, n.mutation_length, myTree.date2dist.clock_rate*g, g))

    export_sequences_and_tree(myTree, basename, is_vcf, params.zero_based,
                              timetree=True, confidence=calc_confidence)

    return 0
def function[timetree, parameter[params]]: constant[ implementeing treetime tree ] if compare[name[params].relax is constant[None]] begin[:] variable[relaxed_clock_params] assign[=] constant[None] variable[dates] assign[=] call[name[utils].parse_dates, parameter[name[params].dates]] if compare[call[name[len], parameter[name[dates]]] equal[==] constant[0]] begin[:] call[name[print], parameter[constant[No valid dates -- exiting.]]] return[constant[1]] if call[name[assure_tree], parameter[name[params]]] begin[:] call[name[print], parameter[constant[No tree -- exiting.]]] return[constant[1]] variable[outdir] assign[=] call[name[get_outdir], parameter[name[params], constant[_treetime]]] variable[gtr] assign[=] call[name[create_gtr], parameter[name[params]]] variable[infer_gtr] assign[=] compare[name[params].gtr equal[==] constant[infer]] <ast.Tuple object at 0x7da1b01d91b0> assign[=] call[name[read_if_vcf], parameter[name[params]]] variable[is_vcf] assign[=] <ast.IfExp object at 0x7da1b01d9930> variable[branch_length_mode] assign[=] name[params].branch_length_mode if <ast.BoolOp object at 0x7da1b01d8fa0> begin[:] if compare[name[branch_length_mode] equal[==] constant[auto]] begin[:] variable[branch_length_mode] assign[=] constant[joint] if <ast.BoolOp object at 0x7da1b01d9630> begin[:] call[name[print], parameter[constant[one of arguments '--aln' and '--sequence-length' is required.]]] return[constant[1]] variable[myTree] assign[=] call[name[TreeTime], parameter[]] name[myTree].tip_slack assign[=] name[params].tip_slack if <ast.UnaryOp object at 0x7da1b01da170> begin[:] call[name[print], parameter[constant[TreeTime setup failed, exiting]]] return[constant[1]] <ast.Try object at 0x7da1b01da650> variable[calc_confidence] assign[=] name[params].confidence if name[params].clock_std_dev begin[:] variable[vary_rate] assign[=] <ast.IfExp object at 0x7da1b01dac80> variable[root] assign[=] <ast.IfExp object at 0x7da1b01db970> variable[success] assign[=] call[name[myTree].run, 
parameter[]] if compare[name[success] equal[==] name[ttconf].ERROR] begin[:] call[name[print], parameter[constant[ TreeTime run FAILED: please check above for errors and/or rerun with --verbose 4. ]]] return[constant[1]] if name[infer_gtr] begin[:] call[name[print], parameter[constant[ Inferred GTR model:]]] call[name[print], parameter[name[myTree].gtr]] call[name[print], parameter[name[myTree].date2dist]] variable[basename] assign[=] call[name[get_basename], parameter[name[params], name[outdir]]] if compare[name[coalescent] in list[[<ast.Constant object at 0x7da1b0137af0>, <ast.Constant object at 0x7da1b0137ac0>, <ast.Constant object at 0x7da1b0137a90>]]] begin[:] call[name[print], parameter[constant[Inferred coalescent model]]] if compare[name[coalescent] equal[==] constant[skyline]] begin[:] call[name[print_save_plot_skyline], parameter[name[myTree]]] import module[matplotlib.pyplot] as alias[plt] from relative_module[treetime] import module[plot_vs_years] variable[leaf_count] assign[=] call[name[myTree].tree.count_terminals, parameter[]] variable[label_func] assign[=] <ast.Lambda object at 0x7da1b0216380> call[name[plot_vs_years], parameter[name[myTree]]] variable[tree_fname] assign[=] binary_operation[name[outdir] + name[params].plot_tree] call[name[plt].savefig, parameter[name[tree_fname]]] call[name[print], parameter[binary_operation[constant[--- saved tree as %s ] <ast.Mod object at 0x7da2590d6920> name[tree_fname]]]] call[name[plot_rtt], parameter[name[myTree], binary_operation[name[outdir] + name[params].plot_rtt]]] if name[params].relax begin[:] variable[fname] assign[=] binary_operation[name[outdir] + constant[substitution_rates.tsv]] call[name[print], parameter[binary_operation[constant[--- wrote branch specific rates to %s ] <ast.Mod object at 0x7da2590d6920> name[fname]]]] with call[name[open], parameter[name[fname], constant[w]]] begin[:] call[name[fh].write, parameter[constant[#node clock_length mutation_length rate fold_change ]]] for 
taget[name[n]] in starred[call[name[myTree].tree.find_clades, parameter[]]] begin[:] if compare[name[n] equal[==] name[myTree].tree.root] begin[:] continue variable[g] assign[=] name[n].branch_length_interpolator.gamma call[name[fh].write, parameter[binary_operation[constant[%s %1.3e %1.3e %1.3e %1.2f ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b0186d40>, <ast.Attribute object at 0x7da1b0187790>, <ast.Attribute object at 0x7da1b0186ce0>, <ast.BinOp object at 0x7da1b01845e0>, <ast.Name object at 0x7da1b01870d0>]]]]] call[name[export_sequences_and_tree], parameter[name[myTree], name[basename], name[is_vcf], name[params].zero_based]] return[constant[0]]
keyword[def] identifier[timetree] ( identifier[params] ): literal[string] keyword[if] identifier[params] . identifier[relax] keyword[is] keyword[None] : identifier[relaxed_clock_params] = keyword[None] keyword[elif] identifier[params] . identifier[relax] ==[]: identifier[relaxed_clock_params] = keyword[True] keyword[elif] identifier[len] ( identifier[params] . identifier[relax] )== literal[int] : identifier[relaxed_clock_params] ={ literal[string] : identifier[params] . identifier[relax] [ literal[int] ], literal[string] : identifier[params] . identifier[relax] [ literal[int] ]} identifier[dates] = identifier[utils] . identifier[parse_dates] ( identifier[params] . identifier[dates] ) keyword[if] identifier[len] ( identifier[dates] )== literal[int] : identifier[print] ( literal[string] ) keyword[return] literal[int] keyword[if] identifier[assure_tree] ( identifier[params] , identifier[tmp_dir] = literal[string] ): identifier[print] ( literal[string] ) keyword[return] literal[int] identifier[outdir] = identifier[get_outdir] ( identifier[params] , literal[string] ) identifier[gtr] = identifier[create_gtr] ( identifier[params] ) identifier[infer_gtr] = identifier[params] . identifier[gtr] == literal[string] identifier[aln] , identifier[ref] , identifier[fixed_pi] = identifier[read_if_vcf] ( identifier[params] ) identifier[is_vcf] = keyword[True] keyword[if] identifier[ref] keyword[is] keyword[not] keyword[None] keyword[else] keyword[False] identifier[branch_length_mode] = identifier[params] . identifier[branch_length_mode] keyword[if] identifier[is_vcf] keyword[or] ( identifier[params] . identifier[aln] keyword[and] identifier[params] . identifier[sequence_length] ): keyword[if] identifier[branch_length_mode] == literal[string] : identifier[branch_length_mode] = literal[string] keyword[if] identifier[params] . identifier[aln] keyword[is] keyword[None] keyword[and] identifier[params] . 
identifier[sequence_length] keyword[is] keyword[None] : identifier[print] ( literal[string] , identifier[file] = identifier[sys] . identifier[stderr] ) keyword[return] literal[int] identifier[myTree] = identifier[TreeTime] ( identifier[dates] = identifier[dates] , identifier[tree] = identifier[params] . identifier[tree] , identifier[ref] = identifier[ref] , identifier[aln] = identifier[aln] , identifier[gtr] = identifier[gtr] , identifier[seq_len] = identifier[params] . identifier[sequence_length] , identifier[verbose] = identifier[params] . identifier[verbose] ) identifier[myTree] . identifier[tip_slack] = identifier[params] . identifier[tip_slack] keyword[if] keyword[not] identifier[myTree] . identifier[one_mutation] : identifier[print] ( literal[string] ) keyword[return] literal[int] keyword[try] : identifier[coalescent] = identifier[float] ( identifier[params] . identifier[coalescent] ) keyword[if] identifier[coalescent] < literal[int] * identifier[myTree] . identifier[one_mutation] : identifier[coalescent] = keyword[None] keyword[except] : keyword[if] identifier[params] . identifier[coalescent] keyword[in] [ literal[string] , literal[string] , literal[string] ]: identifier[coalescent] = identifier[params] . identifier[coalescent] keyword[else] : identifier[print] ( literal[string] literal[string] ) keyword[return] literal[int] identifier[calc_confidence] = identifier[params] . identifier[confidence] keyword[if] identifier[params] . identifier[clock_std_dev] : identifier[vary_rate] = identifier[params] . identifier[clock_std_dev] keyword[if] identifier[calc_confidence] keyword[else] keyword[False] keyword[elif] identifier[params] . identifier[confidence] keyword[and] identifier[params] . identifier[covariation] : identifier[vary_rate] = keyword[True] keyword[elif] identifier[params] . 
identifier[confidence] : identifier[print] ( literal[string] literal[string] literal[string] ) identifier[vary_rate] = keyword[False] identifier[calc_confidence] = keyword[False] keyword[else] : identifier[vary_rate] = keyword[False] identifier[root] = keyword[None] keyword[if] identifier[params] . identifier[keep_root] keyword[else] identifier[params] . identifier[reroot] identifier[success] = identifier[myTree] . identifier[run] ( identifier[root] = identifier[root] , identifier[relaxed_clock] = identifier[relaxed_clock_params] , identifier[resolve_polytomies] =( keyword[not] identifier[params] . identifier[keep_polytomies] ), identifier[Tc] = identifier[coalescent] , identifier[max_iter] = identifier[params] . identifier[max_iter] , identifier[fixed_clock_rate] = identifier[params] . identifier[clock_rate] , identifier[n_iqd] = identifier[params] . identifier[clock_filter] , identifier[time_marginal] = literal[string] keyword[if] identifier[calc_confidence] keyword[else] keyword[False] , identifier[vary_rate] = identifier[vary_rate] , identifier[branch_length_mode] = identifier[branch_length_mode] , identifier[fixed_pi] = identifier[fixed_pi] , identifier[use_covariation] = identifier[params] . identifier[covariation] ) keyword[if] identifier[success] == identifier[ttconf] . identifier[ERROR] : identifier[print] ( literal[string] ) keyword[return] literal[int] keyword[if] identifier[infer_gtr] : identifier[print] ( literal[string] ) identifier[print] ( identifier[myTree] . identifier[gtr] ) identifier[print] ( identifier[myTree] . 
identifier[date2dist] ) identifier[basename] = identifier[get_basename] ( identifier[params] , identifier[outdir] ) keyword[if] identifier[coalescent] keyword[in] [ literal[string] , literal[string] , literal[string] ]: identifier[print] ( literal[string] ) keyword[if] identifier[coalescent] == literal[string] : identifier[print_save_plot_skyline] ( identifier[myTree] , identifier[plot] = identifier[basename] + literal[string] , identifier[save] = identifier[basename] + literal[string] , identifier[screen] = keyword[True] ) keyword[else] : identifier[Tc] = identifier[myTree] . identifier[merger_model] . identifier[Tc] . identifier[y] [ literal[int] ] identifier[print] ( literal[string] % identifier[Tc] ) identifier[print] ( literal[string] %( identifier[Tc] / identifier[myTree] . identifier[date2dist] . identifier[clock_rate] )) identifier[print] ( literal[string] %( identifier[Tc] / identifier[myTree] . identifier[date2dist] . identifier[clock_rate] * literal[int] )) keyword[import] identifier[matplotlib] . identifier[pyplot] keyword[as] identifier[plt] keyword[from] . identifier[treetime] keyword[import] identifier[plot_vs_years] identifier[leaf_count] = identifier[myTree] . identifier[tree] . identifier[count_terminals] () identifier[label_func] = keyword[lambda] identifier[x] :( identifier[x] . identifier[name] keyword[if] identifier[x] . identifier[is_terminal] () keyword[and] (( identifier[leaf_count] < literal[int] keyword[and] ( keyword[not] identifier[params] . identifier[no_tip_labels] )) keyword[or] identifier[params] . identifier[tip_labels] ) keyword[else] literal[string] ) identifier[plot_vs_years] ( identifier[myTree] , identifier[show_confidence] = keyword[False] , identifier[label_func] = identifier[label_func] , identifier[confidence] = literal[int] keyword[if] identifier[params] . identifier[confidence] keyword[else] keyword[None] ) identifier[tree_fname] =( identifier[outdir] + identifier[params] . identifier[plot_tree] ) identifier[plt] . 
identifier[savefig] ( identifier[tree_fname] ) identifier[print] ( literal[string] % identifier[tree_fname] ) identifier[plot_rtt] ( identifier[myTree] , identifier[outdir] + identifier[params] . identifier[plot_rtt] ) keyword[if] identifier[params] . identifier[relax] : identifier[fname] = identifier[outdir] + literal[string] identifier[print] ( literal[string] % identifier[fname] ) keyword[with] identifier[open] ( identifier[fname] , literal[string] ) keyword[as] identifier[fh] : identifier[fh] . identifier[write] ( literal[string] ) keyword[for] identifier[n] keyword[in] identifier[myTree] . identifier[tree] . identifier[find_clades] ( identifier[order] = literal[string] ): keyword[if] identifier[n] == identifier[myTree] . identifier[tree] . identifier[root] : keyword[continue] identifier[g] = identifier[n] . identifier[branch_length_interpolator] . identifier[gamma] identifier[fh] . identifier[write] ( literal[string] %( identifier[n] . identifier[name] , identifier[n] . identifier[clock_length] , identifier[n] . identifier[mutation_length] , identifier[myTree] . identifier[date2dist] . identifier[clock_rate] * identifier[g] , identifier[g] )) identifier[export_sequences_and_tree] ( identifier[myTree] , identifier[basename] , identifier[is_vcf] , identifier[params] . identifier[zero_based] , identifier[timetree] = keyword[True] , identifier[confidence] = identifier[calc_confidence] ) keyword[return] literal[int]
def timetree(params): """ implementeing treetime tree """ if params.relax is None: relaxed_clock_params = None # depends on [control=['if'], data=[]] elif params.relax == []: relaxed_clock_params = True # depends on [control=['if'], data=[]] elif len(params.relax) == 2: relaxed_clock_params = {'slack': params.relax[0], 'coupling': params.relax[1]} # depends on [control=['if'], data=[]] dates = utils.parse_dates(params.dates) if len(dates) == 0: print('No valid dates -- exiting.') return 1 # depends on [control=['if'], data=[]] if assure_tree(params, tmp_dir='timetree_tmp'): print('No tree -- exiting.') return 1 # depends on [control=['if'], data=[]] outdir = get_outdir(params, '_treetime') gtr = create_gtr(params) infer_gtr = params.gtr == 'infer' ########################################################################### ### READ IN VCF ########################################################################### #sets ref and fixed_pi to None if not VCF (aln, ref, fixed_pi) = read_if_vcf(params) is_vcf = True if ref is not None else False branch_length_mode = params.branch_length_mode #variable-site-only trees can have big branch lengths, the auto setting won't work. 
if is_vcf or (params.aln and params.sequence_length): if branch_length_mode == 'auto': branch_length_mode = 'joint' # depends on [control=['if'], data=['branch_length_mode']] # depends on [control=['if'], data=[]] ########################################################################### ### SET-UP and RUN ########################################################################### if params.aln is None and params.sequence_length is None: print("one of arguments '--aln' and '--sequence-length' is required.", file=sys.stderr) return 1 # depends on [control=['if'], data=[]] myTree = TreeTime(dates=dates, tree=params.tree, ref=ref, aln=aln, gtr=gtr, seq_len=params.sequence_length, verbose=params.verbose) myTree.tip_slack = params.tip_slack if not myTree.one_mutation: print('TreeTime setup failed, exiting') return 1 # depends on [control=['if'], data=[]] # coalescent model options try: coalescent = float(params.coalescent) if coalescent < 10 * myTree.one_mutation: coalescent = None # depends on [control=['if'], data=['coalescent']] # depends on [control=['try'], data=[]] except: if params.coalescent in ['opt', 'const', 'skyline']: coalescent = params.coalescent # depends on [control=['if'], data=[]] else: print("unknown coalescent model specification, has to be either a float, 'opt', 'const' or 'skyline' -- exiting") return 1 # depends on [control=['except'], data=[]] # determine whether confidence intervals are to be computed and how the # uncertainty in the rate estimate should be treated calc_confidence = params.confidence if params.clock_std_dev: vary_rate = params.clock_std_dev if calc_confidence else False # depends on [control=['if'], data=[]] elif params.confidence and params.covariation: vary_rate = True # depends on [control=['if'], data=[]] elif params.confidence: print("\nOutside of covariance aware mode TreeTime cannot estimate confidence intervals without specified standard deviation of the clock rate Please specify '--clock-std-dev' or rerun with 
'--covariance'. Will proceed without confidence estimation") vary_rate = False calc_confidence = False # depends on [control=['if'], data=[]] else: vary_rate = False # RUN root = None if params.keep_root else params.reroot success = myTree.run(root=root, relaxed_clock=relaxed_clock_params, resolve_polytomies=not params.keep_polytomies, Tc=coalescent, max_iter=params.max_iter, fixed_clock_rate=params.clock_rate, n_iqd=params.clock_filter, time_marginal='assign' if calc_confidence else False, vary_rate=vary_rate, branch_length_mode=branch_length_mode, fixed_pi=fixed_pi, use_covariation=params.covariation) if success == ttconf.ERROR: # if TreeTime.run failed, exit print('\nTreeTime run FAILED: please check above for errors and/or rerun with --verbose 4.\n') return 1 # depends on [control=['if'], data=[]] ########################################################################### ### OUTPUT and saving of results ########################################################################### if infer_gtr: print('\nInferred GTR model:') print(myTree.gtr) # depends on [control=['if'], data=[]] print(myTree.date2dist) basename = get_basename(params, outdir) if coalescent in ['skyline', 'opt', 'const']: print('Inferred coalescent model') if coalescent == 'skyline': print_save_plot_skyline(myTree, plot=basename + 'skyline.pdf', save=basename + 'skyline.tsv', screen=True) # depends on [control=['if'], data=[]] else: Tc = myTree.merger_model.Tc.y[0] print(' --T_c: \t %1.2e \toptimized inverse merger rate in units of substitutions' % Tc) print(' --T_c: \t %1.2e \toptimized inverse merger rate in years' % (Tc / myTree.date2dist.clock_rate)) print(" --N_e: \t %1.2e \tcorresponding 'effective population size' assuming 50 gen/year\n" % (Tc / myTree.date2dist.clock_rate * 50)) # depends on [control=['if'], data=['coalescent']] # plot import matplotlib.pyplot as plt from .treetime import plot_vs_years leaf_count = myTree.tree.count_terminals() label_func = lambda x: x.name if 
x.is_terminal() and (leaf_count < 30 and (not params.no_tip_labels) or params.tip_labels) else '' plot_vs_years(myTree, show_confidence=False, label_func=label_func, confidence=0.9 if params.confidence else None) tree_fname = outdir + params.plot_tree plt.savefig(tree_fname) print('--- saved tree as \n\t %s\n' % tree_fname) plot_rtt(myTree, outdir + params.plot_rtt) if params.relax: fname = outdir + 'substitution_rates.tsv' print('--- wrote branch specific rates to\n\t %s\n' % fname) with open(fname, 'w') as fh: fh.write('#node\tclock_length\tmutation_length\trate\tfold_change\n') for n in myTree.tree.find_clades(order='preorder'): if n == myTree.tree.root: continue # depends on [control=['if'], data=[]] g = n.branch_length_interpolator.gamma fh.write('%s\t%1.3e\t%1.3e\t%1.3e\t%1.2f\n' % (n.name, n.clock_length, n.mutation_length, myTree.date2dist.clock_rate * g, g)) # depends on [control=['for'], data=['n']] # depends on [control=['with'], data=['fh']] # depends on [control=['if'], data=[]] export_sequences_and_tree(myTree, basename, is_vcf, params.zero_based, timetree=True, confidence=calc_confidence) return 0
def get_objective(self, objective_id):
    """Gets the ``Objective`` specified by its ``Id``.

    In plenary mode, the exact ``Id`` is found or a ``NotFound``
    results. Otherwise, the returned ``Objective`` may have a different
    ``Id`` than requested, such as the case where a duplicate ``Id`` was
    assigned to an ``Objective`` and retained for compatibility.

    arg:    objective_id (osid.id.Id): ``Id`` of the ``Objective``
    return: (osid.learning.Objective) - the objective
    raise:  NotFound - ``objective_id`` not found
    raise:  NullArgument - ``objective_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method is must be implemented.*

    """
    # Implemented from template for
    # osid.resource.ResourceLookupSession.get_resource
    # NOTE: This implementation currently ignores plenary view
    identifier = self._get_id(objective_id, 'learning').get_identifier()
    # Build the lookup filter: match by _id, then layer on the
    # session's view filter (view-filter keys take precedence).
    query = {'_id': ObjectId(identifier)}
    query.update(self._view_filter())
    collection = JSONClientValidated('learning',
                                     collection='Objective',
                                     runtime=self._runtime)
    result = collection.find_one(query)
    return objects.Objective(osid_object_map=result,
                             runtime=self._runtime,
                             proxy=self._proxy)
def function[get_objective, parameter[self, objective_id]]: constant[Gets the ``Objective`` specified by its ``Id``. In plenary mode, the exact ``Id`` is found or a ``NotFound`` results. Otherwise, the returned ``Objective`` may have a different ``Id`` than requested, such as the case where a duplicate ``Id`` was assigned to an ``Objective`` and retained for compatibility. arg: objective_id (osid.id.Id): ``Id`` of the ``Objective`` return: (osid.learning.Objective) - the objective raise: NotFound - ``objective_id`` not found raise: NullArgument - ``objective_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method is must be implemented.* ] variable[collection] assign[=] call[name[JSONClientValidated], parameter[constant[learning]]] variable[result] assign[=] call[name[collection].find_one, parameter[call[name[dict], parameter[dictionary[[<ast.Constant object at 0x7da20c6a8460>], [<ast.Call object at 0x7da20c6aaa40>]]]]]] return[call[name[objects].Objective, parameter[]]]
keyword[def] identifier[get_objective] ( identifier[self] , identifier[objective_id] ): literal[string] identifier[collection] = identifier[JSONClientValidated] ( literal[string] , identifier[collection] = literal[string] , identifier[runtime] = identifier[self] . identifier[_runtime] ) identifier[result] = identifier[collection] . identifier[find_one] ( identifier[dict] ({ literal[string] : identifier[ObjectId] ( identifier[self] . identifier[_get_id] ( identifier[objective_id] , literal[string] ). identifier[get_identifier] ())}, ** identifier[self] . identifier[_view_filter] ())) keyword[return] identifier[objects] . identifier[Objective] ( identifier[osid_object_map] = identifier[result] , identifier[runtime] = identifier[self] . identifier[_runtime] , identifier[proxy] = identifier[self] . identifier[_proxy] )
def get_objective(self, objective_id): """Gets the ``Objective`` specified by its ``Id``. In plenary mode, the exact ``Id`` is found or a ``NotFound`` results. Otherwise, the returned ``Objective`` may have a different ``Id`` than requested, such as the case where a duplicate ``Id`` was assigned to an ``Objective`` and retained for compatibility. arg: objective_id (osid.id.Id): ``Id`` of the ``Objective`` return: (osid.learning.Objective) - the objective raise: NotFound - ``objective_id`` not found raise: NullArgument - ``objective_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method is must be implemented.* """ # Implemented from template for # osid.resource.ResourceLookupSession.get_resource # NOTE: This implementation currently ignores plenary view collection = JSONClientValidated('learning', collection='Objective', runtime=self._runtime) result = collection.find_one(dict({'_id': ObjectId(self._get_id(objective_id, 'learning').get_identifier())}, **self._view_filter())) return objects.Objective(osid_object_map=result, runtime=self._runtime, proxy=self._proxy)
def remove_parameter(self, name):
    """ Remove the specified parameter from this query.

    Removal is a no-op when *name* is not present.

    :param name: name of a parameter to remove
    :return: None
    """
    # pop() with a default replaces the separate membership test:
    # one dict lookup instead of two, and still silent for missing keys.
    self.__query.pop(name, None)
def function[remove_parameter, parameter[self, name]]: constant[ Remove the specified parameter from this query :param name: name of a parameter to remove :return: None ] if compare[name[name] in name[self].__query] begin[:] call[name[self].__query.pop, parameter[name[name]]]
keyword[def] identifier[remove_parameter] ( identifier[self] , identifier[name] ): literal[string] keyword[if] identifier[name] keyword[in] identifier[self] . identifier[__query] : identifier[self] . identifier[__query] . identifier[pop] ( identifier[name] )
def remove_parameter(self, name): """ Remove the specified parameter from this query :param name: name of a parameter to remove :return: None """ if name in self.__query: self.__query.pop(name) # depends on [control=['if'], data=['name']]
def scansion_prepare(self, meter=None, conscious=False):
    """Print out header column for line-scansions for a given meter.

    :param meter: meter whose constraints label the header columns;
        when omitted, fall back to a meter from the cached best parses
        (``_Text__bestparses``), returning silently if none exist.
    :param conscious: forwarded to ``self.om`` when emitting the header.
    """
    import prosodic
    config = prosodic.config
    if not meter:
        if not hasattr(self, '_Text__bestparses'):
            return
        x = getattr(self, '_Text__bestparses')
        if not x:
            return
        # BUG FIX: dict views are not indexable on Python 3, so the
        # old ``x.keys()[0]`` raised TypeError; take the first key via
        # an iterator instead.
        meter = next(iter(x))
    ckeys = "\t".join(sorted([str(x) for x in meter.constraints]))
    self.om("\t".join([makeminlength(str("text"), config['linelen']),
                       makeminlength(str("parse"), config['linelen']),
                       "meter", "num_parses", "num_viols", "score_viols", ckeys]),
            conscious=conscious)
def function[scansion_prepare, parameter[self, meter, conscious]]: constant[Print out header column for line-scansions for a given meter. ] import module[prosodic] variable[config] assign[=] name[prosodic].config if <ast.UnaryOp object at 0x7da18f00dcf0> begin[:] if <ast.UnaryOp object at 0x7da18f00c640> begin[:] return[None] variable[x] assign[=] call[name[getattr], parameter[name[self], constant[_Text__bestparses]]] if <ast.UnaryOp object at 0x7da18f00f070> begin[:] return[None] variable[meter] assign[=] call[call[name[x].keys, parameter[]]][constant[0]] variable[ckeys] assign[=] call[constant[ ].join, parameter[call[name[sorted], parameter[<ast.ListComp object at 0x7da18f00d4b0>]]]] call[name[self].om, parameter[call[constant[ ].join, parameter[list[[<ast.Call object at 0x7da18f00d780>, <ast.Call object at 0x7da18f00c820>, <ast.Constant object at 0x7da18f00eb90>, <ast.Constant object at 0x7da18f00c400>, <ast.Constant object at 0x7da18f00d450>, <ast.Constant object at 0x7da18f00ec80>, <ast.Name object at 0x7da18f00f400>]]]]]]
keyword[def] identifier[scansion_prepare] ( identifier[self] , identifier[meter] = keyword[None] , identifier[conscious] = keyword[False] ): literal[string] keyword[import] identifier[prosodic] identifier[config] = identifier[prosodic] . identifier[config] keyword[if] keyword[not] identifier[meter] : keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ): keyword[return] identifier[x] = identifier[getattr] ( identifier[self] , literal[string] ) keyword[if] keyword[not] identifier[x] . identifier[keys] (): keyword[return] identifier[meter] = identifier[x] . identifier[keys] ()[ literal[int] ] identifier[ckeys] = literal[string] . identifier[join] ( identifier[sorted] ([ identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[meter] . identifier[constraints] ])) identifier[self] . identifier[om] ( literal[string] . identifier[join] ([ identifier[makeminlength] ( identifier[str] ( literal[string] ), identifier[config] [ literal[string] ]), identifier[makeminlength] ( identifier[str] ( literal[string] ), identifier[config] [ literal[string] ]), literal[string] , literal[string] , literal[string] , literal[string] , identifier[ckeys] ]), identifier[conscious] = identifier[conscious] )
def scansion_prepare(self, meter=None, conscious=False): """Print out header column for line-scansions for a given meter. """ import prosodic config = prosodic.config if not meter: if not hasattr(self, '_Text__bestparses'): return # depends on [control=['if'], data=[]] x = getattr(self, '_Text__bestparses') if not x.keys(): return # depends on [control=['if'], data=[]] meter = x.keys()[0] # depends on [control=['if'], data=[]] ckeys = '\t'.join(sorted([str(x) for x in meter.constraints])) self.om('\t'.join([makeminlength(str('text'), config['linelen']), makeminlength(str('parse'), config['linelen']), 'meter', 'num_parses', 'num_viols', 'score_viols', ckeys]), conscious=conscious)
def _rename_nvram_file(self):
    """
    Before starting the VM, rename the nvram and vlan.dat files
    with the correct IOU application identifier.
    """
    def _matches(pattern):
        # All files in the working directory matching the glob pattern.
        return glob.glob(os.path.join(glob.escape(self.working_dir), pattern))

    # nvram file(s) take the id-specific name computed by _nvram_file().
    nvram_destination = self._nvram_file()
    for source in _matches("nvram_*"):
        shutil.move(source, nvram_destination)

    # TODO: check if process is really dead etc.
    # vlan.dat is renamed to carry the zero-padded application id.
    vlan_destination = os.path.join(self.working_dir,
                                    "vlan.dat-{:05d}".format(self.application_id))
    for source in _matches("vlan.dat-*"):
        shutil.move(source, vlan_destination)
def function[_rename_nvram_file, parameter[self]]: constant[ Before starting the VM, rename the nvram and vlan.dat files with the correct IOU application identifier. ] variable[destination] assign[=] call[name[self]._nvram_file, parameter[]] for taget[name[file_path]] in starred[call[name[glob].glob, parameter[call[name[os].path.join, parameter[call[name[glob].escape, parameter[name[self].working_dir]], constant[nvram_*]]]]]] begin[:] call[name[shutil].move, parameter[name[file_path], name[destination]]] variable[destination] assign[=] call[name[os].path.join, parameter[name[self].working_dir, call[constant[vlan.dat-{:05d}].format, parameter[name[self].application_id]]]] for taget[name[file_path]] in starred[call[name[glob].glob, parameter[call[name[os].path.join, parameter[call[name[glob].escape, parameter[name[self].working_dir]], constant[vlan.dat-*]]]]]] begin[:] call[name[shutil].move, parameter[name[file_path], name[destination]]]
keyword[def] identifier[_rename_nvram_file] ( identifier[self] ): literal[string] identifier[destination] = identifier[self] . identifier[_nvram_file] () keyword[for] identifier[file_path] keyword[in] identifier[glob] . identifier[glob] ( identifier[os] . identifier[path] . identifier[join] ( identifier[glob] . identifier[escape] ( identifier[self] . identifier[working_dir] ), literal[string] )): identifier[shutil] . identifier[move] ( identifier[file_path] , identifier[destination] ) identifier[destination] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[working_dir] , literal[string] . identifier[format] ( identifier[self] . identifier[application_id] )) keyword[for] identifier[file_path] keyword[in] identifier[glob] . identifier[glob] ( identifier[os] . identifier[path] . identifier[join] ( identifier[glob] . identifier[escape] ( identifier[self] . identifier[working_dir] ), literal[string] )): identifier[shutil] . identifier[move] ( identifier[file_path] , identifier[destination] )
def _rename_nvram_file(self): """ Before starting the VM, rename the nvram and vlan.dat files with the correct IOU application identifier. """ destination = self._nvram_file() for file_path in glob.glob(os.path.join(glob.escape(self.working_dir), 'nvram_*')): shutil.move(file_path, destination) # depends on [control=['for'], data=['file_path']] destination = os.path.join(self.working_dir, 'vlan.dat-{:05d}'.format(self.application_id)) for file_path in glob.glob(os.path.join(glob.escape(self.working_dir), 'vlan.dat-*')): shutil.move(file_path, destination) # depends on [control=['for'], data=['file_path']]
def hexblock_dword(cls, data, address=None, bits=None,
                   separator=' ', width=4):
    """
    Dump a block of hexadecimal DWORDs from binary data.

    @type  data: str
    @param data: Binary data.

    @type  address: str
    @param address: Memory address where the data was read from.

    @type  bits: int
    @param bits:
        (Optional) Number of bits of the target architecture.
        The default is platform dependent. See: L{HexDump.address_size}

    @type  separator: str
    @param separator:
        Separator between the hexadecimal representation of each DWORD.

    @type  width: int
    @param width:
        (Optional) Maximum number of DWORDs to convert per text line.

    @rtype:  str
    @return: Multiline output text.
    """
    # A DWORD spans 4 bytes, so the per-line byte width is width * 4.
    bytes_per_line = width * 4
    return cls.hexblock_cb(cls.hexa_dword, data, address, bits,
                           bytes_per_line,
                           cb_kwargs={'separator': separator})
def function[hexblock_dword, parameter[cls, data, address, bits, separator, width]]: constant[ Dump a block of hexadecimal DWORDs from binary data. @type data: str @param data: Binary data. @type address: str @param address: Memory address where the data was read from. @type bits: int @param bits: (Optional) Number of bits of the target architecture. The default is platform dependent. See: L{HexDump.address_size} @type separator: str @param separator: Separator between the hexadecimal representation of each DWORD. @type width: int @param width: (Optional) Maximum number of DWORDs to convert per text line. @rtype: str @return: Multiline output text. ] return[call[name[cls].hexblock_cb, parameter[name[cls].hexa_dword, name[data], name[address], name[bits], binary_operation[name[width] * constant[4]]]]]
keyword[def] identifier[hexblock_dword] ( identifier[cls] , identifier[data] , identifier[address] = keyword[None] , identifier[bits] = keyword[None] , identifier[separator] = literal[string] , identifier[width] = literal[int] ): literal[string] keyword[return] identifier[cls] . identifier[hexblock_cb] ( identifier[cls] . identifier[hexa_dword] , identifier[data] , identifier[address] , identifier[bits] , identifier[width] * literal[int] , identifier[cb_kwargs] ={ literal[string] : identifier[separator] })
def hexblock_dword(cls, data, address=None, bits=None, separator=' ', width=4): """ Dump a block of hexadecimal DWORDs from binary data. @type data: str @param data: Binary data. @type address: str @param address: Memory address where the data was read from. @type bits: int @param bits: (Optional) Number of bits of the target architecture. The default is platform dependent. See: L{HexDump.address_size} @type separator: str @param separator: Separator between the hexadecimal representation of each DWORD. @type width: int @param width: (Optional) Maximum number of DWORDs to convert per text line. @rtype: str @return: Multiline output text. """ return cls.hexblock_cb(cls.hexa_dword, data, address, bits, width * 4, cb_kwargs={'separator': separator})
def _prepare_quote(quote, author, max_len=78): """This function processes a quote and returns a string that is ready to be used in the fancy prompt. """ quote = quote.split(' ') max_len -= 6 lines = [] cur_line = [] def _len(line): return sum(len(elt) for elt in line) + len(line) - 1 while quote: if not cur_line or (_len(cur_line) + len(quote[0]) - 1 <= max_len): cur_line.append(quote.pop(0)) continue lines.append(' | %s' % ' '.join(cur_line)) cur_line = [] if cur_line: lines.append(' | %s' % ' '.join(cur_line)) cur_line = [] lines.append(' | %s-- %s' % (" " * (max_len - len(author) - 5), author)) return lines
def function[_prepare_quote, parameter[quote, author, max_len]]: constant[This function processes a quote and returns a string that is ready to be used in the fancy prompt. ] variable[quote] assign[=] call[name[quote].split, parameter[constant[ ]]] <ast.AugAssign object at 0x7da1b1f941f0> variable[lines] assign[=] list[[]] variable[cur_line] assign[=] list[[]] def function[_len, parameter[line]]: return[binary_operation[binary_operation[call[name[sum], parameter[<ast.GeneratorExp object at 0x7da1b1f94be0>]] + call[name[len], parameter[name[line]]]] - constant[1]]] while name[quote] begin[:] if <ast.BoolOp object at 0x7da1b1f96c50> begin[:] call[name[cur_line].append, parameter[call[name[quote].pop, parameter[constant[0]]]]] continue call[name[lines].append, parameter[binary_operation[constant[ | %s] <ast.Mod object at 0x7da2590d6920> call[constant[ ].join, parameter[name[cur_line]]]]]] variable[cur_line] assign[=] list[[]] if name[cur_line] begin[:] call[name[lines].append, parameter[binary_operation[constant[ | %s] <ast.Mod object at 0x7da2590d6920> call[constant[ ].join, parameter[name[cur_line]]]]]] variable[cur_line] assign[=] list[[]] call[name[lines].append, parameter[binary_operation[constant[ | %s-- %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BinOp object at 0x7da1b1f94f70>, <ast.Name object at 0x7da1b1f96410>]]]]] return[name[lines]]
keyword[def] identifier[_prepare_quote] ( identifier[quote] , identifier[author] , identifier[max_len] = literal[int] ): literal[string] identifier[quote] = identifier[quote] . identifier[split] ( literal[string] ) identifier[max_len] -= literal[int] identifier[lines] =[] identifier[cur_line] =[] keyword[def] identifier[_len] ( identifier[line] ): keyword[return] identifier[sum] ( identifier[len] ( identifier[elt] ) keyword[for] identifier[elt] keyword[in] identifier[line] )+ identifier[len] ( identifier[line] )- literal[int] keyword[while] identifier[quote] : keyword[if] keyword[not] identifier[cur_line] keyword[or] ( identifier[_len] ( identifier[cur_line] )+ identifier[len] ( identifier[quote] [ literal[int] ])- literal[int] <= identifier[max_len] ): identifier[cur_line] . identifier[append] ( identifier[quote] . identifier[pop] ( literal[int] )) keyword[continue] identifier[lines] . identifier[append] ( literal[string] % literal[string] . identifier[join] ( identifier[cur_line] )) identifier[cur_line] =[] keyword[if] identifier[cur_line] : identifier[lines] . identifier[append] ( literal[string] % literal[string] . identifier[join] ( identifier[cur_line] )) identifier[cur_line] =[] identifier[lines] . identifier[append] ( literal[string] %( literal[string] *( identifier[max_len] - identifier[len] ( identifier[author] )- literal[int] ), identifier[author] )) keyword[return] identifier[lines]
def _prepare_quote(quote, author, max_len=78): """This function processes a quote and returns a string that is ready to be used in the fancy prompt. """ quote = quote.split(' ') max_len -= 6 lines = [] cur_line = [] def _len(line): return sum((len(elt) for elt in line)) + len(line) - 1 while quote: if not cur_line or _len(cur_line) + len(quote[0]) - 1 <= max_len: cur_line.append(quote.pop(0)) continue # depends on [control=['if'], data=[]] lines.append(' | %s' % ' '.join(cur_line)) cur_line = [] # depends on [control=['while'], data=[]] if cur_line: lines.append(' | %s' % ' '.join(cur_line)) cur_line = [] # depends on [control=['if'], data=[]] lines.append(' | %s-- %s' % (' ' * (max_len - len(author) - 5), author)) return lines
def concat(self, frames, axis=1): """ Append multiple H2OFrames to this frame, column-wise or row-wise. :param List[H2OFrame] frames: list of frames that should be appended to the current frame. :param int axis: if 1 then append column-wise (default), if 0 then append row-wise. :returns: an H2OFrame of the combined datasets. """ if len(frames) == 0: raise ValueError("Input list of frames is empty! Nothing to concat.") if axis == 1: df = self.cbind(frames) else: df = self.rbind(frames) return df
def function[concat, parameter[self, frames, axis]]: constant[ Append multiple H2OFrames to this frame, column-wise or row-wise. :param List[H2OFrame] frames: list of frames that should be appended to the current frame. :param int axis: if 1 then append column-wise (default), if 0 then append row-wise. :returns: an H2OFrame of the combined datasets. ] if compare[call[name[len], parameter[name[frames]]] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da1b03712a0> if compare[name[axis] equal[==] constant[1]] begin[:] variable[df] assign[=] call[name[self].cbind, parameter[name[frames]]] return[name[df]]
keyword[def] identifier[concat] ( identifier[self] , identifier[frames] , identifier[axis] = literal[int] ): literal[string] keyword[if] identifier[len] ( identifier[frames] )== literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[axis] == literal[int] : identifier[df] = identifier[self] . identifier[cbind] ( identifier[frames] ) keyword[else] : identifier[df] = identifier[self] . identifier[rbind] ( identifier[frames] ) keyword[return] identifier[df]
def concat(self, frames, axis=1): """ Append multiple H2OFrames to this frame, column-wise or row-wise. :param List[H2OFrame] frames: list of frames that should be appended to the current frame. :param int axis: if 1 then append column-wise (default), if 0 then append row-wise. :returns: an H2OFrame of the combined datasets. """ if len(frames) == 0: raise ValueError('Input list of frames is empty! Nothing to concat.') # depends on [control=['if'], data=[]] if axis == 1: df = self.cbind(frames) # depends on [control=['if'], data=[]] else: df = self.rbind(frames) return df
def wcs_add_energy_axis(wcs, energies): """Copy a WCS object, and add on the energy axis. Parameters ---------- wcs : `~astropy.wcs.WCS` WCS energies : array-like Array of energies. """ if wcs.naxis != 2: raise Exception( 'wcs_add_energy_axis, input WCS naxis != 2 %i' % wcs.naxis) w = WCS(naxis=3) w.wcs.crpix[0] = wcs.wcs.crpix[0] w.wcs.crpix[1] = wcs.wcs.crpix[1] w.wcs.ctype[0] = wcs.wcs.ctype[0] w.wcs.ctype[1] = wcs.wcs.ctype[1] w.wcs.crval[0] = wcs.wcs.crval[0] w.wcs.crval[1] = wcs.wcs.crval[1] w.wcs.cdelt[0] = wcs.wcs.cdelt[0] w.wcs.cdelt[1] = wcs.wcs.cdelt[1] w = WCS(w.to_header()) w.wcs.crpix[2] = 1 w.wcs.crval[2] = energies[0] w.wcs.cdelt[2] = energies[1] - energies[0] w.wcs.ctype[2] = 'Energy' return w
def function[wcs_add_energy_axis, parameter[wcs, energies]]: constant[Copy a WCS object, and add on the energy axis. Parameters ---------- wcs : `~astropy.wcs.WCS` WCS energies : array-like Array of energies. ] if compare[name[wcs].naxis not_equal[!=] constant[2]] begin[:] <ast.Raise object at 0x7da2044c3250> variable[w] assign[=] call[name[WCS], parameter[]] call[name[w].wcs.crpix][constant[0]] assign[=] call[name[wcs].wcs.crpix][constant[0]] call[name[w].wcs.crpix][constant[1]] assign[=] call[name[wcs].wcs.crpix][constant[1]] call[name[w].wcs.ctype][constant[0]] assign[=] call[name[wcs].wcs.ctype][constant[0]] call[name[w].wcs.ctype][constant[1]] assign[=] call[name[wcs].wcs.ctype][constant[1]] call[name[w].wcs.crval][constant[0]] assign[=] call[name[wcs].wcs.crval][constant[0]] call[name[w].wcs.crval][constant[1]] assign[=] call[name[wcs].wcs.crval][constant[1]] call[name[w].wcs.cdelt][constant[0]] assign[=] call[name[wcs].wcs.cdelt][constant[0]] call[name[w].wcs.cdelt][constant[1]] assign[=] call[name[wcs].wcs.cdelt][constant[1]] variable[w] assign[=] call[name[WCS], parameter[call[name[w].to_header, parameter[]]]] call[name[w].wcs.crpix][constant[2]] assign[=] constant[1] call[name[w].wcs.crval][constant[2]] assign[=] call[name[energies]][constant[0]] call[name[w].wcs.cdelt][constant[2]] assign[=] binary_operation[call[name[energies]][constant[1]] - call[name[energies]][constant[0]]] call[name[w].wcs.ctype][constant[2]] assign[=] constant[Energy] return[name[w]]
keyword[def] identifier[wcs_add_energy_axis] ( identifier[wcs] , identifier[energies] ): literal[string] keyword[if] identifier[wcs] . identifier[naxis] != literal[int] : keyword[raise] identifier[Exception] ( literal[string] % identifier[wcs] . identifier[naxis] ) identifier[w] = identifier[WCS] ( identifier[naxis] = literal[int] ) identifier[w] . identifier[wcs] . identifier[crpix] [ literal[int] ]= identifier[wcs] . identifier[wcs] . identifier[crpix] [ literal[int] ] identifier[w] . identifier[wcs] . identifier[crpix] [ literal[int] ]= identifier[wcs] . identifier[wcs] . identifier[crpix] [ literal[int] ] identifier[w] . identifier[wcs] . identifier[ctype] [ literal[int] ]= identifier[wcs] . identifier[wcs] . identifier[ctype] [ literal[int] ] identifier[w] . identifier[wcs] . identifier[ctype] [ literal[int] ]= identifier[wcs] . identifier[wcs] . identifier[ctype] [ literal[int] ] identifier[w] . identifier[wcs] . identifier[crval] [ literal[int] ]= identifier[wcs] . identifier[wcs] . identifier[crval] [ literal[int] ] identifier[w] . identifier[wcs] . identifier[crval] [ literal[int] ]= identifier[wcs] . identifier[wcs] . identifier[crval] [ literal[int] ] identifier[w] . identifier[wcs] . identifier[cdelt] [ literal[int] ]= identifier[wcs] . identifier[wcs] . identifier[cdelt] [ literal[int] ] identifier[w] . identifier[wcs] . identifier[cdelt] [ literal[int] ]= identifier[wcs] . identifier[wcs] . identifier[cdelt] [ literal[int] ] identifier[w] = identifier[WCS] ( identifier[w] . identifier[to_header] ()) identifier[w] . identifier[wcs] . identifier[crpix] [ literal[int] ]= literal[int] identifier[w] . identifier[wcs] . identifier[crval] [ literal[int] ]= identifier[energies] [ literal[int] ] identifier[w] . identifier[wcs] . identifier[cdelt] [ literal[int] ]= identifier[energies] [ literal[int] ]- identifier[energies] [ literal[int] ] identifier[w] . identifier[wcs] . identifier[ctype] [ literal[int] ]= literal[string] keyword[return] identifier[w]
def wcs_add_energy_axis(wcs, energies): """Copy a WCS object, and add on the energy axis. Parameters ---------- wcs : `~astropy.wcs.WCS` WCS energies : array-like Array of energies. """ if wcs.naxis != 2: raise Exception('wcs_add_energy_axis, input WCS naxis != 2 %i' % wcs.naxis) # depends on [control=['if'], data=[]] w = WCS(naxis=3) w.wcs.crpix[0] = wcs.wcs.crpix[0] w.wcs.crpix[1] = wcs.wcs.crpix[1] w.wcs.ctype[0] = wcs.wcs.ctype[0] w.wcs.ctype[1] = wcs.wcs.ctype[1] w.wcs.crval[0] = wcs.wcs.crval[0] w.wcs.crval[1] = wcs.wcs.crval[1] w.wcs.cdelt[0] = wcs.wcs.cdelt[0] w.wcs.cdelt[1] = wcs.wcs.cdelt[1] w = WCS(w.to_header()) w.wcs.crpix[2] = 1 w.wcs.crval[2] = energies[0] w.wcs.cdelt[2] = energies[1] - energies[0] w.wcs.ctype[2] = 'Energy' return w
def add_connection_score(self, node): """ Return a numeric value that determines this node's score for adding a new connection. A negative value indicates that no connections should be made to this node for at least that number of seconds. A value of -inf indicates no connections should be made to this node for the foreseeable future. This score should ideally take into account the connectedness of available nodes, so that those with less current connections will get more. """ # TODO: this should ideally take node history into account conntime = node.seconds_until_connect_ok() if conntime > 0: self.log("not considering %r for new connection; has %r left on " "connect blackout" % (node, conntime)) return -conntime numconns = self.num_connectors_to(node) if numconns >= self.max_connections_per_node: return float('-Inf') return sys.maxint - numconns
def function[add_connection_score, parameter[self, node]]: constant[ Return a numeric value that determines this node's score for adding a new connection. A negative value indicates that no connections should be made to this node for at least that number of seconds. A value of -inf indicates no connections should be made to this node for the foreseeable future. This score should ideally take into account the connectedness of available nodes, so that those with less current connections will get more. ] variable[conntime] assign[=] call[name[node].seconds_until_connect_ok, parameter[]] if compare[name[conntime] greater[>] constant[0]] begin[:] call[name[self].log, parameter[binary_operation[constant[not considering %r for new connection; has %r left on connect blackout] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da204961e40>, <ast.Name object at 0x7da204961c60>]]]]] return[<ast.UnaryOp object at 0x7da204962bf0>] variable[numconns] assign[=] call[name[self].num_connectors_to, parameter[name[node]]] if compare[name[numconns] greater_or_equal[>=] name[self].max_connections_per_node] begin[:] return[call[name[float], parameter[constant[-Inf]]]] return[binary_operation[name[sys].maxint - name[numconns]]]
keyword[def] identifier[add_connection_score] ( identifier[self] , identifier[node] ): literal[string] identifier[conntime] = identifier[node] . identifier[seconds_until_connect_ok] () keyword[if] identifier[conntime] > literal[int] : identifier[self] . identifier[log] ( literal[string] literal[string] %( identifier[node] , identifier[conntime] )) keyword[return] - identifier[conntime] identifier[numconns] = identifier[self] . identifier[num_connectors_to] ( identifier[node] ) keyword[if] identifier[numconns] >= identifier[self] . identifier[max_connections_per_node] : keyword[return] identifier[float] ( literal[string] ) keyword[return] identifier[sys] . identifier[maxint] - identifier[numconns]
def add_connection_score(self, node): """ Return a numeric value that determines this node's score for adding a new connection. A negative value indicates that no connections should be made to this node for at least that number of seconds. A value of -inf indicates no connections should be made to this node for the foreseeable future. This score should ideally take into account the connectedness of available nodes, so that those with less current connections will get more. """ # TODO: this should ideally take node history into account conntime = node.seconds_until_connect_ok() if conntime > 0: self.log('not considering %r for new connection; has %r left on connect blackout' % (node, conntime)) return -conntime # depends on [control=['if'], data=['conntime']] numconns = self.num_connectors_to(node) if numconns >= self.max_connections_per_node: return float('-Inf') # depends on [control=['if'], data=[]] return sys.maxint - numconns
def start_output (self): """ Write start of checking info as sql comment. """ super(SQLLogger, self).start_output() if self.has_part("intro"): self.write_intro() self.writeln() self.flush()
def function[start_output, parameter[self]]: constant[ Write start of checking info as sql comment. ] call[call[name[super], parameter[name[SQLLogger], name[self]]].start_output, parameter[]] if call[name[self].has_part, parameter[constant[intro]]] begin[:] call[name[self].write_intro, parameter[]] call[name[self].writeln, parameter[]] call[name[self].flush, parameter[]]
keyword[def] identifier[start_output] ( identifier[self] ): literal[string] identifier[super] ( identifier[SQLLogger] , identifier[self] ). identifier[start_output] () keyword[if] identifier[self] . identifier[has_part] ( literal[string] ): identifier[self] . identifier[write_intro] () identifier[self] . identifier[writeln] () identifier[self] . identifier[flush] ()
def start_output(self): """ Write start of checking info as sql comment. """ super(SQLLogger, self).start_output() if self.has_part('intro'): self.write_intro() self.writeln() self.flush() # depends on [control=['if'], data=[]]
def user_present(name, uid, password, channel=14, callback=False, link_auth=True, ipmi_msg=True, privilege_level='administrator', **kwargs): ''' Ensure IPMI user and user privileges. name name of user (limit 16 bytes) uid user id number (1 to 7) password user password (limit 16 bytes) channel ipmi channel defaults to 14 for auto callback User Restricted to Callback False = User Privilege Limit is determined by the User Privilege Limit parameter privilege_level, for both callback and non-callback connections. True = User Privilege Limit is determined by the privilege_level parameter for callback connections, but is restricted to Callback level for non-callback connections. Thus, a user can only initiate a Callback when they 'call in' to the BMC, but once the callback connection has been made, the user could potentially establish a session as an Operator. link_auth User Link authentication True/False user name and password information will be used for link authentication, e.g. PPP CHAP) for the given channel. Link authentication itself is a global setting for the channel and is enabled/disabled via the serial/modem configuration parameters. ipmi_msg User IPMI Messaging True/False user name and password information will be used for IPMI Messaging. In this case, 'IPMI Messaging' refers to the ability to execute generic IPMI commands that are not associated with a particular payload type. For example, if IPMI Messaging is disabled for a user, but that user is enabled for activating the SOL payload type, then IPMI commands associated with SOL and session management, such as Get SOL Configuration Parameters and Close Session are available, but generic IPMI commands such as Get SEL Time are unavailable.) 
ipmi_msg privilege_level * callback * user * operator * administrator * proprietary * no_access kwargs - api_host=localhost - api_user=admin - api_pass= - api_port=623 - api_kg=None ''' ret = {'name': name, 'result': False, 'comment': '', 'changes': {}} org_user = __salt__['ipmi.get_user'](uid=uid, channel=channel, **kwargs) change = False if org_user['access']['callback'] != callback: change = True if org_user['access']['link_auth'] != link_auth: change = True if org_user['access']['ipmi_msg'] != ipmi_msg: change = True if org_user['access']['privilege_level'] != privilege_level: change = True if __salt__['ipmi.set_user_password'](uid, mode='test_password', password=password, **kwargs) is False: change = True if change is False: ret['result'] = True ret['comment'] = 'user already present' return ret if __opts__['test']: ret['comment'] = 'would (re)create user' ret['result'] = None ret['changes'] = {'old': org_user, 'new': name} return ret __salt__['ipmi.ensure_user'](uid, name, password, channel, callback, link_auth, ipmi_msg, privilege_level, **kwargs) current_user = __salt__['ipmi.get_user'](uid=uid, channel=channel, **kwargs) ret['comment'] = '(re)created user' ret['result'] = True ret['changes'] = {'old': org_user, 'new': current_user} return ret
def function[user_present, parameter[name, uid, password, channel, callback, link_auth, ipmi_msg, privilege_level]]: constant[ Ensure IPMI user and user privileges. name name of user (limit 16 bytes) uid user id number (1 to 7) password user password (limit 16 bytes) channel ipmi channel defaults to 14 for auto callback User Restricted to Callback False = User Privilege Limit is determined by the User Privilege Limit parameter privilege_level, for both callback and non-callback connections. True = User Privilege Limit is determined by the privilege_level parameter for callback connections, but is restricted to Callback level for non-callback connections. Thus, a user can only initiate a Callback when they 'call in' to the BMC, but once the callback connection has been made, the user could potentially establish a session as an Operator. link_auth User Link authentication True/False user name and password information will be used for link authentication, e.g. PPP CHAP) for the given channel. Link authentication itself is a global setting for the channel and is enabled/disabled via the serial/modem configuration parameters. ipmi_msg User IPMI Messaging True/False user name and password information will be used for IPMI Messaging. In this case, 'IPMI Messaging' refers to the ability to execute generic IPMI commands that are not associated with a particular payload type. For example, if IPMI Messaging is disabled for a user, but that user is enabled for activating the SOL payload type, then IPMI commands associated with SOL and session management, such as Get SOL Configuration Parameters and Close Session are available, but generic IPMI commands such as Get SEL Time are unavailable.) 
ipmi_msg privilege_level * callback * user * operator * administrator * proprietary * no_access kwargs - api_host=localhost - api_user=admin - api_pass= - api_port=623 - api_kg=None ] variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da207f9ace0>, <ast.Constant object at 0x7da207f9af20>, <ast.Constant object at 0x7da207f98d60>, <ast.Constant object at 0x7da207f98100>], [<ast.Name object at 0x7da207f987c0>, <ast.Constant object at 0x7da207f9ac20>, <ast.Constant object at 0x7da207f999c0>, <ast.Dict object at 0x7da207f9b2e0>]] variable[org_user] assign[=] call[call[name[__salt__]][constant[ipmi.get_user]], parameter[]] variable[change] assign[=] constant[False] if compare[call[call[name[org_user]][constant[access]]][constant[callback]] not_equal[!=] name[callback]] begin[:] variable[change] assign[=] constant[True] if compare[call[call[name[org_user]][constant[access]]][constant[link_auth]] not_equal[!=] name[link_auth]] begin[:] variable[change] assign[=] constant[True] if compare[call[call[name[org_user]][constant[access]]][constant[ipmi_msg]] not_equal[!=] name[ipmi_msg]] begin[:] variable[change] assign[=] constant[True] if compare[call[call[name[org_user]][constant[access]]][constant[privilege_level]] not_equal[!=] name[privilege_level]] begin[:] variable[change] assign[=] constant[True] if compare[call[call[name[__salt__]][constant[ipmi.set_user_password]], parameter[name[uid]]] is constant[False]] begin[:] variable[change] assign[=] constant[True] if compare[name[change] is constant[False]] begin[:] call[name[ret]][constant[result]] assign[=] constant[True] call[name[ret]][constant[comment]] assign[=] constant[user already present] return[name[ret]] if call[name[__opts__]][constant[test]] begin[:] call[name[ret]][constant[comment]] assign[=] constant[would (re)create user] call[name[ret]][constant[result]] assign[=] constant[None] call[name[ret]][constant[changes]] assign[=] dictionary[[<ast.Constant object at 0x7da2047e81c0>, <ast.Constant object 
at 0x7da2047eb070>], [<ast.Name object at 0x7da2047e8430>, <ast.Name object at 0x7da2047e8550>]] return[name[ret]] call[call[name[__salt__]][constant[ipmi.ensure_user]], parameter[name[uid], name[name], name[password], name[channel], name[callback], name[link_auth], name[ipmi_msg], name[privilege_level]]] variable[current_user] assign[=] call[call[name[__salt__]][constant[ipmi.get_user]], parameter[]] call[name[ret]][constant[comment]] assign[=] constant[(re)created user] call[name[ret]][constant[result]] assign[=] constant[True] call[name[ret]][constant[changes]] assign[=] dictionary[[<ast.Constant object at 0x7da1b208df00>, <ast.Constant object at 0x7da1b208fb20>], [<ast.Name object at 0x7da1b208ddb0>, <ast.Name object at 0x7da1b208fb50>]] return[name[ret]]
keyword[def] identifier[user_present] ( identifier[name] , identifier[uid] , identifier[password] , identifier[channel] = literal[int] , identifier[callback] = keyword[False] , identifier[link_auth] = keyword[True] , identifier[ipmi_msg] = keyword[True] , identifier[privilege_level] = literal[string] ,** identifier[kwargs] ): literal[string] identifier[ret] ={ literal[string] : identifier[name] , literal[string] : keyword[False] , literal[string] : literal[string] , literal[string] :{}} identifier[org_user] = identifier[__salt__] [ literal[string] ]( identifier[uid] = identifier[uid] , identifier[channel] = identifier[channel] ,** identifier[kwargs] ) identifier[change] = keyword[False] keyword[if] identifier[org_user] [ literal[string] ][ literal[string] ]!= identifier[callback] : identifier[change] = keyword[True] keyword[if] identifier[org_user] [ literal[string] ][ literal[string] ]!= identifier[link_auth] : identifier[change] = keyword[True] keyword[if] identifier[org_user] [ literal[string] ][ literal[string] ]!= identifier[ipmi_msg] : identifier[change] = keyword[True] keyword[if] identifier[org_user] [ literal[string] ][ literal[string] ]!= identifier[privilege_level] : identifier[change] = keyword[True] keyword[if] identifier[__salt__] [ literal[string] ]( identifier[uid] , identifier[mode] = literal[string] , identifier[password] = identifier[password] ,** identifier[kwargs] ) keyword[is] keyword[False] : identifier[change] = keyword[True] keyword[if] identifier[change] keyword[is] keyword[False] : identifier[ret] [ literal[string] ]= keyword[True] identifier[ret] [ literal[string] ]= literal[string] keyword[return] identifier[ret] keyword[if] identifier[__opts__] [ literal[string] ]: identifier[ret] [ literal[string] ]= literal[string] identifier[ret] [ literal[string] ]= keyword[None] identifier[ret] [ literal[string] ]={ literal[string] : identifier[org_user] , literal[string] : identifier[name] } keyword[return] identifier[ret] identifier[__salt__] [ 
literal[string] ]( identifier[uid] , identifier[name] , identifier[password] , identifier[channel] , identifier[callback] , identifier[link_auth] , identifier[ipmi_msg] , identifier[privilege_level] , ** identifier[kwargs] ) identifier[current_user] = identifier[__salt__] [ literal[string] ]( identifier[uid] = identifier[uid] , identifier[channel] = identifier[channel] ,** identifier[kwargs] ) identifier[ret] [ literal[string] ]= literal[string] identifier[ret] [ literal[string] ]= keyword[True] identifier[ret] [ literal[string] ]={ literal[string] : identifier[org_user] , literal[string] : identifier[current_user] } keyword[return] identifier[ret]
def user_present(name, uid, password, channel=14, callback=False, link_auth=True, ipmi_msg=True, privilege_level='administrator', **kwargs): """ Ensure IPMI user and user privileges. name name of user (limit 16 bytes) uid user id number (1 to 7) password user password (limit 16 bytes) channel ipmi channel defaults to 14 for auto callback User Restricted to Callback False = User Privilege Limit is determined by the User Privilege Limit parameter privilege_level, for both callback and non-callback connections. True = User Privilege Limit is determined by the privilege_level parameter for callback connections, but is restricted to Callback level for non-callback connections. Thus, a user can only initiate a Callback when they 'call in' to the BMC, but once the callback connection has been made, the user could potentially establish a session as an Operator. link_auth User Link authentication True/False user name and password information will be used for link authentication, e.g. PPP CHAP) for the given channel. Link authentication itself is a global setting for the channel and is enabled/disabled via the serial/modem configuration parameters. ipmi_msg User IPMI Messaging True/False user name and password information will be used for IPMI Messaging. In this case, 'IPMI Messaging' refers to the ability to execute generic IPMI commands that are not associated with a particular payload type. For example, if IPMI Messaging is disabled for a user, but that user is enabled for activating the SOL payload type, then IPMI commands associated with SOL and session management, such as Get SOL Configuration Parameters and Close Session are available, but generic IPMI commands such as Get SEL Time are unavailable.) 
ipmi_msg privilege_level * callback * user * operator * administrator * proprietary * no_access kwargs - api_host=localhost - api_user=admin - api_pass= - api_port=623 - api_kg=None """ ret = {'name': name, 'result': False, 'comment': '', 'changes': {}} org_user = __salt__['ipmi.get_user'](uid=uid, channel=channel, **kwargs) change = False if org_user['access']['callback'] != callback: change = True # depends on [control=['if'], data=[]] if org_user['access']['link_auth'] != link_auth: change = True # depends on [control=['if'], data=[]] if org_user['access']['ipmi_msg'] != ipmi_msg: change = True # depends on [control=['if'], data=[]] if org_user['access']['privilege_level'] != privilege_level: change = True # depends on [control=['if'], data=[]] if __salt__['ipmi.set_user_password'](uid, mode='test_password', password=password, **kwargs) is False: change = True # depends on [control=['if'], data=[]] if change is False: ret['result'] = True ret['comment'] = 'user already present' return ret # depends on [control=['if'], data=[]] if __opts__['test']: ret['comment'] = 'would (re)create user' ret['result'] = None ret['changes'] = {'old': org_user, 'new': name} return ret # depends on [control=['if'], data=[]] __salt__['ipmi.ensure_user'](uid, name, password, channel, callback, link_auth, ipmi_msg, privilege_level, **kwargs) current_user = __salt__['ipmi.get_user'](uid=uid, channel=channel, **kwargs) ret['comment'] = '(re)created user' ret['result'] = True ret['changes'] = {'old': org_user, 'new': current_user} return ret
def by_symbol(symbol, country_code=None): """Get list of possible currencies for symbol; filter by country_code Look for all currencies that use the `symbol`. If there are currencies used in the country of `country_code`, return only those; otherwise return all found currencies. Parameters: symbol: unicode Currency symbol. country_code: Optional[unicode] Iso3166 alpha2 country code. Returns: List[Currency]: Currency objects for `symbol`; filter by country_code. """ res = _data()['symbol'].get(symbol) if res: tmp_res = [] for d in res: if country_code in d.countries: tmp_res += [d] if tmp_res: return tmp_res if country_code is None: return res
def function[by_symbol, parameter[symbol, country_code]]: constant[Get list of possible currencies for symbol; filter by country_code Look for all currencies that use the `symbol`. If there are currencies used in the country of `country_code`, return only those; otherwise return all found currencies. Parameters: symbol: unicode Currency symbol. country_code: Optional[unicode] Iso3166 alpha2 country code. Returns: List[Currency]: Currency objects for `symbol`; filter by country_code. ] variable[res] assign[=] call[call[call[name[_data], parameter[]]][constant[symbol]].get, parameter[name[symbol]]] if name[res] begin[:] variable[tmp_res] assign[=] list[[]] for taget[name[d]] in starred[name[res]] begin[:] if compare[name[country_code] in name[d].countries] begin[:] <ast.AugAssign object at 0x7da1b0b1d810> if name[tmp_res] begin[:] return[name[tmp_res]] if compare[name[country_code] is constant[None]] begin[:] return[name[res]]
keyword[def] identifier[by_symbol] ( identifier[symbol] , identifier[country_code] = keyword[None] ): literal[string] identifier[res] = identifier[_data] ()[ literal[string] ]. identifier[get] ( identifier[symbol] ) keyword[if] identifier[res] : identifier[tmp_res] =[] keyword[for] identifier[d] keyword[in] identifier[res] : keyword[if] identifier[country_code] keyword[in] identifier[d] . identifier[countries] : identifier[tmp_res] +=[ identifier[d] ] keyword[if] identifier[tmp_res] : keyword[return] identifier[tmp_res] keyword[if] identifier[country_code] keyword[is] keyword[None] : keyword[return] identifier[res]
def by_symbol(symbol, country_code=None): """Get list of possible currencies for symbol; filter by country_code Look for all currencies that use the `symbol`. If there are currencies used in the country of `country_code`, return only those; otherwise return all found currencies. Parameters: symbol: unicode Currency symbol. country_code: Optional[unicode] Iso3166 alpha2 country code. Returns: List[Currency]: Currency objects for `symbol`; filter by country_code. """ res = _data()['symbol'].get(symbol) if res: tmp_res = [] for d in res: if country_code in d.countries: tmp_res += [d] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['d']] if tmp_res: return tmp_res # depends on [control=['if'], data=[]] if country_code is None: return res # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def get_collections(module_ident, plpy):
    """Yield the module_ident of every collection the module belongs to.

    Walks the ``trees`` table upward from the module, then keeps only the
    collections whose revision is the newest for their uuid (which may not
    match what is in latest_modules), yielding each match once.
    """
    stmt = plpy.prepare('''
WITH RECURSIVE t(node, parent, path, document) AS (
    SELECT tr.nodeid, tr.parent_id, ARRAY[tr.nodeid], tr.documentid
    FROM trees tr
    WHERE tr.documentid = $1 and tr.is_collated = 'False'
UNION ALL
    SELECT c.nodeid, c.parent_id, path || ARRAY[c.nodeid], c.documentid
    FROM trees c JOIN t ON (c.nodeid = t.parent)
    WHERE not c.nodeid = ANY(t.path)
),
latest(module_ident) AS (
    SELECT module_ident FROM (
        SELECT m.module_ident, m.revised,
            MAX(m.revised) OVER (PARTITION BY m.uuid) as latest
        FROM modules m where m.portal_type = 'Collection'
    ) r
    WHERE r.revised = r.latest
)
SELECT module_ident FROM t, latest
WHERE latest.module_ident = t.document
''', ('integer',))
    for row in plpy.execute(stmt, (module_ident,)):
        yield row['module_ident']
def function[get_collections, parameter[module_ident, plpy]]: constant[Get all the collections that the module is part of.] variable[plan] assign[=] call[name[plpy].prepare, parameter[constant[ WITH RECURSIVE t(node, parent, path, document) AS ( SELECT tr.nodeid, tr.parent_id, ARRAY[tr.nodeid], tr.documentid FROM trees tr WHERE tr.documentid = $1 and tr.is_collated = 'False' UNION ALL SELECT c.nodeid, c.parent_id, path || ARRAY[c.nodeid], c.documentid FROM trees c JOIN t ON (c.nodeid = t.parent) WHERE not c.nodeid = ANY(t.path) ), latest(module_ident) AS ( SELECT module_ident FROM ( SELECT m.module_ident, m.revised, MAX(m.revised) OVER (PARTITION BY m.uuid) as latest FROM modules m where m.portal_type = 'Collection' ) r WHERE r.revised = r.latest ) SELECT module_ident FROM t, latest WHERE latest.module_ident = t.document ], tuple[[<ast.Constant object at 0x7da1b1832200>]]]] for taget[name[i]] in starred[call[name[plpy].execute, parameter[name[plan], tuple[[<ast.Name object at 0x7da1b1832e60>]]]]] begin[:] <ast.Yield object at 0x7da1b1832e90>
keyword[def] identifier[get_collections] ( identifier[module_ident] , identifier[plpy] ): literal[string] identifier[plan] = identifier[plpy] . identifier[prepare] ( literal[string] ,( literal[string] ,)) keyword[for] identifier[i] keyword[in] identifier[plpy] . identifier[execute] ( identifier[plan] ,( identifier[module_ident] ,)): keyword[yield] identifier[i] [ literal[string] ]
def get_collections(module_ident, plpy): """Get all the collections that the module is part of.""" # Make sure to only return one match per collection and only if it is the # latest collection (which may not be the same as what is in # latest_modules) plan = plpy.prepare("\nWITH RECURSIVE t(node, parent, path, document) AS (\n SELECT tr.nodeid, tr.parent_id, ARRAY[tr.nodeid], tr.documentid\n FROM trees tr\n WHERE tr.documentid = $1 and tr.is_collated = 'False'\n UNION ALL\n SELECT c.nodeid, c.parent_id, path || ARRAY[c.nodeid], c.documentid\n FROM trees c JOIN t ON (c.nodeid = t.parent)\n WHERE not c.nodeid = ANY(t.path)\n ),\n latest(module_ident) AS (\n SELECT module_ident FROM (\n SELECT m.module_ident, m.revised,\n MAX(m.revised) OVER (PARTITION BY m.uuid) as latest\n FROM modules m where m.portal_type = 'Collection'\n ) r\n WHERE r.revised = r.latest\n )\n SELECT module_ident FROM t, latest\n WHERE latest.module_ident = t.document\n ", ('integer',)) for i in plpy.execute(plan, (module_ident,)): yield i['module_ident'] # depends on [control=['for'], data=['i']]
def get_image_set(self):
    """Return the ImageSet for this request.

    When a ``pk`` URL kwarg is present, look the set up in the queryset
    (404 on a miss); otherwise create a brand-new ImageSet owned by the
    requesting user.
    """
    pk = self.kwargs.get("pk", None)
    if pk is not None:
        return get_object_or_404(self.get_queryset(), pk=pk)
    return self.request.user.image_sets.create()
def function[get_image_set, parameter[self]]: constant[ Obtain existing ImageSet if `pk` is specified, otherwise create a new ImageSet for the user. ] variable[image_set_pk] assign[=] call[name[self].kwargs.get, parameter[constant[pk], constant[None]]] if compare[name[image_set_pk] is constant[None]] begin[:] return[call[name[self].request.user.image_sets.create, parameter[]]] return[call[name[get_object_or_404], parameter[call[name[self].get_queryset, parameter[]]]]]
keyword[def] identifier[get_image_set] ( identifier[self] ): literal[string] identifier[image_set_pk] = identifier[self] . identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ) keyword[if] identifier[image_set_pk] keyword[is] keyword[None] : keyword[return] identifier[self] . identifier[request] . identifier[user] . identifier[image_sets] . identifier[create] () keyword[return] identifier[get_object_or_404] ( identifier[self] . identifier[get_queryset] (), identifier[pk] = identifier[image_set_pk] )
def get_image_set(self): """ Obtain existing ImageSet if `pk` is specified, otherwise create a new ImageSet for the user. """ image_set_pk = self.kwargs.get('pk', None) if image_set_pk is None: return self.request.user.image_sets.create() # depends on [control=['if'], data=[]] return get_object_or_404(self.get_queryset(), pk=image_set_pk)
def strip_html(text):
    """Clean up raw tweet text for display.

    Rewrites a leading run of @-mentions into a "Replying to ..." prefix,
    strips remaining '@' characters, and drops tokens containing URLs.

    Args:
        text: The raw tweet text.

    Returns:
        The cleaned, whitespace-normalized text.
    """
    def reply_to(text):
        replying_to = []
        # Bug fix: `message` was previously unbound (NameError) when the
        # text was empty or consisted entirely of @-mentions.
        message = []
        split_text = text.split()
        for index, token in enumerate(split_text):
            if token.startswith('@'):
                replying_to.append(token[1:])
            else:
                # First non-mention token: the rest is the message body.
                message = split_text[index:]
                break
        rply_msg = ""
        if len(replying_to) > 0:
            rply_msg = "Replying to "
            for token in replying_to[:-1]:
                rply_msg += token + ","
            if len(replying_to) > 1:
                rply_msg += 'and '
            rply_msg += replying_to[-1] + ". "
        return rply_msg + " ".join(message)

    text = reply_to(text)
    text = text.replace('@', ' ')
    # Drop URL tokens; join/split also renormalizes whitespace.
    return " ".join([token for token in text.split()
                     if ('http:' not in token) and ('https:' not in token)])
def function[strip_html, parameter[text]]: constant[ Get rid of ugly twitter html ] def function[reply_to, parameter[text]]: variable[replying_to] assign[=] list[[]] variable[split_text] assign[=] call[name[text].split, parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b0ea1540>, <ast.Name object at 0x7da1b0ea1930>]]] in starred[call[name[enumerate], parameter[name[split_text]]]] begin[:] if call[name[token].startswith, parameter[constant[@]]] begin[:] call[name[replying_to].append, parameter[call[name[token]][<ast.Slice object at 0x7da1b0ea0880>]]] variable[rply_msg] assign[=] constant[] if compare[call[name[len], parameter[name[replying_to]]] greater[>] constant[0]] begin[:] variable[rply_msg] assign[=] constant[Replying to ] for taget[name[token]] in starred[call[name[replying_to]][<ast.Slice object at 0x7da1b0eb82b0>]] begin[:] <ast.AugAssign object at 0x7da1b0eb9a20> if compare[call[name[len], parameter[name[replying_to]]] greater[>] constant[1]] begin[:] <ast.AugAssign object at 0x7da1b0ebfa90> <ast.AugAssign object at 0x7da1b0ebf610> return[binary_operation[name[rply_msg] + call[constant[ ].join, parameter[name[message]]]]] variable[text] assign[=] call[name[reply_to], parameter[name[text]]] variable[text] assign[=] call[name[text].replace, parameter[constant[@], constant[ ]]] return[call[constant[ ].join, parameter[<ast.ListComp object at 0x7da1b0ebe5c0>]]]
keyword[def] identifier[strip_html] ( identifier[text] ): literal[string] keyword[def] identifier[reply_to] ( identifier[text] ): identifier[replying_to] =[] identifier[split_text] = identifier[text] . identifier[split] () keyword[for] identifier[index] , identifier[token] keyword[in] identifier[enumerate] ( identifier[split_text] ): keyword[if] identifier[token] . identifier[startswith] ( literal[string] ): identifier[replying_to] . identifier[append] ( identifier[token] [ literal[int] :]) keyword[else] : identifier[message] = identifier[split_text] [ identifier[index] :] keyword[break] identifier[rply_msg] = literal[string] keyword[if] identifier[len] ( identifier[replying_to] )> literal[int] : identifier[rply_msg] = literal[string] keyword[for] identifier[token] keyword[in] identifier[replying_to] [:- literal[int] ]: identifier[rply_msg] += identifier[token] + literal[string] keyword[if] identifier[len] ( identifier[replying_to] )> literal[int] : identifier[rply_msg] += literal[string] identifier[rply_msg] += identifier[replying_to] [- literal[int] ]+ literal[string] keyword[return] identifier[rply_msg] + literal[string] . identifier[join] ( identifier[message] ) identifier[text] = identifier[reply_to] ( identifier[text] ) identifier[text] = identifier[text] . identifier[replace] ( literal[string] , literal[string] ) keyword[return] literal[string] . identifier[join] ([ identifier[token] keyword[for] identifier[token] keyword[in] identifier[text] . identifier[split] () keyword[if] ( literal[string] keyword[not] keyword[in] identifier[token] ) keyword[and] ( literal[string] keyword[not] keyword[in] identifier[token] )])
def strip_html(text): """ Get rid of ugly twitter html """ def reply_to(text): replying_to = [] split_text = text.split() for (index, token) in enumerate(split_text): if token.startswith('@'): replying_to.append(token[1:]) # depends on [control=['if'], data=[]] else: message = split_text[index:] break # depends on [control=['for'], data=[]] rply_msg = '' if len(replying_to) > 0: rply_msg = 'Replying to ' for token in replying_to[:-1]: rply_msg += token + ',' # depends on [control=['for'], data=['token']] if len(replying_to) > 1: rply_msg += 'and ' # depends on [control=['if'], data=[]] rply_msg += replying_to[-1] + '. ' # depends on [control=['if'], data=[]] return rply_msg + ' '.join(message) text = reply_to(text) text = text.replace('@', ' ') return ' '.join([token for token in text.split() if 'http:' not in token and 'https:' not in token])
def login(self, **kwargs):
    """Login abstract method with default implementation.

    Pops an optional ``render`` kwarg (default ``"json"``) to select the
    response renderer; the remaining kwargs become the POST body sent to
    ``LOGIN_URL_PATH``.

    :param kwargs: parameters
    :return: SdkResponse
    """
    assert self.LOGIN_URL_PATH is not None
    renderer = get_renderer(kwargs.pop("render", "json"))
    body = parse_args(**kwargs)
    return self.post(self.LOGIN_URL_PATH, body_params=body, render=renderer)
def function[login, parameter[self]]: constant[ Login abstract method with default implementation. :param kwargs: parameters :return: SdkResponse ] assert[compare[name[self].LOGIN_URL_PATH is_not constant[None]]] variable[render_name] assign[=] call[name[kwargs].pop, parameter[constant[render], constant[json]]] variable[render] assign[=] call[name[get_renderer], parameter[name[render_name]]] variable[params] assign[=] call[name[parse_args], parameter[]] return[call[name[self].post, parameter[name[self].LOGIN_URL_PATH]]]
keyword[def] identifier[login] ( identifier[self] ,** identifier[kwargs] ): literal[string] keyword[assert] identifier[self] . identifier[LOGIN_URL_PATH] keyword[is] keyword[not] keyword[None] identifier[render_name] = identifier[kwargs] . identifier[pop] ( literal[string] , literal[string] ) identifier[render] = identifier[get_renderer] ( identifier[render_name] ) identifier[params] = identifier[parse_args] (** identifier[kwargs] ) keyword[return] identifier[self] . identifier[post] ( identifier[self] . identifier[LOGIN_URL_PATH] , identifier[body_params] = identifier[params] , identifier[render] = identifier[render] )
def login(self, **kwargs): """ Login abstract method with default implementation. :param kwargs: parameters :return: SdkResponse """ assert self.LOGIN_URL_PATH is not None render_name = kwargs.pop('render', 'json') render = get_renderer(render_name) params = parse_args(**kwargs) return self.post(self.LOGIN_URL_PATH, body_params=params, render=render)
def ang2pix(nside, theta, phi):
    r"""Convert angle :math:`\theta` :math:`\phi` to a HEALPix ring-scheme
    pixel index.

    This is translated from chealpix.c; but refer to Section 4.1 of
    http://adsabs.harvard.edu/abs/2005ApJ...622..759G
    """
    nside, theta, phi = numpy.lib.stride_tricks.broadcast_arrays(nside, theta, phi)

    def _equatorial(nside, tt, z):
        # Equatorial belt, |z| <= 2/3.
        ta = nside * (0.5 + tt)
        tb = nside * z * 0.75
        jp = (ta - tb).astype('i8')
        jm = (ta + tb).astype('i8')
        ring = nside + 1 + jp - jm            # ring index, in {1, 2n + 1}
        kshift = 1 - (ring & 1)               # 1 if ring even, 0 if odd
        longi = (jp + jm - nside + kshift + 1) // 2   # in {0, 4n - 1}
        longi = longi % (4 * nside)
        return nside * (nside - 1) * 2 + (ring - 1) * 4 * nside + longi

    def _polecaps(nside, tt, z, s):
        # North/south polar caps, |z| > 2/3.
        frac = tt - numpy.floor(tt)
        za = numpy.abs(z)
        tmp = nside * s / ((1 + za) / 3) ** 0.5
        near_pole = za > 0.99
        tmp[near_pole] = nside[near_pole] * (3 * (1 - za[near_pole])) ** 0.5
        jp = (frac * tmp).astype('i8')
        jm = ((1 - frac) * tmp).astype('i8')
        ring = jp + jm + 1
        longi = (tt * ring).astype('i8')
        longi = longi % (4 * ring)
        north = 2 * ring * (ring - 1)
        south = 2 * ring * (ring + 1)
        out = numpy.empty_like(north)
        out[z > 0] = north[z > 0] + longi[z > 0]
        out[z < 0] = 12 * nside[z < 0] * nside[z < 0] - south[z < 0] + longi[z < 0]
        return out

    z = numpy.cos(theta)
    s = numpy.sin(theta)
    tt = (phi / (0.5 * numpy.pi)) % 4        # longitude coordinate in [0, 4)
    result = numpy.zeros(z.shape, dtype='i8')
    belt = (z < 2. / 3) & (z > -2. / 3)
    result[belt] = _equatorial(nside[belt], tt[belt], z[belt])
    result[~belt] = _polecaps(nside[~belt], tt[~belt], z[~belt], s[~belt])
    return result
def function[ang2pix, parameter[nside, theta, phi]]: constant[Convert angle :math:`\theta` :math:`\phi` to pixel. This is translated from chealpix.c; but refer to Section 4.1 of http://adsabs.harvard.edu/abs/2005ApJ...622..759G ] <ast.Tuple object at 0x7da20c991b10> assign[=] call[name[numpy].lib.stride_tricks.broadcast_arrays, parameter[name[nside], name[theta], name[phi]]] def function[equatorial, parameter[nside, tt, z]]: variable[t1] assign[=] binary_operation[name[nside] * binary_operation[constant[0.5] + name[tt]]] variable[t2] assign[=] binary_operation[binary_operation[name[nside] * name[z]] * constant[0.75]] variable[jp] assign[=] call[binary_operation[name[t1] - name[t2]].astype, parameter[constant[i8]]] variable[jm] assign[=] call[binary_operation[name[t1] + name[t2]].astype, parameter[constant[i8]]] variable[ir] assign[=] binary_operation[binary_operation[binary_operation[name[nside] + constant[1]] + name[jp]] - name[jm]] variable[kshift] assign[=] binary_operation[constant[1] - binary_operation[name[ir] <ast.BitAnd object at 0x7da2590d6b60> constant[1]]] variable[ip] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[jp] + name[jm]] - name[nside]] + name[kshift]] + constant[1]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]] variable[ip] assign[=] binary_operation[name[ip] <ast.Mod object at 0x7da2590d6920> binary_operation[constant[4] * name[nside]]] return[binary_operation[binary_operation[binary_operation[binary_operation[name[nside] * binary_operation[name[nside] - constant[1]]] * constant[2]] + binary_operation[binary_operation[binary_operation[name[ir] - constant[1]] * constant[4]] * name[nside]]] + name[ip]]] def function[polecaps, parameter[nside, tt, z, s]]: variable[tp] assign[=] binary_operation[name[tt] - call[name[numpy].floor, parameter[name[tt]]]] variable[za] assign[=] call[name[numpy].abs, parameter[name[z]]] variable[tmp] assign[=] binary_operation[binary_operation[name[nside] * 
name[s]] / binary_operation[binary_operation[binary_operation[constant[1] + name[za]] / constant[3]] ** constant[0.5]]] variable[mp] assign[=] compare[name[za] greater[>] constant[0.99]] call[name[tmp]][name[mp]] assign[=] binary_operation[call[name[nside]][name[mp]] * binary_operation[binary_operation[constant[3] * binary_operation[constant[1] - call[name[za]][name[mp]]]] ** constant[0.5]]] variable[jp] assign[=] call[binary_operation[name[tp] * name[tmp]].astype, parameter[constant[i8]]] variable[jm] assign[=] call[binary_operation[binary_operation[constant[1] - name[tp]] * name[tmp]].astype, parameter[constant[i8]]] variable[ir] assign[=] binary_operation[binary_operation[name[jp] + name[jm]] + constant[1]] variable[ip] assign[=] call[binary_operation[name[tt] * name[ir]].astype, parameter[constant[i8]]] variable[ip] assign[=] binary_operation[name[ip] <ast.Mod object at 0x7da2590d6920> binary_operation[constant[4] * name[ir]]] variable[r1] assign[=] binary_operation[binary_operation[constant[2] * name[ir]] * binary_operation[name[ir] - constant[1]]] variable[r2] assign[=] binary_operation[binary_operation[constant[2] * name[ir]] * binary_operation[name[ir] + constant[1]]] variable[r] assign[=] call[name[numpy].empty_like, parameter[name[r1]]] call[name[r]][compare[name[z] greater[>] constant[0]]] assign[=] binary_operation[call[name[r1]][compare[name[z] greater[>] constant[0]]] + call[name[ip]][compare[name[z] greater[>] constant[0]]]] call[name[r]][compare[name[z] less[<] constant[0]]] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[constant[12] * call[name[nside]][compare[name[z] less[<] constant[0]]]] * call[name[nside]][compare[name[z] less[<] constant[0]]]] - call[name[r2]][compare[name[z] less[<] constant[0]]]] + call[name[ip]][compare[name[z] less[<] constant[0]]]] return[name[r]] variable[z] assign[=] call[name[numpy].cos, parameter[name[theta]]] variable[s] assign[=] call[name[numpy].sin, parameter[name[theta]]] 
variable[tt] assign[=] binary_operation[binary_operation[name[phi] / binary_operation[constant[0.5] * name[numpy].pi]] <ast.Mod object at 0x7da2590d6920> constant[4]] variable[result] assign[=] call[name[numpy].zeros, parameter[name[z].shape]] variable[mask] assign[=] binary_operation[compare[name[z] less[<] binary_operation[constant[2.0] / constant[3]]] <ast.BitAnd object at 0x7da2590d6b60> compare[name[z] greater[>] binary_operation[<ast.UnaryOp object at 0x7da20c991e70> / constant[3]]]] call[name[result]][name[mask]] assign[=] call[name[equatorial], parameter[call[name[nside]][name[mask]], call[name[tt]][name[mask]], call[name[z]][name[mask]]]] call[name[result]][<ast.UnaryOp object at 0x7da18f812b00>] assign[=] call[name[polecaps], parameter[call[name[nside]][<ast.UnaryOp object at 0x7da18f811000>], call[name[tt]][<ast.UnaryOp object at 0x7da18f8108e0>], call[name[z]][<ast.UnaryOp object at 0x7da18f810d60>], call[name[s]][<ast.UnaryOp object at 0x7da18f813a60>]]] return[name[result]]
keyword[def] identifier[ang2pix] ( identifier[nside] , identifier[theta] , identifier[phi] ): literal[string] identifier[nside] , identifier[theta] , identifier[phi] = identifier[numpy] . identifier[lib] . identifier[stride_tricks] . identifier[broadcast_arrays] ( identifier[nside] , identifier[theta] , identifier[phi] ) keyword[def] identifier[equatorial] ( identifier[nside] , identifier[tt] , identifier[z] ): identifier[t1] = identifier[nside] *( literal[int] + identifier[tt] ) identifier[t2] = identifier[nside] * identifier[z] * literal[int] identifier[jp] =( identifier[t1] - identifier[t2] ). identifier[astype] ( literal[string] ) identifier[jm] =( identifier[t1] + identifier[t2] ). identifier[astype] ( literal[string] ) identifier[ir] = identifier[nside] + literal[int] + identifier[jp] - identifier[jm] identifier[kshift] = literal[int] -( identifier[ir] & literal[int] ) identifier[ip] =( identifier[jp] + identifier[jm] - identifier[nside] + identifier[kshift] + literal[int] )// literal[int] identifier[ip] = identifier[ip] %( literal[int] * identifier[nside] ) keyword[return] identifier[nside] *( identifier[nside] - literal[int] )* literal[int] +( identifier[ir] - literal[int] )* literal[int] * identifier[nside] + identifier[ip] keyword[def] identifier[polecaps] ( identifier[nside] , identifier[tt] , identifier[z] , identifier[s] ): identifier[tp] = identifier[tt] - identifier[numpy] . identifier[floor] ( identifier[tt] ) identifier[za] = identifier[numpy] . identifier[abs] ( identifier[z] ) identifier[tmp] = identifier[nside] * identifier[s] /(( literal[int] + identifier[za] )/ literal[int] )** literal[int] identifier[mp] = identifier[za] > literal[int] identifier[tmp] [ identifier[mp] ]= identifier[nside] [ identifier[mp] ]*( literal[int] *( literal[int] - identifier[za] [ identifier[mp] ]))** literal[int] identifier[jp] =( identifier[tp] * identifier[tmp] ). 
identifier[astype] ( literal[string] ) identifier[jm] =(( literal[int] - identifier[tp] )* identifier[tmp] ). identifier[astype] ( literal[string] ) identifier[ir] = identifier[jp] + identifier[jm] + literal[int] identifier[ip] =( identifier[tt] * identifier[ir] ). identifier[astype] ( literal[string] ) identifier[ip] = identifier[ip] %( literal[int] * identifier[ir] ) identifier[r1] = literal[int] * identifier[ir] *( identifier[ir] - literal[int] ) identifier[r2] = literal[int] * identifier[ir] *( identifier[ir] + literal[int] ) identifier[r] = identifier[numpy] . identifier[empty_like] ( identifier[r1] ) identifier[r] [ identifier[z] > literal[int] ]= identifier[r1] [ identifier[z] > literal[int] ]+ identifier[ip] [ identifier[z] > literal[int] ] identifier[r] [ identifier[z] < literal[int] ]= literal[int] * identifier[nside] [ identifier[z] < literal[int] ]* identifier[nside] [ identifier[z] < literal[int] ]- identifier[r2] [ identifier[z] < literal[int] ]+ identifier[ip] [ identifier[z] < literal[int] ] keyword[return] identifier[r] identifier[z] = identifier[numpy] . identifier[cos] ( identifier[theta] ) identifier[s] = identifier[numpy] . identifier[sin] ( identifier[theta] ) identifier[tt] =( identifier[phi] /( literal[int] * identifier[numpy] . identifier[pi] ))% literal[int] identifier[result] = identifier[numpy] . identifier[zeros] ( identifier[z] . identifier[shape] , identifier[dtype] = literal[string] ) identifier[mask] =( identifier[z] < literal[int] / literal[int] )&( identifier[z] >- literal[int] / literal[int] ) identifier[result] [ identifier[mask] ]= identifier[equatorial] ( identifier[nside] [ identifier[mask] ], identifier[tt] [ identifier[mask] ], identifier[z] [ identifier[mask] ]) identifier[result] [~ identifier[mask] ]= identifier[polecaps] ( identifier[nside] [~ identifier[mask] ], identifier[tt] [~ identifier[mask] ], identifier[z] [~ identifier[mask] ], identifier[s] [~ identifier[mask] ]) keyword[return] identifier[result]
def ang2pix(nside, theta, phi): """Convert angle :math:`\\theta` :math:`\\phi` to pixel. This is translated from chealpix.c; but refer to Section 4.1 of http://adsabs.harvard.edu/abs/2005ApJ...622..759G """ (nside, theta, phi) = numpy.lib.stride_tricks.broadcast_arrays(nside, theta, phi) def equatorial(nside, tt, z): t1 = nside * (0.5 + tt) t2 = nside * z * 0.75 jp = (t1 - t2).astype('i8') jm = (t1 + t2).astype('i8') ir = nside + 1 + jp - jm # in {1, 2n + 1} kshift = 1 - (ir & 1) # kshift=1 if ir even, 0 odd ip = (jp + jm - nside + kshift + 1) // 2 # in {0, 4n - 1} ip = ip % (4 * nside) return nside * (nside - 1) * 2 + (ir - 1) * 4 * nside + ip def polecaps(nside, tt, z, s): tp = tt - numpy.floor(tt) za = numpy.abs(z) tmp = nside * s / ((1 + za) / 3) ** 0.5 mp = za > 0.99 tmp[mp] = nside[mp] * (3 * (1 - za[mp])) ** 0.5 jp = (tp * tmp).astype('i8') jm = ((1 - tp) * tmp).astype('i8') ir = jp + jm + 1 ip = (tt * ir).astype('i8') ip = ip % (4 * ir) r1 = 2 * ir * (ir - 1) r2 = 2 * ir * (ir + 1) r = numpy.empty_like(r1) r[z > 0] = r1[z > 0] + ip[z > 0] r[z < 0] = 12 * nside[z < 0] * nside[z < 0] - r2[z < 0] + ip[z < 0] return r z = numpy.cos(theta) s = numpy.sin(theta) tt = phi / (0.5 * numpy.pi) % 4 # in [0, 4] result = numpy.zeros(z.shape, dtype='i8') mask = (z < 2.0 / 3) & (z > -2.0 / 3) result[mask] = equatorial(nside[mask], tt[mask], z[mask]) result[~mask] = polecaps(nside[~mask], tt[~mask], z[~mask], s[~mask]) return result
def vlan_classifier_rule_ruleid(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") vlan = ET.SubElement(config, "vlan", xmlns="urn:brocade.com:mgmt:brocade-vlan") classifier = ET.SubElement(vlan, "classifier") rule = ET.SubElement(classifier, "rule") ruleid = ET.SubElement(rule, "ruleid") ruleid.text = kwargs.pop('ruleid') callback = kwargs.pop('callback', self._callback) return callback(config)
def function[vlan_classifier_rule_ruleid, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[vlan] assign[=] call[name[ET].SubElement, parameter[name[config], constant[vlan]]] variable[classifier] assign[=] call[name[ET].SubElement, parameter[name[vlan], constant[classifier]]] variable[rule] assign[=] call[name[ET].SubElement, parameter[name[classifier], constant[rule]]] variable[ruleid] assign[=] call[name[ET].SubElement, parameter[name[rule], constant[ruleid]]] name[ruleid].text assign[=] call[name[kwargs].pop, parameter[constant[ruleid]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[vlan_classifier_rule_ruleid] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[vlan] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] ) identifier[classifier] = identifier[ET] . identifier[SubElement] ( identifier[vlan] , literal[string] ) identifier[rule] = identifier[ET] . identifier[SubElement] ( identifier[classifier] , literal[string] ) identifier[ruleid] = identifier[ET] . identifier[SubElement] ( identifier[rule] , literal[string] ) identifier[ruleid] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def vlan_classifier_rule_ruleid(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') vlan = ET.SubElement(config, 'vlan', xmlns='urn:brocade.com:mgmt:brocade-vlan') classifier = ET.SubElement(vlan, 'classifier') rule = ET.SubElement(classifier, 'rule') ruleid = ET.SubElement(rule, 'ruleid') ruleid.text = kwargs.pop('ruleid') callback = kwargs.pop('callback', self._callback) return callback(config)
def stop_gradient(input_layer):
    """Cuts off the gradient at this point.

    This works on both sequence and regular Pretty Tensors.

    Args:
      input_layer: The input.

    Returns:
      A new Pretty Tensor of the same type with stop_gradient applied.
    """
    if not input_layer.is_sequence():
        return tf.stop_gradient(input_layer)
    stopped = [tf.stop_gradient(item) for item in input_layer.sequence]
    return input_layer.with_sequence(stopped)
def function[stop_gradient, parameter[input_layer]]: constant[Cuts off the gradient at this point. This works on both sequence and regular Pretty Tensors. Args: input_layer: The input. Returns: A new Pretty Tensor of the same type with stop_gradient applied. ] if call[name[input_layer].is_sequence, parameter[]] begin[:] variable[result] assign[=] <ast.ListComp object at 0x7da20c6c5a20> return[call[name[input_layer].with_sequence, parameter[name[result]]]]
keyword[def] identifier[stop_gradient] ( identifier[input_layer] ): literal[string] keyword[if] identifier[input_layer] . identifier[is_sequence] (): identifier[result] =[ identifier[tf] . identifier[stop_gradient] ( identifier[t] ) keyword[for] identifier[t] keyword[in] identifier[input_layer] . identifier[sequence] ] keyword[return] identifier[input_layer] . identifier[with_sequence] ( identifier[result] ) keyword[else] : keyword[return] identifier[tf] . identifier[stop_gradient] ( identifier[input_layer] )
def stop_gradient(input_layer): """Cuts off the gradient at this point. This works on both sequence and regular Pretty Tensors. Args: input_layer: The input. Returns: A new Pretty Tensor of the same type with stop_gradient applied. """ if input_layer.is_sequence(): result = [tf.stop_gradient(t) for t in input_layer.sequence] return input_layer.with_sequence(result) # depends on [control=['if'], data=[]] else: return tf.stop_gradient(input_layer)
def summarize_mean_in_nats_and_bits(inputs, units, name,
                                    nats_name_scope="nats",
                                    bits_name_scope="bits_per_dim"):
    """Summarize the mean of a tensor in nats and bits per unit.

    Args:
      inputs: A tensor of values measured in nats.
      units: The units of the tensor with which to compute the mean bits
        per unit.
      name: The name of the tensor.
      nats_name_scope: The name scope of the nats summary.
      bits_name_scope: The name scope of the bits summary.
    """
    mean_nats = tf.reduce_mean(input_tensor=inputs)
    with tf.compat.v1.name_scope(nats_name_scope):
        tf.compat.v2.summary.scalar(
            name, mean_nats,
            step=tf.compat.v1.train.get_or_create_global_step())
    with tf.compat.v1.name_scope(bits_name_scope):
        # bits = nats / ln(2); normalized per unit (e.g. per dimension).
        tf.compat.v2.summary.scalar(
            name, mean_nats / units / tf.math.log(2.),
            step=tf.compat.v1.train.get_or_create_global_step())
def function[summarize_mean_in_nats_and_bits, parameter[inputs, units, name, nats_name_scope, bits_name_scope]]: constant[Summarize the mean of a tensor in nats and bits per unit. Args: inputs: A tensor of values measured in nats. units: The units of the tensor with which to compute the mean bits per unit. name: The name of the tensor. nats_name_scope: The name scope of the nats summary. bits_name_scope: The name scope of the bits summary. ] variable[mean] assign[=] call[name[tf].reduce_mean, parameter[]] with call[name[tf].compat.v1.name_scope, parameter[name[nats_name_scope]]] begin[:] call[name[tf].compat.v2.summary.scalar, parameter[name[name], name[mean]]] with call[name[tf].compat.v1.name_scope, parameter[name[bits_name_scope]]] begin[:] call[name[tf].compat.v2.summary.scalar, parameter[name[name], binary_operation[binary_operation[name[mean] / name[units]] / call[name[tf].math.log, parameter[constant[2.0]]]]]]
keyword[def] identifier[summarize_mean_in_nats_and_bits] ( identifier[inputs] , identifier[units] , identifier[name] , identifier[nats_name_scope] = literal[string] , identifier[bits_name_scope] = literal[string] ): literal[string] identifier[mean] = identifier[tf] . identifier[reduce_mean] ( identifier[input_tensor] = identifier[inputs] ) keyword[with] identifier[tf] . identifier[compat] . identifier[v1] . identifier[name_scope] ( identifier[nats_name_scope] ): identifier[tf] . identifier[compat] . identifier[v2] . identifier[summary] . identifier[scalar] ( identifier[name] , identifier[mean] , identifier[step] = identifier[tf] . identifier[compat] . identifier[v1] . identifier[train] . identifier[get_or_create_global_step] ()) keyword[with] identifier[tf] . identifier[compat] . identifier[v1] . identifier[name_scope] ( identifier[bits_name_scope] ): identifier[tf] . identifier[compat] . identifier[v2] . identifier[summary] . identifier[scalar] ( identifier[name] , identifier[mean] / identifier[units] / identifier[tf] . identifier[math] . identifier[log] ( literal[int] ), identifier[step] = identifier[tf] . identifier[compat] . identifier[v1] . identifier[train] . identifier[get_or_create_global_step] ())
def summarize_mean_in_nats_and_bits(inputs, units, name, nats_name_scope='nats', bits_name_scope='bits_per_dim'): """Summarize the mean of a tensor in nats and bits per unit. Args: inputs: A tensor of values measured in nats. units: The units of the tensor with which to compute the mean bits per unit. name: The name of the tensor. nats_name_scope: The name scope of the nats summary. bits_name_scope: The name scope of the bits summary. """ mean = tf.reduce_mean(input_tensor=inputs) with tf.compat.v1.name_scope(nats_name_scope): tf.compat.v2.summary.scalar(name, mean, step=tf.compat.v1.train.get_or_create_global_step()) # depends on [control=['with'], data=[]] with tf.compat.v1.name_scope(bits_name_scope): tf.compat.v2.summary.scalar(name, mean / units / tf.math.log(2.0), step=tf.compat.v1.train.get_or_create_global_step()) # depends on [control=['with'], data=[]]
def clean(self): """ Validates the current instance. """ super().clean() if ( (self.user is None and not self.anonymous_user) or (self.user and self.anonymous_user) ): raise ValidationError( _('A permission should target either a user or an anonymous user'), )
def function[clean, parameter[self]]: constant[ Validates the current instance. ] call[call[name[super], parameter[]].clean, parameter[]] if <ast.BoolOp object at 0x7da20c7c81c0> begin[:] <ast.Raise object at 0x7da20c7c8070>
keyword[def] identifier[clean] ( identifier[self] ): literal[string] identifier[super] (). identifier[clean] () keyword[if] ( ( identifier[self] . identifier[user] keyword[is] keyword[None] keyword[and] keyword[not] identifier[self] . identifier[anonymous_user] ) keyword[or] ( identifier[self] . identifier[user] keyword[and] identifier[self] . identifier[anonymous_user] ) ): keyword[raise] identifier[ValidationError] ( identifier[_] ( literal[string] ), )
def clean(self): """ Validates the current instance. """ super().clean() if self.user is None and (not self.anonymous_user) or (self.user and self.anonymous_user): raise ValidationError(_('A permission should target either a user or an anonymous user')) # depends on [control=['if'], data=[]]
def dump_bulk(cls, parent=None, keep_ids=True): """Dumps a tree branch to a python data structure.""" serializable_cls = cls._get_serializable_model() if ( parent and serializable_cls != cls and parent.__class__ != serializable_cls ): parent = serializable_cls.objects.get(pk=parent.pk) # a list of nodes: not really a queryset, but it works objs = serializable_cls.get_tree(parent) ret, lnk = [], {} for node, pyobj in zip(objs, serializers.serialize('python', objs)): depth = node.get_depth() # django's serializer stores the attributes in 'fields' fields = pyobj['fields'] del fields['parent'] # non-sorted trees have this if 'sib_order' in fields: del fields['sib_order'] if 'id' in fields: del fields['id'] newobj = {'data': fields} if keep_ids: newobj['id'] = pyobj['pk'] if (not parent and depth == 1) or\ (parent and depth == parent.get_depth()): ret.append(newobj) else: parentobj = lnk[node.parent_id] if 'children' not in parentobj: parentobj['children'] = [] parentobj['children'].append(newobj) lnk[node.pk] = newobj return ret
def function[dump_bulk, parameter[cls, parent, keep_ids]]: constant[Dumps a tree branch to a python data structure.] variable[serializable_cls] assign[=] call[name[cls]._get_serializable_model, parameter[]] if <ast.BoolOp object at 0x7da20e961ed0> begin[:] variable[parent] assign[=] call[name[serializable_cls].objects.get, parameter[]] variable[objs] assign[=] call[name[serializable_cls].get_tree, parameter[name[parent]]] <ast.Tuple object at 0x7da20e960d90> assign[=] tuple[[<ast.List object at 0x7da20e961690>, <ast.Dict object at 0x7da20e960fa0>]] for taget[tuple[[<ast.Name object at 0x7da20e963d30>, <ast.Name object at 0x7da20e962e30>]]] in starred[call[name[zip], parameter[name[objs], call[name[serializers].serialize, parameter[constant[python], name[objs]]]]]] begin[:] variable[depth] assign[=] call[name[node].get_depth, parameter[]] variable[fields] assign[=] call[name[pyobj]][constant[fields]] <ast.Delete object at 0x7da20e961e40> if compare[constant[sib_order] in name[fields]] begin[:] <ast.Delete object at 0x7da20e962860> if compare[constant[id] in name[fields]] begin[:] <ast.Delete object at 0x7da20e961270> variable[newobj] assign[=] dictionary[[<ast.Constant object at 0x7da20e961150>], [<ast.Name object at 0x7da20e963430>]] if name[keep_ids] begin[:] call[name[newobj]][constant[id]] assign[=] call[name[pyobj]][constant[pk]] if <ast.BoolOp object at 0x7da20e960280> begin[:] call[name[ret].append, parameter[name[newobj]]] call[name[lnk]][name[node].pk] assign[=] name[newobj] return[name[ret]]
keyword[def] identifier[dump_bulk] ( identifier[cls] , identifier[parent] = keyword[None] , identifier[keep_ids] = keyword[True] ): literal[string] identifier[serializable_cls] = identifier[cls] . identifier[_get_serializable_model] () keyword[if] ( identifier[parent] keyword[and] identifier[serializable_cls] != identifier[cls] keyword[and] identifier[parent] . identifier[__class__] != identifier[serializable_cls] ): identifier[parent] = identifier[serializable_cls] . identifier[objects] . identifier[get] ( identifier[pk] = identifier[parent] . identifier[pk] ) identifier[objs] = identifier[serializable_cls] . identifier[get_tree] ( identifier[parent] ) identifier[ret] , identifier[lnk] =[],{} keyword[for] identifier[node] , identifier[pyobj] keyword[in] identifier[zip] ( identifier[objs] , identifier[serializers] . identifier[serialize] ( literal[string] , identifier[objs] )): identifier[depth] = identifier[node] . identifier[get_depth] () identifier[fields] = identifier[pyobj] [ literal[string] ] keyword[del] identifier[fields] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[fields] : keyword[del] identifier[fields] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[fields] : keyword[del] identifier[fields] [ literal[string] ] identifier[newobj] ={ literal[string] : identifier[fields] } keyword[if] identifier[keep_ids] : identifier[newobj] [ literal[string] ]= identifier[pyobj] [ literal[string] ] keyword[if] ( keyword[not] identifier[parent] keyword[and] identifier[depth] == literal[int] ) keyword[or] ( identifier[parent] keyword[and] identifier[depth] == identifier[parent] . identifier[get_depth] ()): identifier[ret] . identifier[append] ( identifier[newobj] ) keyword[else] : identifier[parentobj] = identifier[lnk] [ identifier[node] . 
identifier[parent_id] ] keyword[if] literal[string] keyword[not] keyword[in] identifier[parentobj] : identifier[parentobj] [ literal[string] ]=[] identifier[parentobj] [ literal[string] ]. identifier[append] ( identifier[newobj] ) identifier[lnk] [ identifier[node] . identifier[pk] ]= identifier[newobj] keyword[return] identifier[ret]
def dump_bulk(cls, parent=None, keep_ids=True): """Dumps a tree branch to a python data structure.""" serializable_cls = cls._get_serializable_model() if parent and serializable_cls != cls and (parent.__class__ != serializable_cls): parent = serializable_cls.objects.get(pk=parent.pk) # depends on [control=['if'], data=[]] # a list of nodes: not really a queryset, but it works objs = serializable_cls.get_tree(parent) (ret, lnk) = ([], {}) for (node, pyobj) in zip(objs, serializers.serialize('python', objs)): depth = node.get_depth() # django's serializer stores the attributes in 'fields' fields = pyobj['fields'] del fields['parent'] # non-sorted trees have this if 'sib_order' in fields: del fields['sib_order'] # depends on [control=['if'], data=['fields']] if 'id' in fields: del fields['id'] # depends on [control=['if'], data=['fields']] newobj = {'data': fields} if keep_ids: newobj['id'] = pyobj['pk'] # depends on [control=['if'], data=[]] if not parent and depth == 1 or (parent and depth == parent.get_depth()): ret.append(newobj) # depends on [control=['if'], data=[]] else: parentobj = lnk[node.parent_id] if 'children' not in parentobj: parentobj['children'] = [] # depends on [control=['if'], data=['parentobj']] parentobj['children'].append(newobj) lnk[node.pk] = newobj # depends on [control=['for'], data=[]] return ret
def _GetFieldAttributes(field): """Decomposes field into the needed arguments to pass to the constructor. This can be used to create copies of the field or to compare if two fields are "equal" (since __eq__ is not implemented on messages.Field). Args: field: A ProtoRPC message field (potentially to be copied). Raises: TypeError: If the field is not an instance of messages.Field. Returns: A pair of relevant arguments to be passed to the constructor for the field type. The first element is a list of positional arguments for the constructor and the second is a dictionary of keyword arguments. """ if not isinstance(field, messages.Field): raise TypeError('Field %r to be copied not a ProtoRPC field.' % (field,)) positional_args = [] kwargs = { 'required': field.required, 'repeated': field.repeated, 'variant': field.variant, 'default': field._Field__default, # pylint: disable=protected-access } if isinstance(field, messages.MessageField): # Message fields can't have a default kwargs.pop('default') if not isinstance(field, message_types.DateTimeField): positional_args.insert(0, field.message_type) elif isinstance(field, messages.EnumField): positional_args.insert(0, field.type) return positional_args, kwargs
def function[_GetFieldAttributes, parameter[field]]: constant[Decomposes field into the needed arguments to pass to the constructor. This can be used to create copies of the field or to compare if two fields are "equal" (since __eq__ is not implemented on messages.Field). Args: field: A ProtoRPC message field (potentially to be copied). Raises: TypeError: If the field is not an instance of messages.Field. Returns: A pair of relevant arguments to be passed to the constructor for the field type. The first element is a list of positional arguments for the constructor and the second is a dictionary of keyword arguments. ] if <ast.UnaryOp object at 0x7da1b0efc3a0> begin[:] <ast.Raise object at 0x7da1b0efd420> variable[positional_args] assign[=] list[[]] variable[kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da1b0efef80>, <ast.Constant object at 0x7da1b0efe650>, <ast.Constant object at 0x7da1b0efea40>, <ast.Constant object at 0x7da1b0efe4a0>], [<ast.Attribute object at 0x7da1b0efffa0>, <ast.Attribute object at 0x7da1b0effdc0>, <ast.Attribute object at 0x7da1b0ec2590>, <ast.Attribute object at 0x7da1b0ec2ef0>]] if call[name[isinstance], parameter[name[field], name[messages].MessageField]] begin[:] call[name[kwargs].pop, parameter[constant[default]]] if <ast.UnaryOp object at 0x7da1b0ec2c50> begin[:] call[name[positional_args].insert, parameter[constant[0], name[field].message_type]] return[tuple[[<ast.Name object at 0x7da1b0ec2980>, <ast.Name object at 0x7da1b0ec0f70>]]]
keyword[def] identifier[_GetFieldAttributes] ( identifier[field] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[field] , identifier[messages] . identifier[Field] ): keyword[raise] identifier[TypeError] ( literal[string] %( identifier[field] ,)) identifier[positional_args] =[] identifier[kwargs] ={ literal[string] : identifier[field] . identifier[required] , literal[string] : identifier[field] . identifier[repeated] , literal[string] : identifier[field] . identifier[variant] , literal[string] : identifier[field] . identifier[_Field__default] , } keyword[if] identifier[isinstance] ( identifier[field] , identifier[messages] . identifier[MessageField] ): identifier[kwargs] . identifier[pop] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[field] , identifier[message_types] . identifier[DateTimeField] ): identifier[positional_args] . identifier[insert] ( literal[int] , identifier[field] . identifier[message_type] ) keyword[elif] identifier[isinstance] ( identifier[field] , identifier[messages] . identifier[EnumField] ): identifier[positional_args] . identifier[insert] ( literal[int] , identifier[field] . identifier[type] ) keyword[return] identifier[positional_args] , identifier[kwargs]
def _GetFieldAttributes(field): """Decomposes field into the needed arguments to pass to the constructor. This can be used to create copies of the field or to compare if two fields are "equal" (since __eq__ is not implemented on messages.Field). Args: field: A ProtoRPC message field (potentially to be copied). Raises: TypeError: If the field is not an instance of messages.Field. Returns: A pair of relevant arguments to be passed to the constructor for the field type. The first element is a list of positional arguments for the constructor and the second is a dictionary of keyword arguments. """ if not isinstance(field, messages.Field): raise TypeError('Field %r to be copied not a ProtoRPC field.' % (field,)) # depends on [control=['if'], data=[]] positional_args = [] # pylint: disable=protected-access kwargs = {'required': field.required, 'repeated': field.repeated, 'variant': field.variant, 'default': field._Field__default} if isinstance(field, messages.MessageField): # Message fields can't have a default kwargs.pop('default') if not isinstance(field, message_types.DateTimeField): positional_args.insert(0, field.message_type) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif isinstance(field, messages.EnumField): positional_args.insert(0, field.type) # depends on [control=['if'], data=[]] return (positional_args, kwargs)
def _data_frame(content): """ Helper funcation that converts text-based get response to a pandas dataframe for additional manipulation. """ response = loads(content) key = [x for x in response.keys() if x in c.response_data][0] frame = DataFrame(response[key]) final_frame = _convert(frame) return final_frame
def function[_data_frame, parameter[content]]: constant[ Helper funcation that converts text-based get response to a pandas dataframe for additional manipulation. ] variable[response] assign[=] call[name[loads], parameter[name[content]]] variable[key] assign[=] call[<ast.ListComp object at 0x7da1b12cdc00>][constant[0]] variable[frame] assign[=] call[name[DataFrame], parameter[call[name[response]][name[key]]]] variable[final_frame] assign[=] call[name[_convert], parameter[name[frame]]] return[name[final_frame]]
keyword[def] identifier[_data_frame] ( identifier[content] ): literal[string] identifier[response] = identifier[loads] ( identifier[content] ) identifier[key] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[response] . identifier[keys] () keyword[if] identifier[x] keyword[in] identifier[c] . identifier[response_data] ][ literal[int] ] identifier[frame] = identifier[DataFrame] ( identifier[response] [ identifier[key] ]) identifier[final_frame] = identifier[_convert] ( identifier[frame] ) keyword[return] identifier[final_frame]
def _data_frame(content): """ Helper funcation that converts text-based get response to a pandas dataframe for additional manipulation. """ response = loads(content) key = [x for x in response.keys() if x in c.response_data][0] frame = DataFrame(response[key]) final_frame = _convert(frame) return final_frame
def __get_event(self, block=True, timeout=1): """ Retrieves an event. If self._exceeding_event is not None, it'll be returned. Otherwise, an event is dequeued from the event buffer. If The event which was retrieved is bigger than the permitted batch size, it'll be omitted, and the next event in the event buffer is returned """ while True: if self._exceeding_event: # An event was omitted from last batch event = self._exceeding_event self._exceeding_event = None else: # No omitted event, get an event from the queue event = self._event_queue.get(block, timeout) event_size = len(event) # If the event is bigger than the permitted batch size, ignore it # The ( - 2 ) accounts for the parentheses enclosing the batch if event_size - 2 >= self._batch_max_size: self._notify(logging.WARNING, consts.LOG_MSG_OMITTED_OVERSIZED_EVENT % event_size) else: # Event is of valid size, return it return event
def function[__get_event, parameter[self, block, timeout]]: constant[ Retrieves an event. If self._exceeding_event is not None, it'll be returned. Otherwise, an event is dequeued from the event buffer. If The event which was retrieved is bigger than the permitted batch size, it'll be omitted, and the next event in the event buffer is returned ] while constant[True] begin[:] if name[self]._exceeding_event begin[:] variable[event] assign[=] name[self]._exceeding_event name[self]._exceeding_event assign[=] constant[None] variable[event_size] assign[=] call[name[len], parameter[name[event]]] if compare[binary_operation[name[event_size] - constant[2]] greater_or_equal[>=] name[self]._batch_max_size] begin[:] call[name[self]._notify, parameter[name[logging].WARNING, binary_operation[name[consts].LOG_MSG_OMITTED_OVERSIZED_EVENT <ast.Mod object at 0x7da2590d6920> name[event_size]]]]
keyword[def] identifier[__get_event] ( identifier[self] , identifier[block] = keyword[True] , identifier[timeout] = literal[int] ): literal[string] keyword[while] keyword[True] : keyword[if] identifier[self] . identifier[_exceeding_event] : identifier[event] = identifier[self] . identifier[_exceeding_event] identifier[self] . identifier[_exceeding_event] = keyword[None] keyword[else] : identifier[event] = identifier[self] . identifier[_event_queue] . identifier[get] ( identifier[block] , identifier[timeout] ) identifier[event_size] = identifier[len] ( identifier[event] ) keyword[if] identifier[event_size] - literal[int] >= identifier[self] . identifier[_batch_max_size] : identifier[self] . identifier[_notify] ( identifier[logging] . identifier[WARNING] , identifier[consts] . identifier[LOG_MSG_OMITTED_OVERSIZED_EVENT] % identifier[event_size] ) keyword[else] : keyword[return] identifier[event]
def __get_event(self, block=True, timeout=1): """ Retrieves an event. If self._exceeding_event is not None, it'll be returned. Otherwise, an event is dequeued from the event buffer. If The event which was retrieved is bigger than the permitted batch size, it'll be omitted, and the next event in the event buffer is returned """ while True: if self._exceeding_event: # An event was omitted from last batch event = self._exceeding_event self._exceeding_event = None # depends on [control=['if'], data=[]] else: # No omitted event, get an event from the queue event = self._event_queue.get(block, timeout) event_size = len(event) # If the event is bigger than the permitted batch size, ignore it # The ( - 2 ) accounts for the parentheses enclosing the batch if event_size - 2 >= self._batch_max_size: self._notify(logging.WARNING, consts.LOG_MSG_OMITTED_OVERSIZED_EVENT % event_size) # depends on [control=['if'], data=[]] else: # Event is of valid size, return it return event # depends on [control=['while'], data=[]]
def unCompressed(self): """ Derive uncompressed key """ public_key = repr(self._pk) prefix = public_key[0:2] if prefix == "04": return public_key assert prefix == "02" or prefix == "03" x = int(public_key[2:], 16) y = self._derive_y_from_x(x, (prefix == "02")) key = '04' + '%064x' % x + '%064x' % y return key
def function[unCompressed, parameter[self]]: constant[ Derive uncompressed key ] variable[public_key] assign[=] call[name[repr], parameter[name[self]._pk]] variable[prefix] assign[=] call[name[public_key]][<ast.Slice object at 0x7da2054a4430>] if compare[name[prefix] equal[==] constant[04]] begin[:] return[name[public_key]] assert[<ast.BoolOp object at 0x7da2054a6230>] variable[x] assign[=] call[name[int], parameter[call[name[public_key]][<ast.Slice object at 0x7da2054a6ce0>], constant[16]]] variable[y] assign[=] call[name[self]._derive_y_from_x, parameter[name[x], compare[name[prefix] equal[==] constant[02]]]] variable[key] assign[=] binary_operation[binary_operation[constant[04] + binary_operation[constant[%064x] <ast.Mod object at 0x7da2590d6920> name[x]]] + binary_operation[constant[%064x] <ast.Mod object at 0x7da2590d6920> name[y]]] return[name[key]]
keyword[def] identifier[unCompressed] ( identifier[self] ): literal[string] identifier[public_key] = identifier[repr] ( identifier[self] . identifier[_pk] ) identifier[prefix] = identifier[public_key] [ literal[int] : literal[int] ] keyword[if] identifier[prefix] == literal[string] : keyword[return] identifier[public_key] keyword[assert] identifier[prefix] == literal[string] keyword[or] identifier[prefix] == literal[string] identifier[x] = identifier[int] ( identifier[public_key] [ literal[int] :], literal[int] ) identifier[y] = identifier[self] . identifier[_derive_y_from_x] ( identifier[x] ,( identifier[prefix] == literal[string] )) identifier[key] = literal[string] + literal[string] % identifier[x] + literal[string] % identifier[y] keyword[return] identifier[key]
def unCompressed(self): """ Derive uncompressed key """ public_key = repr(self._pk) prefix = public_key[0:2] if prefix == '04': return public_key # depends on [control=['if'], data=[]] assert prefix == '02' or prefix == '03' x = int(public_key[2:], 16) y = self._derive_y_from_x(x, prefix == '02') key = '04' + '%064x' % x + '%064x' % y return key
def _releaseModifiers(self, modifiers, globally=False): """Release given modifiers (provided in list form). Parameters: modifiers list Returns: None """ # Release them in reverse order from pressing them: modifiers.reverse() modFlags = self._pressModifiers(modifiers, pressed=False, globally=globally) return modFlags
def function[_releaseModifiers, parameter[self, modifiers, globally]]: constant[Release given modifiers (provided in list form). Parameters: modifiers list Returns: None ] call[name[modifiers].reverse, parameter[]] variable[modFlags] assign[=] call[name[self]._pressModifiers, parameter[name[modifiers]]] return[name[modFlags]]
keyword[def] identifier[_releaseModifiers] ( identifier[self] , identifier[modifiers] , identifier[globally] = keyword[False] ): literal[string] identifier[modifiers] . identifier[reverse] () identifier[modFlags] = identifier[self] . identifier[_pressModifiers] ( identifier[modifiers] , identifier[pressed] = keyword[False] , identifier[globally] = identifier[globally] ) keyword[return] identifier[modFlags]
def _releaseModifiers(self, modifiers, globally=False): """Release given modifiers (provided in list form). Parameters: modifiers list Returns: None """ # Release them in reverse order from pressing them: modifiers.reverse() modFlags = self._pressModifiers(modifiers, pressed=False, globally=globally) return modFlags
def _check_validity_of_inputs(data, input_arg, input_name, grid_plot, dimension): """ Function that verifies when an input ('input_arg') of function 'plot' has a valid structure. ---------- Parameters ---------- data : list or list of lists Structure with the data that will be plotted. input_arg : list or list of lists The input data to be verified. input_name : str Name of the input_arg variable. grid_plot : bool A flag that identifies when the input_arg is a matrix or not. dimension : int Level of verification in the matrix format structure. Returns ------- out : list or list of lists Returns the same value as input_arg or a modified version. """ if input_arg is not None: if grid_plot is True: if isinstance(input_arg, list): if numpy.shape(input_arg)[:dimension] != numpy.shape(data)[:dimension]: raise RuntimeError("The shape of " + input_name + " does not match with data " "input.") else: raise RuntimeError("The specified data type of " + input_name + " field is not valid. Input must be a list.") else: if not isinstance(input_arg, str): raise RuntimeError("Taking into account that only one time-series had been " "specified at 'data', the " + input_name + " field must be a " "string") elif grid_plot is True: input_arg = numpy.ndarray(shape=numpy.shape(data)[:dimension], dtype=numpy.object) return input_arg
def function[_check_validity_of_inputs, parameter[data, input_arg, input_name, grid_plot, dimension]]: constant[ Function that verifies when an input ('input_arg') of function 'plot' has a valid structure. ---------- Parameters ---------- data : list or list of lists Structure with the data that will be plotted. input_arg : list or list of lists The input data to be verified. input_name : str Name of the input_arg variable. grid_plot : bool A flag that identifies when the input_arg is a matrix or not. dimension : int Level of verification in the matrix format structure. Returns ------- out : list or list of lists Returns the same value as input_arg or a modified version. ] if compare[name[input_arg] is_not constant[None]] begin[:] if compare[name[grid_plot] is constant[True]] begin[:] if call[name[isinstance], parameter[name[input_arg], name[list]]] begin[:] if compare[call[call[name[numpy].shape, parameter[name[input_arg]]]][<ast.Slice object at 0x7da18f811780>] not_equal[!=] call[call[name[numpy].shape, parameter[name[data]]]][<ast.Slice object at 0x7da18f813b50>]] begin[:] <ast.Raise object at 0x7da18f811de0> return[name[input_arg]]
keyword[def] identifier[_check_validity_of_inputs] ( identifier[data] , identifier[input_arg] , identifier[input_name] , identifier[grid_plot] , identifier[dimension] ): literal[string] keyword[if] identifier[input_arg] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[grid_plot] keyword[is] keyword[True] : keyword[if] identifier[isinstance] ( identifier[input_arg] , identifier[list] ): keyword[if] identifier[numpy] . identifier[shape] ( identifier[input_arg] )[: identifier[dimension] ]!= identifier[numpy] . identifier[shape] ( identifier[data] )[: identifier[dimension] ]: keyword[raise] identifier[RuntimeError] ( literal[string] + identifier[input_name] + literal[string] literal[string] ) keyword[else] : keyword[raise] identifier[RuntimeError] ( literal[string] + identifier[input_name] + literal[string] ) keyword[else] : keyword[if] keyword[not] identifier[isinstance] ( identifier[input_arg] , identifier[str] ): keyword[raise] identifier[RuntimeError] ( literal[string] literal[string] + identifier[input_name] + literal[string] literal[string] ) keyword[elif] identifier[grid_plot] keyword[is] keyword[True] : identifier[input_arg] = identifier[numpy] . identifier[ndarray] ( identifier[shape] = identifier[numpy] . identifier[shape] ( identifier[data] )[: identifier[dimension] ], identifier[dtype] = identifier[numpy] . identifier[object] ) keyword[return] identifier[input_arg]
def _check_validity_of_inputs(data, input_arg, input_name, grid_plot, dimension): """ Function that verifies when an input ('input_arg') of function 'plot' has a valid structure. ---------- Parameters ---------- data : list or list of lists Structure with the data that will be plotted. input_arg : list or list of lists The input data to be verified. input_name : str Name of the input_arg variable. grid_plot : bool A flag that identifies when the input_arg is a matrix or not. dimension : int Level of verification in the matrix format structure. Returns ------- out : list or list of lists Returns the same value as input_arg or a modified version. """ if input_arg is not None: if grid_plot is True: if isinstance(input_arg, list): if numpy.shape(input_arg)[:dimension] != numpy.shape(data)[:dimension]: raise RuntimeError('The shape of ' + input_name + ' does not match with data input.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: raise RuntimeError('The specified data type of ' + input_name + ' field is not valid. Input must be a list.') # depends on [control=['if'], data=[]] elif not isinstance(input_arg, str): raise RuntimeError("Taking into account that only one time-series had been specified at 'data', the " + input_name + ' field must be a string') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['input_arg']] elif grid_plot is True: input_arg = numpy.ndarray(shape=numpy.shape(data)[:dimension], dtype=numpy.object) # depends on [control=['if'], data=[]] return input_arg
def x_ticks(self, *ticks): """The points on the x-axis for which there are markers and grid lines. There are default ticks, but you can pass values to this method to override the defaults. Otherwise the method will return the ticks. :param \*ticks: if given, these will be chart's x-ticks. :rtype: ``tuple``""" if ticks: for tick in ticks: if not is_numeric(tick): raise TypeError("'%s' is not a numeric tick" % str(tick)) self._x_ticks = tuple(sorted(ticks)) else: if self._x_ticks: return self._x_ticks else: return determine_ticks(self.x_lower_limit(), self.x_upper_limit())
def function[x_ticks, parameter[self]]: constant[The points on the x-axis for which there are markers and grid lines. There are default ticks, but you can pass values to this method to override the defaults. Otherwise the method will return the ticks. :param \*ticks: if given, these will be chart's x-ticks. :rtype: ``tuple``] if name[ticks] begin[:] for taget[name[tick]] in starred[name[ticks]] begin[:] if <ast.UnaryOp object at 0x7da20c76d120> begin[:] <ast.Raise object at 0x7da20c76e6b0> name[self]._x_ticks assign[=] call[name[tuple], parameter[call[name[sorted], parameter[name[ticks]]]]]
keyword[def] identifier[x_ticks] ( identifier[self] ,* identifier[ticks] ): literal[string] keyword[if] identifier[ticks] : keyword[for] identifier[tick] keyword[in] identifier[ticks] : keyword[if] keyword[not] identifier[is_numeric] ( identifier[tick] ): keyword[raise] identifier[TypeError] ( literal[string] % identifier[str] ( identifier[tick] )) identifier[self] . identifier[_x_ticks] = identifier[tuple] ( identifier[sorted] ( identifier[ticks] )) keyword[else] : keyword[if] identifier[self] . identifier[_x_ticks] : keyword[return] identifier[self] . identifier[_x_ticks] keyword[else] : keyword[return] identifier[determine_ticks] ( identifier[self] . identifier[x_lower_limit] (), identifier[self] . identifier[x_upper_limit] ())
def x_ticks(self, *ticks): """The points on the x-axis for which there are markers and grid lines. There are default ticks, but you can pass values to this method to override the defaults. Otherwise the method will return the ticks. :param \\*ticks: if given, these will be chart's x-ticks. :rtype: ``tuple``""" if ticks: for tick in ticks: if not is_numeric(tick): raise TypeError("'%s' is not a numeric tick" % str(tick)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['tick']] self._x_ticks = tuple(sorted(ticks)) # depends on [control=['if'], data=[]] elif self._x_ticks: return self._x_ticks # depends on [control=['if'], data=[]] else: return determine_ticks(self.x_lower_limit(), self.x_upper_limit())
def remove_access_control_lists(self, security_namespace_id, tokens=None, recurse=None): """RemoveAccessControlLists. Remove access control lists under the specfied security namespace. :param str security_namespace_id: Security namespace identifier. :param str tokens: One or more comma-separated security tokens :param bool recurse: If true and this is a hierarchical namespace, also remove child ACLs of the specified tokens. :rtype: bool """ route_values = {} if security_namespace_id is not None: route_values['securityNamespaceId'] = self._serialize.url('security_namespace_id', security_namespace_id, 'str') query_parameters = {} if tokens is not None: query_parameters['tokens'] = self._serialize.query('tokens', tokens, 'str') if recurse is not None: query_parameters['recurse'] = self._serialize.query('recurse', recurse, 'bool') response = self._send(http_method='DELETE', location_id='18a2ad18-7571-46ae-bec7-0c7da1495885', version='5.0', route_values=route_values, query_parameters=query_parameters) return self._deserialize('bool', response)
def function[remove_access_control_lists, parameter[self, security_namespace_id, tokens, recurse]]: constant[RemoveAccessControlLists. Remove access control lists under the specfied security namespace. :param str security_namespace_id: Security namespace identifier. :param str tokens: One or more comma-separated security tokens :param bool recurse: If true and this is a hierarchical namespace, also remove child ACLs of the specified tokens. :rtype: bool ] variable[route_values] assign[=] dictionary[[], []] if compare[name[security_namespace_id] is_not constant[None]] begin[:] call[name[route_values]][constant[securityNamespaceId]] assign[=] call[name[self]._serialize.url, parameter[constant[security_namespace_id], name[security_namespace_id], constant[str]]] variable[query_parameters] assign[=] dictionary[[], []] if compare[name[tokens] is_not constant[None]] begin[:] call[name[query_parameters]][constant[tokens]] assign[=] call[name[self]._serialize.query, parameter[constant[tokens], name[tokens], constant[str]]] if compare[name[recurse] is_not constant[None]] begin[:] call[name[query_parameters]][constant[recurse]] assign[=] call[name[self]._serialize.query, parameter[constant[recurse], name[recurse], constant[bool]]] variable[response] assign[=] call[name[self]._send, parameter[]] return[call[name[self]._deserialize, parameter[constant[bool], name[response]]]]
keyword[def] identifier[remove_access_control_lists] ( identifier[self] , identifier[security_namespace_id] , identifier[tokens] = keyword[None] , identifier[recurse] = keyword[None] ): literal[string] identifier[route_values] ={} keyword[if] identifier[security_namespace_id] keyword[is] keyword[not] keyword[None] : identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[security_namespace_id] , literal[string] ) identifier[query_parameters] ={} keyword[if] identifier[tokens] keyword[is] keyword[not] keyword[None] : identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[tokens] , literal[string] ) keyword[if] identifier[recurse] keyword[is] keyword[not] keyword[None] : identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[recurse] , literal[string] ) identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] , identifier[location_id] = literal[string] , identifier[version] = literal[string] , identifier[route_values] = identifier[route_values] , identifier[query_parameters] = identifier[query_parameters] ) keyword[return] identifier[self] . identifier[_deserialize] ( literal[string] , identifier[response] )
def remove_access_control_lists(self, security_namespace_id, tokens=None, recurse=None): """RemoveAccessControlLists. Remove access control lists under the specfied security namespace. :param str security_namespace_id: Security namespace identifier. :param str tokens: One or more comma-separated security tokens :param bool recurse: If true and this is a hierarchical namespace, also remove child ACLs of the specified tokens. :rtype: bool """ route_values = {} if security_namespace_id is not None: route_values['securityNamespaceId'] = self._serialize.url('security_namespace_id', security_namespace_id, 'str') # depends on [control=['if'], data=['security_namespace_id']] query_parameters = {} if tokens is not None: query_parameters['tokens'] = self._serialize.query('tokens', tokens, 'str') # depends on [control=['if'], data=['tokens']] if recurse is not None: query_parameters['recurse'] = self._serialize.query('recurse', recurse, 'bool') # depends on [control=['if'], data=['recurse']] response = self._send(http_method='DELETE', location_id='18a2ad18-7571-46ae-bec7-0c7da1495885', version='5.0', route_values=route_values, query_parameters=query_parameters) return self._deserialize('bool', response)
def finalize(self): """ Called at clean up. Is used to disconnect signals. """ self.spinBox.valueChanged.disconnect(self.commitChangedValue) super(SnFloatCtiEditor, self).finalize()
def function[finalize, parameter[self]]: constant[ Called at clean up. Is used to disconnect signals. ] call[name[self].spinBox.valueChanged.disconnect, parameter[name[self].commitChangedValue]] call[call[name[super], parameter[name[SnFloatCtiEditor], name[self]]].finalize, parameter[]]
keyword[def] identifier[finalize] ( identifier[self] ): literal[string] identifier[self] . identifier[spinBox] . identifier[valueChanged] . identifier[disconnect] ( identifier[self] . identifier[commitChangedValue] ) identifier[super] ( identifier[SnFloatCtiEditor] , identifier[self] ). identifier[finalize] ()
def finalize(self): """ Called at clean up. Is used to disconnect signals. """ self.spinBox.valueChanged.disconnect(self.commitChangedValue) super(SnFloatCtiEditor, self).finalize()
def hms(self, msg, tic=None, prt=sys.stdout): """Print elapsed time and message.""" if tic is None: tic = self.tic now = timeit.default_timer() hms = str(datetime.timedelta(seconds=(now-tic))) prt.write('{HMS}: {MSG}\n'.format(HMS=hms, MSG=msg)) return now
def function[hms, parameter[self, msg, tic, prt]]: constant[Print elapsed time and message.] if compare[name[tic] is constant[None]] begin[:] variable[tic] assign[=] name[self].tic variable[now] assign[=] call[name[timeit].default_timer, parameter[]] variable[hms] assign[=] call[name[str], parameter[call[name[datetime].timedelta, parameter[]]]] call[name[prt].write, parameter[call[constant[{HMS}: {MSG} ].format, parameter[]]]] return[name[now]]
keyword[def] identifier[hms] ( identifier[self] , identifier[msg] , identifier[tic] = keyword[None] , identifier[prt] = identifier[sys] . identifier[stdout] ): literal[string] keyword[if] identifier[tic] keyword[is] keyword[None] : identifier[tic] = identifier[self] . identifier[tic] identifier[now] = identifier[timeit] . identifier[default_timer] () identifier[hms] = identifier[str] ( identifier[datetime] . identifier[timedelta] ( identifier[seconds] =( identifier[now] - identifier[tic] ))) identifier[prt] . identifier[write] ( literal[string] . identifier[format] ( identifier[HMS] = identifier[hms] , identifier[MSG] = identifier[msg] )) keyword[return] identifier[now]
def hms(self, msg, tic=None, prt=sys.stdout): """Print elapsed time and message.""" if tic is None: tic = self.tic # depends on [control=['if'], data=['tic']] now = timeit.default_timer() hms = str(datetime.timedelta(seconds=now - tic)) prt.write('{HMS}: {MSG}\n'.format(HMS=hms, MSG=msg)) return now
def afx_adam(): """Old version - Adam.""" hparams = transformer.transformer_base_v2() hparams.optimizer_adam_beta1 = 0.9 hparams.optimizer_adam_beta2 = 0.999 hparams.symbol_modality_num_shards = 1 hparams.batch_size = 2048 hparams.optimizer = "adam" hparams.learning_rate_schedule = ( "constant*rsqrt_decay*linear_warmup*rsqrt_hidden_size") hparams.learning_rate_constant = 2.0 return hparams
def function[afx_adam, parameter[]]: constant[Old version - Adam.] variable[hparams] assign[=] call[name[transformer].transformer_base_v2, parameter[]] name[hparams].optimizer_adam_beta1 assign[=] constant[0.9] name[hparams].optimizer_adam_beta2 assign[=] constant[0.999] name[hparams].symbol_modality_num_shards assign[=] constant[1] name[hparams].batch_size assign[=] constant[2048] name[hparams].optimizer assign[=] constant[adam] name[hparams].learning_rate_schedule assign[=] constant[constant*rsqrt_decay*linear_warmup*rsqrt_hidden_size] name[hparams].learning_rate_constant assign[=] constant[2.0] return[name[hparams]]
keyword[def] identifier[afx_adam] (): literal[string] identifier[hparams] = identifier[transformer] . identifier[transformer_base_v2] () identifier[hparams] . identifier[optimizer_adam_beta1] = literal[int] identifier[hparams] . identifier[optimizer_adam_beta2] = literal[int] identifier[hparams] . identifier[symbol_modality_num_shards] = literal[int] identifier[hparams] . identifier[batch_size] = literal[int] identifier[hparams] . identifier[optimizer] = literal[string] identifier[hparams] . identifier[learning_rate_schedule] =( literal[string] ) identifier[hparams] . identifier[learning_rate_constant] = literal[int] keyword[return] identifier[hparams]
def afx_adam(): """Old version - Adam.""" hparams = transformer.transformer_base_v2() hparams.optimizer_adam_beta1 = 0.9 hparams.optimizer_adam_beta2 = 0.999 hparams.symbol_modality_num_shards = 1 hparams.batch_size = 2048 hparams.optimizer = 'adam' hparams.learning_rate_schedule = 'constant*rsqrt_decay*linear_warmup*rsqrt_hidden_size' hparams.learning_rate_constant = 2.0 return hparams
def default_endpoint_from_config(config, option=None): """Return a default endpoint.""" default_endpoint = config.get('core', {}).get('default') project_endpoint = config.get('project', {}).get('core', {}).get('default', default_endpoint) return Endpoint( option or project_endpoint or default_endpoint, default=default_endpoint, project=project_endpoint, option=option )
def function[default_endpoint_from_config, parameter[config, option]]: constant[Return a default endpoint.] variable[default_endpoint] assign[=] call[call[name[config].get, parameter[constant[core], dictionary[[], []]]].get, parameter[constant[default]]] variable[project_endpoint] assign[=] call[call[call[name[config].get, parameter[constant[project], dictionary[[], []]]].get, parameter[constant[core], dictionary[[], []]]].get, parameter[constant[default], name[default_endpoint]]] return[call[name[Endpoint], parameter[<ast.BoolOp object at 0x7da18bc73850>]]]
keyword[def] identifier[default_endpoint_from_config] ( identifier[config] , identifier[option] = keyword[None] ): literal[string] identifier[default_endpoint] = identifier[config] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ) identifier[project_endpoint] = identifier[config] . identifier[get] ( literal[string] , {}). identifier[get] ( literal[string] , {}). identifier[get] ( literal[string] , identifier[default_endpoint] ) keyword[return] identifier[Endpoint] ( identifier[option] keyword[or] identifier[project_endpoint] keyword[or] identifier[default_endpoint] , identifier[default] = identifier[default_endpoint] , identifier[project] = identifier[project_endpoint] , identifier[option] = identifier[option] )
def default_endpoint_from_config(config, option=None): """Return a default endpoint.""" default_endpoint = config.get('core', {}).get('default') project_endpoint = config.get('project', {}).get('core', {}).get('default', default_endpoint) return Endpoint(option or project_endpoint or default_endpoint, default=default_endpoint, project=project_endpoint, option=option)
def total_flux(F, A=None): r"""Compute the total flux, or turnover flux, that is produced by the flux sources and consumed by the flux sinks Parameters ---------- F : (n, n) ndarray Matrix of flux values between pairs of states. A : array_like (optional) List of integer state labels for set A (reactant) Returns ------- F : float The total flux, or turnover flux, that is produced by the flux sources and consumed by the flux sinks """ if A is None: prod = flux_production(F) zeros = np.zeros(len(prod)) outflux = np.sum(np.maximum(prod, zeros)) return outflux else: X = set(np.arange(F.shape[0])) # total state space A = set(A) notA = X.difference(A) outflux = (F[list(A), :])[:, list(notA)].sum() return outflux
def function[total_flux, parameter[F, A]]: constant[Compute the total flux, or turnover flux, that is produced by the flux sources and consumed by the flux sinks Parameters ---------- F : (n, n) ndarray Matrix of flux values between pairs of states. A : array_like (optional) List of integer state labels for set A (reactant) Returns ------- F : float The total flux, or turnover flux, that is produced by the flux sources and consumed by the flux sinks ] if compare[name[A] is constant[None]] begin[:] variable[prod] assign[=] call[name[flux_production], parameter[name[F]]] variable[zeros] assign[=] call[name[np].zeros, parameter[call[name[len], parameter[name[prod]]]]] variable[outflux] assign[=] call[name[np].sum, parameter[call[name[np].maximum, parameter[name[prod], name[zeros]]]]] return[name[outflux]]
keyword[def] identifier[total_flux] ( identifier[F] , identifier[A] = keyword[None] ): literal[string] keyword[if] identifier[A] keyword[is] keyword[None] : identifier[prod] = identifier[flux_production] ( identifier[F] ) identifier[zeros] = identifier[np] . identifier[zeros] ( identifier[len] ( identifier[prod] )) identifier[outflux] = identifier[np] . identifier[sum] ( identifier[np] . identifier[maximum] ( identifier[prod] , identifier[zeros] )) keyword[return] identifier[outflux] keyword[else] : identifier[X] = identifier[set] ( identifier[np] . identifier[arange] ( identifier[F] . identifier[shape] [ literal[int] ])) identifier[A] = identifier[set] ( identifier[A] ) identifier[notA] = identifier[X] . identifier[difference] ( identifier[A] ) identifier[outflux] =( identifier[F] [ identifier[list] ( identifier[A] ),:])[:, identifier[list] ( identifier[notA] )]. identifier[sum] () keyword[return] identifier[outflux]
def total_flux(F, A=None): """Compute the total flux, or turnover flux, that is produced by the flux sources and consumed by the flux sinks Parameters ---------- F : (n, n) ndarray Matrix of flux values between pairs of states. A : array_like (optional) List of integer state labels for set A (reactant) Returns ------- F : float The total flux, or turnover flux, that is produced by the flux sources and consumed by the flux sinks """ if A is None: prod = flux_production(F) zeros = np.zeros(len(prod)) outflux = np.sum(np.maximum(prod, zeros)) return outflux # depends on [control=['if'], data=[]] else: X = set(np.arange(F.shape[0])) # total state space A = set(A) notA = X.difference(A) outflux = F[list(A), :][:, list(notA)].sum() return outflux
def from_string(cls, string): """ Parse ``string`` into a CPPType instance """ cls.TYPE.setParseAction(cls.make) try: return cls.TYPE.parseString(string, parseAll=True)[0] except ParseException: log.error("Failed to parse '{0}'".format(string)) raise
def function[from_string, parameter[cls, string]]: constant[ Parse ``string`` into a CPPType instance ] call[name[cls].TYPE.setParseAction, parameter[name[cls].make]] <ast.Try object at 0x7da1b1193dc0>
keyword[def] identifier[from_string] ( identifier[cls] , identifier[string] ): literal[string] identifier[cls] . identifier[TYPE] . identifier[setParseAction] ( identifier[cls] . identifier[make] ) keyword[try] : keyword[return] identifier[cls] . identifier[TYPE] . identifier[parseString] ( identifier[string] , identifier[parseAll] = keyword[True] )[ literal[int] ] keyword[except] identifier[ParseException] : identifier[log] . identifier[error] ( literal[string] . identifier[format] ( identifier[string] )) keyword[raise]
def from_string(cls, string): """ Parse ``string`` into a CPPType instance """ cls.TYPE.setParseAction(cls.make) try: return cls.TYPE.parseString(string, parseAll=True)[0] # depends on [control=['try'], data=[]] except ParseException: log.error("Failed to parse '{0}'".format(string)) raise # depends on [control=['except'], data=[]]
def find_tabs(self, custom_table_classes=None): """Finds all classes that are subcalss of Table and loads them into a dictionary named tables.""" for module_name in get_all_modules(self.package_path): for name, _type in get_all_classes(module_name): # pylint: disable=W0640 subclasses = [Table] + (custom_table_classes or list()) iss_subclass = map(lambda c: issubclass(_type, c), subclasses) if isclass(_type) and any(iss_subclass): self.tabs.update([[name, _type]])
def function[find_tabs, parameter[self, custom_table_classes]]: constant[Finds all classes that are subcalss of Table and loads them into a dictionary named tables.] for taget[name[module_name]] in starred[call[name[get_all_modules], parameter[name[self].package_path]]] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b0bd2a40>, <ast.Name object at 0x7da1b0bd3b80>]]] in starred[call[name[get_all_classes], parameter[name[module_name]]]] begin[:] variable[subclasses] assign[=] binary_operation[list[[<ast.Name object at 0x7da1b0bd36d0>]] + <ast.BoolOp object at 0x7da1b0bd3280>] variable[iss_subclass] assign[=] call[name[map], parameter[<ast.Lambda object at 0x7da1b0bd09d0>, name[subclasses]]] if <ast.BoolOp object at 0x7da1b0bd12d0> begin[:] call[name[self].tabs.update, parameter[list[[<ast.List object at 0x7da1b0bd2b60>]]]]
keyword[def] identifier[find_tabs] ( identifier[self] , identifier[custom_table_classes] = keyword[None] ): literal[string] keyword[for] identifier[module_name] keyword[in] identifier[get_all_modules] ( identifier[self] . identifier[package_path] ): keyword[for] identifier[name] , identifier[_type] keyword[in] identifier[get_all_classes] ( identifier[module_name] ): identifier[subclasses] =[ identifier[Table] ]+( identifier[custom_table_classes] keyword[or] identifier[list] ()) identifier[iss_subclass] = identifier[map] ( keyword[lambda] identifier[c] : identifier[issubclass] ( identifier[_type] , identifier[c] ), identifier[subclasses] ) keyword[if] identifier[isclass] ( identifier[_type] ) keyword[and] identifier[any] ( identifier[iss_subclass] ): identifier[self] . identifier[tabs] . identifier[update] ([[ identifier[name] , identifier[_type] ]])
def find_tabs(self, custom_table_classes=None): """Finds all classes that are subcalss of Table and loads them into a dictionary named tables.""" for module_name in get_all_modules(self.package_path): for (name, _type) in get_all_classes(module_name): # pylint: disable=W0640 subclasses = [Table] + (custom_table_classes or list()) iss_subclass = map(lambda c: issubclass(_type, c), subclasses) if isclass(_type) and any(iss_subclass): self.tabs.update([[name, _type]]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['module_name']]
def beginning_of_code_block(node, line_number, full_contents, is_code_node): """Return line number of beginning of code block.""" if SPHINX_INSTALLED and not is_code_node: delta = len(node.non_default_attributes()) current_line_contents = full_contents.splitlines()[line_number:] blank_lines = next( (i for (i, x) in enumerate(current_line_contents) if x), 0) return ( line_number + delta - 1 + blank_lines - 1 + SPHINX_CODE_BLOCK_DELTA) else: lines = full_contents.splitlines() code_block_length = len(node.rawsource.splitlines()) try: # Case where there are no extra spaces. if lines[line_number - 1].strip(): return line_number - code_block_length + 1 except IndexError: pass # The offsets are wrong if the RST text has multiple blank lines after # the code block. This is a workaround. for line_number in range(line_number, 1, -1): if lines[line_number - 2].strip(): break return line_number - code_block_length
def function[beginning_of_code_block, parameter[node, line_number, full_contents, is_code_node]]: constant[Return line number of beginning of code block.] if <ast.BoolOp object at 0x7da1b0722c50> begin[:] variable[delta] assign[=] call[name[len], parameter[call[name[node].non_default_attributes, parameter[]]]] variable[current_line_contents] assign[=] call[call[name[full_contents].splitlines, parameter[]]][<ast.Slice object at 0x7da1b0722ec0>] variable[blank_lines] assign[=] call[name[next], parameter[<ast.GeneratorExp object at 0x7da1b08f89d0>, constant[0]]] return[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[line_number] + name[delta]] - constant[1]] + name[blank_lines]] - constant[1]] + name[SPHINX_CODE_BLOCK_DELTA]]]
keyword[def] identifier[beginning_of_code_block] ( identifier[node] , identifier[line_number] , identifier[full_contents] , identifier[is_code_node] ): literal[string] keyword[if] identifier[SPHINX_INSTALLED] keyword[and] keyword[not] identifier[is_code_node] : identifier[delta] = identifier[len] ( identifier[node] . identifier[non_default_attributes] ()) identifier[current_line_contents] = identifier[full_contents] . identifier[splitlines] ()[ identifier[line_number] :] identifier[blank_lines] = identifier[next] ( ( identifier[i] keyword[for] ( identifier[i] , identifier[x] ) keyword[in] identifier[enumerate] ( identifier[current_line_contents] ) keyword[if] identifier[x] ), literal[int] ) keyword[return] ( identifier[line_number] + identifier[delta] - literal[int] + identifier[blank_lines] - literal[int] + identifier[SPHINX_CODE_BLOCK_DELTA] ) keyword[else] : identifier[lines] = identifier[full_contents] . identifier[splitlines] () identifier[code_block_length] = identifier[len] ( identifier[node] . identifier[rawsource] . identifier[splitlines] ()) keyword[try] : keyword[if] identifier[lines] [ identifier[line_number] - literal[int] ]. identifier[strip] (): keyword[return] identifier[line_number] - identifier[code_block_length] + literal[int] keyword[except] identifier[IndexError] : keyword[pass] keyword[for] identifier[line_number] keyword[in] identifier[range] ( identifier[line_number] , literal[int] ,- literal[int] ): keyword[if] identifier[lines] [ identifier[line_number] - literal[int] ]. identifier[strip] (): keyword[break] keyword[return] identifier[line_number] - identifier[code_block_length]
def beginning_of_code_block(node, line_number, full_contents, is_code_node): """Return line number of beginning of code block.""" if SPHINX_INSTALLED and (not is_code_node): delta = len(node.non_default_attributes()) current_line_contents = full_contents.splitlines()[line_number:] blank_lines = next((i for (i, x) in enumerate(current_line_contents) if x), 0) return line_number + delta - 1 + blank_lines - 1 + SPHINX_CODE_BLOCK_DELTA # depends on [control=['if'], data=[]] else: lines = full_contents.splitlines() code_block_length = len(node.rawsource.splitlines()) try: # Case where there are no extra spaces. if lines[line_number - 1].strip(): return line_number - code_block_length + 1 # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except IndexError: pass # depends on [control=['except'], data=[]] # The offsets are wrong if the RST text has multiple blank lines after # the code block. This is a workaround. for line_number in range(line_number, 1, -1): if lines[line_number - 2].strip(): break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line_number']] return line_number - code_block_length
def subpixel(superpix, nside_superpix, nside_subpix): """ Return the indices of sub-pixels (resolution nside_subpix) within the super-pixel with (resolution nside_superpix). ADW: It would be better to convert to next and do this explicitly """ if nside_superpix==nside_subpix: return superpix vec = hp.pix2vec(nside_superpix, superpix) radius = np.degrees(2. * hp.max_pixrad(nside_superpix)) subpix = query_disc(nside_subpix, vec, radius) pix_for_subpix = superpixel(subpix,nside_subpix,nside_superpix) # Might be able to speed up array indexing... return subpix[pix_for_subpix == superpix]
def function[subpixel, parameter[superpix, nside_superpix, nside_subpix]]: constant[ Return the indices of sub-pixels (resolution nside_subpix) within the super-pixel with (resolution nside_superpix). ADW: It would be better to convert to next and do this explicitly ] if compare[name[nside_superpix] equal[==] name[nside_subpix]] begin[:] return[name[superpix]] variable[vec] assign[=] call[name[hp].pix2vec, parameter[name[nside_superpix], name[superpix]]] variable[radius] assign[=] call[name[np].degrees, parameter[binary_operation[constant[2.0] * call[name[hp].max_pixrad, parameter[name[nside_superpix]]]]]] variable[subpix] assign[=] call[name[query_disc], parameter[name[nside_subpix], name[vec], name[radius]]] variable[pix_for_subpix] assign[=] call[name[superpixel], parameter[name[subpix], name[nside_subpix], name[nside_superpix]]] return[call[name[subpix]][compare[name[pix_for_subpix] equal[==] name[superpix]]]]
keyword[def] identifier[subpixel] ( identifier[superpix] , identifier[nside_superpix] , identifier[nside_subpix] ): literal[string] keyword[if] identifier[nside_superpix] == identifier[nside_subpix] : keyword[return] identifier[superpix] identifier[vec] = identifier[hp] . identifier[pix2vec] ( identifier[nside_superpix] , identifier[superpix] ) identifier[radius] = identifier[np] . identifier[degrees] ( literal[int] * identifier[hp] . identifier[max_pixrad] ( identifier[nside_superpix] )) identifier[subpix] = identifier[query_disc] ( identifier[nside_subpix] , identifier[vec] , identifier[radius] ) identifier[pix_for_subpix] = identifier[superpixel] ( identifier[subpix] , identifier[nside_subpix] , identifier[nside_superpix] ) keyword[return] identifier[subpix] [ identifier[pix_for_subpix] == identifier[superpix] ]
def subpixel(superpix, nside_superpix, nside_subpix): """ Return the indices of sub-pixels (resolution nside_subpix) within the super-pixel with (resolution nside_superpix). ADW: It would be better to convert to next and do this explicitly """ if nside_superpix == nside_subpix: return superpix # depends on [control=['if'], data=[]] vec = hp.pix2vec(nside_superpix, superpix) radius = np.degrees(2.0 * hp.max_pixrad(nside_superpix)) subpix = query_disc(nside_subpix, vec, radius) pix_for_subpix = superpixel(subpix, nside_subpix, nside_superpix) # Might be able to speed up array indexing... return subpix[pix_for_subpix == superpix]
def set_up_phase(self, training_info, model, source: Source): """ Prepare the phase for learning """ self._optimizer_instance = self.optimizer_factory.instantiate(model) self._source = source
def function[set_up_phase, parameter[self, training_info, model, source]]: constant[ Prepare the phase for learning ] name[self]._optimizer_instance assign[=] call[name[self].optimizer_factory.instantiate, parameter[name[model]]] name[self]._source assign[=] name[source]
keyword[def] identifier[set_up_phase] ( identifier[self] , identifier[training_info] , identifier[model] , identifier[source] : identifier[Source] ): literal[string] identifier[self] . identifier[_optimizer_instance] = identifier[self] . identifier[optimizer_factory] . identifier[instantiate] ( identifier[model] ) identifier[self] . identifier[_source] = identifier[source]
def set_up_phase(self, training_info, model, source: Source): """ Prepare the phase for learning """ self._optimizer_instance = self.optimizer_factory.instantiate(model) self._source = source