code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def _parse_log_statement(options):
    """Parse the options of a log statement, dispatching each one.

    Every option is classified as a reference, a junction, or an inline
    definition and handed to the matching ``_add_*`` helper.  References
    and inline definitions are attached to ``_current_statement``
    (a name defined elsewhere in this module).
    """
    for option in options:
        if _is_reference(option):
            _add_reference(option, _current_statement)
        elif _is_junction(option):
            _add_junction(option)
        elif _is_inline_definition(option):
            _add_inline_definition(option, _current_statement)
def function[_parse_log_statement, parameter[options]]: constant[ Parses a log path. ] for taget[name[i]] in starred[name[options]] begin[:] if call[name[_is_reference], parameter[name[i]]] begin[:] call[name[_add_reference], parameter[name[i], name[_current_statement]]]
keyword[def] identifier[_parse_log_statement] ( identifier[options] ): literal[string] keyword[for] identifier[i] keyword[in] identifier[options] : keyword[if] identifier[_is_reference] ( identifier[i] ): identifier[_add_reference] ( identifier[i] , identifier[_current_statement] ) keyword[elif] identifier[_is_junction] ( identifier[i] ): identifier[_add_junction] ( identifier[i] ) keyword[elif] identifier[_is_inline_definition] ( identifier[i] ): identifier[_add_inline_definition] ( identifier[i] , identifier[_current_statement] )
def _parse_log_statement(options): """ Parses a log path. """ for i in options: if _is_reference(i): _add_reference(i, _current_statement) # depends on [control=['if'], data=[]] elif _is_junction(i): _add_junction(i) # depends on [control=['if'], data=[]] elif _is_inline_definition(i): _add_inline_definition(i, _current_statement) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
def updateEvolution(self):
    """
    Updates the "population punk proportion" evolution array.

    Fashion victims believe the proportion of punks next period is a
    linear function of the proportion of punks this period, subject to
    a uniform shock.  Using the attributes pNextIntercept, pNextSlope,
    pNextCount, pNextWidth, and pGrid, this fills pEvolution with a
    discrete approximation of next-period states for each current-period
    state in pGrid.

    Parameters
    ----------
    none

    Returns
    -------
    none
    """
    self.pEvolution = np.zeros((self.pCount, self.pNextCount))
    half_width = self.pNextWidth
    for idx in range(self.pCount):
        p_current = self.pGrid[idx]
        # Expected next-period proportion is linear in the current one.
        p_mean = self.pNextIntercept + self.pNextSlope * p_current
        # approxUniform returns (weights, points); index 1 is the grid of points.
        discrete_dist = approxUniform(
            N=self.pNextCount,
            bot=p_mean - half_width,
            top=p_mean + half_width,
        )[1]
        self.pEvolution[idx, :] = discrete_dist
def function[updateEvolution, parameter[self]]: constant[ Updates the "population punk proportion" evolution array. Fasion victims believe that the proportion of punks in the subsequent period is a linear function of the proportion of punks this period, subject to a uniform shock. Given attributes of self pNextIntercept, pNextSlope, pNextCount, pNextWidth, and pGrid, this method generates a new array for the attri- bute pEvolution, representing a discrete approximation of next period states for each current period state in pGrid. Parameters ---------- none Returns ------- none ] name[self].pEvolution assign[=] call[name[np].zeros, parameter[tuple[[<ast.Attribute object at 0x7da2045653c0>, <ast.Attribute object at 0x7da2054a47c0>]]]] for taget[name[j]] in starred[call[name[range], parameter[name[self].pCount]]] begin[:] variable[pNow] assign[=] call[name[self].pGrid][name[j]] variable[pNextMean] assign[=] binary_operation[name[self].pNextIntercept + binary_operation[name[self].pNextSlope * name[pNow]]] variable[dist] assign[=] call[call[name[approxUniform], parameter[]]][constant[1]] call[name[self].pEvolution][tuple[[<ast.Name object at 0x7da18ede6ec0>, <ast.Slice object at 0x7da2041dba90>]]] assign[=] name[dist]
keyword[def] identifier[updateEvolution] ( identifier[self] ): literal[string] identifier[self] . identifier[pEvolution] = identifier[np] . identifier[zeros] (( identifier[self] . identifier[pCount] , identifier[self] . identifier[pNextCount] )) keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[self] . identifier[pCount] ): identifier[pNow] = identifier[self] . identifier[pGrid] [ identifier[j] ] identifier[pNextMean] = identifier[self] . identifier[pNextIntercept] + identifier[self] . identifier[pNextSlope] * identifier[pNow] identifier[dist] = identifier[approxUniform] ( identifier[N] = identifier[self] . identifier[pNextCount] , identifier[bot] = identifier[pNextMean] - identifier[self] . identifier[pNextWidth] , identifier[top] = identifier[pNextMean] + identifier[self] . identifier[pNextWidth] )[ literal[int] ] identifier[self] . identifier[pEvolution] [ identifier[j] ,:]= identifier[dist]
def updateEvolution(self): """ Updates the "population punk proportion" evolution array. Fasion victims believe that the proportion of punks in the subsequent period is a linear function of the proportion of punks this period, subject to a uniform shock. Given attributes of self pNextIntercept, pNextSlope, pNextCount, pNextWidth, and pGrid, this method generates a new array for the attri- bute pEvolution, representing a discrete approximation of next period states for each current period state in pGrid. Parameters ---------- none Returns ------- none """ self.pEvolution = np.zeros((self.pCount, self.pNextCount)) for j in range(self.pCount): pNow = self.pGrid[j] pNextMean = self.pNextIntercept + self.pNextSlope * pNow dist = approxUniform(N=self.pNextCount, bot=pNextMean - self.pNextWidth, top=pNextMean + self.pNextWidth)[1] self.pEvolution[j, :] = dist # depends on [control=['for'], data=['j']]
def equals_order_sensitive(self, other):
    """Order-sensitive equality check.

    *See also* :ref:`eq-order-insensitive`
    """
    # Same short-circuit as BidictBase.__eq__; not factored out because
    # the function-call overhead would outweigh the shared code.
    if not isinstance(other, Mapping):
        return False
    if len(self) != len(other):
        return False
    pairs = izip(iteritems(self), iteritems(other))
    return all(mine == theirs for (mine, theirs) in pairs)
def function[equals_order_sensitive, parameter[self, other]]: constant[Order-sensitive equality check. *See also* :ref:`eq-order-insensitive` ] if <ast.BoolOp object at 0x7da1b0552140> begin[:] return[constant[False]] return[call[name[all], parameter[<ast.GeneratorExp object at 0x7da1b05526b0>]]]
keyword[def] identifier[equals_order_sensitive] ( identifier[self] , identifier[other] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[other] , identifier[Mapping] ) keyword[or] identifier[len] ( identifier[self] )!= identifier[len] ( identifier[other] ): keyword[return] keyword[False] keyword[return] identifier[all] ( identifier[i] == identifier[j] keyword[for] ( identifier[i] , identifier[j] ) keyword[in] identifier[izip] ( identifier[iteritems] ( identifier[self] ), identifier[iteritems] ( identifier[other] )))
def equals_order_sensitive(self, other): """Order-sensitive equality check. *See also* :ref:`eq-order-insensitive` """ # Same short-circuit as BidictBase.__eq__. Factoring out not worth function call overhead. if not isinstance(other, Mapping) or len(self) != len(other): return False # depends on [control=['if'], data=[]] return all((i == j for (i, j) in izip(iteritems(self), iteritems(other))))
def sampleset(self):
    """Return ``x`` array that samples the feature."""
    lo, hi = self.bounding_box
    half_width = 0.5 * self.width
    left_edge = self.x_0 - half_width
    right_edge = self.x_0 + half_width
    if self._n_models == 1:
        # Single model: four scalar sample points.
        samples = [lo, left_edge, right_edge, hi]
    else:
        # Model set: one 4-point row per model.
        samples = list(zip(lo, left_edge, right_edge, hi))
    return np.asarray(samples)
def function[sampleset, parameter[self]]: constant[Return ``x`` array that samples the feature.] <ast.Tuple object at 0x7da18f811420> assign[=] name[self].bounding_box variable[dw] assign[=] binary_operation[name[self].width * constant[0.5]] variable[x2] assign[=] binary_operation[name[self].x_0 - name[dw]] variable[x3] assign[=] binary_operation[name[self].x_0 + name[dw]] if compare[name[self]._n_models equal[==] constant[1]] begin[:] variable[w] assign[=] list[[<ast.Name object at 0x7da18f812050>, <ast.Name object at 0x7da18f810730>, <ast.Name object at 0x7da18f811540>, <ast.Name object at 0x7da18f8116c0>]] return[call[name[np].asarray, parameter[name[w]]]]
keyword[def] identifier[sampleset] ( identifier[self] ): literal[string] identifier[x1] , identifier[x4] = identifier[self] . identifier[bounding_box] identifier[dw] = identifier[self] . identifier[width] * literal[int] identifier[x2] = identifier[self] . identifier[x_0] - identifier[dw] identifier[x3] = identifier[self] . identifier[x_0] + identifier[dw] keyword[if] identifier[self] . identifier[_n_models] == literal[int] : identifier[w] =[ identifier[x1] , identifier[x2] , identifier[x3] , identifier[x4] ] keyword[else] : identifier[w] = identifier[list] ( identifier[zip] ( identifier[x1] , identifier[x2] , identifier[x3] , identifier[x4] )) keyword[return] identifier[np] . identifier[asarray] ( identifier[w] )
def sampleset(self): """Return ``x`` array that samples the feature.""" (x1, x4) = self.bounding_box dw = self.width * 0.5 x2 = self.x_0 - dw x3 = self.x_0 + dw if self._n_models == 1: w = [x1, x2, x3, x4] # depends on [control=['if'], data=[]] else: w = list(zip(x1, x2, x3, x4)) return np.asarray(w)
def write_file(self, filename=None, buffer=None, fileobj=None):
    """Write this NBT file to a file.

    The destination is picked in order of precedence: an explicit
    ``buffer`` (used as-is and left open), a ``filename`` (opened as a
    new gzip file), a ``fileobj`` (wrapped in gzip), or the previously
    stored ``self.filename`` / ``self.file``.

    Raises ValueError when no destination can be determined.
    """
    should_close = True
    if buffer:
        # Caller owns the buffer: write into it directly, never close it.
        self.filename = None
        self.file = buffer
        should_close = False
    elif filename:
        self.filename = filename
        self.file = GzipFile(filename, "wb")
    elif fileobj:
        self.filename = None
        self.file = GzipFile(fileobj=fileobj, mode="wb")
    elif self.filename:
        self.file = GzipFile(self.filename, "wb")
    elif not self.file:
        raise ValueError(
            "NBTFile.write_file(): Need to specify either a "
            "filename or a file object"
        )
    # Emit the tag header (type id, then name), then the payload tree.
    TAG_Byte(self.id)._render_buffer(self.file)
    TAG_String(self.name)._render_buffer(self.file)
    self._render_buffer(self.file)
    # Best effort: the target object may not support flush/close.
    try:
        self.file.flush()
    except (AttributeError, IOError):
        pass
    if should_close:
        try:
            self.file.close()
        except (AttributeError, IOError):
            pass
def function[write_file, parameter[self, filename, buffer, fileobj]]: constant[Write this NBT file to a file.] variable[closefile] assign[=] constant[True] if name[buffer] begin[:] name[self].filename assign[=] constant[None] name[self].file assign[=] name[buffer] variable[closefile] assign[=] constant[False] call[call[name[TAG_Byte], parameter[name[self].id]]._render_buffer, parameter[name[self].file]] call[call[name[TAG_String], parameter[name[self].name]]._render_buffer, parameter[name[self].file]] call[name[self]._render_buffer, parameter[name[self].file]] <ast.Try object at 0x7da18eb57c70> if name[closefile] begin[:] <ast.Try object at 0x7da18eb55090>
keyword[def] identifier[write_file] ( identifier[self] , identifier[filename] = keyword[None] , identifier[buffer] = keyword[None] , identifier[fileobj] = keyword[None] ): literal[string] identifier[closefile] = keyword[True] keyword[if] identifier[buffer] : identifier[self] . identifier[filename] = keyword[None] identifier[self] . identifier[file] = identifier[buffer] identifier[closefile] = keyword[False] keyword[elif] identifier[filename] : identifier[self] . identifier[filename] = identifier[filename] identifier[self] . identifier[file] = identifier[GzipFile] ( identifier[filename] , literal[string] ) keyword[elif] identifier[fileobj] : identifier[self] . identifier[filename] = keyword[None] identifier[self] . identifier[file] = identifier[GzipFile] ( identifier[fileobj] = identifier[fileobj] , identifier[mode] = literal[string] ) keyword[elif] identifier[self] . identifier[filename] : identifier[self] . identifier[file] = identifier[GzipFile] ( identifier[self] . identifier[filename] , literal[string] ) keyword[elif] keyword[not] identifier[self] . identifier[file] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] ) identifier[TAG_Byte] ( identifier[self] . identifier[id] ). identifier[_render_buffer] ( identifier[self] . identifier[file] ) identifier[TAG_String] ( identifier[self] . identifier[name] ). identifier[_render_buffer] ( identifier[self] . identifier[file] ) identifier[self] . identifier[_render_buffer] ( identifier[self] . identifier[file] ) keyword[try] : identifier[self] . identifier[file] . identifier[flush] () keyword[except] ( identifier[AttributeError] , identifier[IOError] ): keyword[pass] keyword[if] identifier[closefile] : keyword[try] : identifier[self] . identifier[file] . identifier[close] () keyword[except] ( identifier[AttributeError] , identifier[IOError] ): keyword[pass]
def write_file(self, filename=None, buffer=None, fileobj=None): """Write this NBT file to a file.""" closefile = True if buffer: self.filename = None self.file = buffer closefile = False # depends on [control=['if'], data=[]] elif filename: self.filename = filename self.file = GzipFile(filename, 'wb') # depends on [control=['if'], data=[]] elif fileobj: self.filename = None self.file = GzipFile(fileobj=fileobj, mode='wb') # depends on [control=['if'], data=[]] elif self.filename: self.file = GzipFile(self.filename, 'wb') # depends on [control=['if'], data=[]] elif not self.file: raise ValueError('NBTFile.write_file(): Need to specify either a filename or a file object') # depends on [control=['if'], data=[]] # Render tree to file TAG_Byte(self.id)._render_buffer(self.file) TAG_String(self.name)._render_buffer(self.file) self._render_buffer(self.file) # make sure the file is complete try: self.file.flush() # depends on [control=['try'], data=[]] except (AttributeError, IOError): pass # depends on [control=['except'], data=[]] if closefile: try: self.file.close() # depends on [control=['try'], data=[]] except (AttributeError, IOError): pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
def has_expired(self, lifetime, now=None):
    """Report if the session key has expired.

    :param lifetime: A :class:`datetime.timedelta` that specifies the
                     maximum age this :class:`SessionID` should be checked
                     against.
    :param now: If specified, use this :class:`~datetime.datetime` instance
                instead of :meth:`~datetime.datetime.utcnow()` as the current
                time.
    """
    now = now or datetime.utcnow()
    deadline = self.created + lifetime
    return now > deadline
def function[has_expired, parameter[self, lifetime, now]]: constant[Report if the session key has expired. :param lifetime: A :class:`datetime.timedelta` that specifies the maximum age this :class:`SessionID` should be checked against. :param now: If specified, use this :class:`~datetime.datetime` instance instead of :meth:`~datetime.datetime.utcnow()` as the current time. ] variable[now] assign[=] <ast.BoolOp object at 0x7da1b0f21570> return[compare[name[now] greater[>] binary_operation[name[self].created + name[lifetime]]]]
keyword[def] identifier[has_expired] ( identifier[self] , identifier[lifetime] , identifier[now] = keyword[None] ): literal[string] identifier[now] = identifier[now] keyword[or] identifier[datetime] . identifier[utcnow] () keyword[return] identifier[now] > identifier[self] . identifier[created] + identifier[lifetime]
def has_expired(self, lifetime, now=None): """Report if the session key has expired. :param lifetime: A :class:`datetime.timedelta` that specifies the maximum age this :class:`SessionID` should be checked against. :param now: If specified, use this :class:`~datetime.datetime` instance instead of :meth:`~datetime.datetime.utcnow()` as the current time. """ now = now or datetime.utcnow() return now > self.created + lifetime
def GetTimeInterpolatedStops(self):
    """Return a list of (secs, stoptime, is_timepoint) tuples.

    secs will always be an int. If the StopTime object does not have
    explict times this method guesses using distance. stoptime is a
    StopTime object and is_timepoint is a bool.

    Raises:
      ValueError if this trip does not have the times needed to interpolate
    """
    interpolated = []
    stoptimes = self.GetStopTimes()
    # No stoptimes -> [] is correct; but interpolation is impossible when
    # the endpoints are missing explicit times.
    if not stoptimes:
        return []
    if (stoptimes[0].GetTimeSecs() is None or
            stoptimes[-1].GetTimeSecs() is None):
        raise ValueError("%s must have time at first and last stop" % (self))

    prev_timepoint = None
    upcoming_timepoint = None
    span_distance = 0          # distance between the surrounding timepoints
    traveled_distance = 0      # distance covered since the last timepoint
    for idx, stoptime in enumerate(stoptimes):
        if stoptime.GetTimeSecs() != None:
            # Explicit timepoint: reset the accumulators and scan ahead to
            # find the next stop with an explicit time, summing distances.
            prev_timepoint = stoptime
            span_distance = 0
            traveled_distance = 0
            if idx + 1 < len(stoptimes):
                scan = idx + 1
                span_distance += util.ApproximateDistanceBetweenStops(
                    stoptimes[scan - 1].stop, stoptimes[scan].stop)
                while stoptimes[scan].GetTimeSecs() == None:
                    scan += 1
                    span_distance += util.ApproximateDistanceBetweenStops(
                        stoptimes[scan - 1].stop, stoptimes[scan].stop)
                upcoming_timepoint = stoptimes[scan]
            interpolated.append((stoptime.GetTimeSecs(), stoptime, True))
        else:
            # No explicit time: interpolate linearly by distance fraction
            # between the previous and upcoming timepoints.
            traveled_distance += util.ApproximateDistanceBetweenStops(
                stoptimes[idx - 1].stop, stoptime.stop)
            fraction = traveled_distance / span_distance
            segment_secs = (upcoming_timepoint.GetTimeSecs() -
                            prev_timepoint.GetTimeSecs())
            estimate = fraction * segment_secs + prev_timepoint.GetTimeSecs()
            interpolated.append((int(round(estimate)), stoptime, False))
    return interpolated
def function[GetTimeInterpolatedStops, parameter[self]]: constant[Return a list of (secs, stoptime, is_timepoint) tuples. secs will always be an int. If the StopTime object does not have explict times this method guesses using distance. stoptime is a StopTime object and is_timepoint is a bool. Raises: ValueError if this trip does not have the times needed to interpolate ] variable[rv] assign[=] list[[]] variable[stoptimes] assign[=] call[name[self].GetStopTimes, parameter[]] if <ast.UnaryOp object at 0x7da20c76c850> begin[:] return[list[[]]] if <ast.BoolOp object at 0x7da20c76de10> begin[:] <ast.Raise object at 0x7da20c76cc40> variable[cur_timepoint] assign[=] constant[None] variable[next_timepoint] assign[=] constant[None] variable[distance_between_timepoints] assign[=] constant[0] variable[distance_traveled_between_timepoints] assign[=] constant[0] for taget[tuple[[<ast.Name object at 0x7da20c76e3b0>, <ast.Name object at 0x7da20c76d270>]]] in starred[call[name[enumerate], parameter[name[stoptimes]]]] begin[:] if compare[call[name[st].GetTimeSecs, parameter[]] not_equal[!=] constant[None]] begin[:] variable[cur_timepoint] assign[=] name[st] variable[distance_between_timepoints] assign[=] constant[0] variable[distance_traveled_between_timepoints] assign[=] constant[0] if compare[binary_operation[name[i] + constant[1]] less[<] call[name[len], parameter[name[stoptimes]]]] begin[:] variable[k] assign[=] binary_operation[name[i] + constant[1]] <ast.AugAssign object at 0x7da20c76e9b0> while compare[call[call[name[stoptimes]][name[k]].GetTimeSecs, parameter[]] equal[==] constant[None]] begin[:] <ast.AugAssign object at 0x7da20c76d510> <ast.AugAssign object at 0x7da20c76f550> variable[next_timepoint] assign[=] call[name[stoptimes]][name[k]] call[name[rv].append, parameter[tuple[[<ast.Call object at 0x7da20c76d450>, <ast.Name object at 0x7da20c76d540>, <ast.Constant object at 0x7da20c76e9e0>]]]] return[name[rv]]
keyword[def] identifier[GetTimeInterpolatedStops] ( identifier[self] ): literal[string] identifier[rv] =[] identifier[stoptimes] = identifier[self] . identifier[GetStopTimes] () keyword[if] keyword[not] identifier[stoptimes] : keyword[return] [] keyword[if] ( identifier[stoptimes] [ literal[int] ]. identifier[GetTimeSecs] () keyword[is] keyword[None] keyword[or] identifier[stoptimes] [- literal[int] ]. identifier[GetTimeSecs] () keyword[is] keyword[None] ): keyword[raise] identifier[ValueError] ( literal[string] %( identifier[self] )) identifier[cur_timepoint] = keyword[None] identifier[next_timepoint] = keyword[None] identifier[distance_between_timepoints] = literal[int] identifier[distance_traveled_between_timepoints] = literal[int] keyword[for] identifier[i] , identifier[st] keyword[in] identifier[enumerate] ( identifier[stoptimes] ): keyword[if] identifier[st] . identifier[GetTimeSecs] ()!= keyword[None] : identifier[cur_timepoint] = identifier[st] identifier[distance_between_timepoints] = literal[int] identifier[distance_traveled_between_timepoints] = literal[int] keyword[if] identifier[i] + literal[int] < identifier[len] ( identifier[stoptimes] ): identifier[k] = identifier[i] + literal[int] identifier[distance_between_timepoints] += identifier[util] . identifier[ApproximateDistanceBetweenStops] ( identifier[stoptimes] [ identifier[k] - literal[int] ]. identifier[stop] , identifier[stoptimes] [ identifier[k] ]. identifier[stop] ) keyword[while] identifier[stoptimes] [ identifier[k] ]. identifier[GetTimeSecs] ()== keyword[None] : identifier[k] += literal[int] identifier[distance_between_timepoints] += identifier[util] . identifier[ApproximateDistanceBetweenStops] ( identifier[stoptimes] [ identifier[k] - literal[int] ]. identifier[stop] , identifier[stoptimes] [ identifier[k] ]. identifier[stop] ) identifier[next_timepoint] = identifier[stoptimes] [ identifier[k] ] identifier[rv] . identifier[append] (( identifier[st] . 
identifier[GetTimeSecs] (), identifier[st] , keyword[True] )) keyword[else] : identifier[distance_traveled_between_timepoints] += identifier[util] . identifier[ApproximateDistanceBetweenStops] ( identifier[stoptimes] [ identifier[i] - literal[int] ]. identifier[stop] , identifier[st] . identifier[stop] ) identifier[distance_percent] = identifier[distance_traveled_between_timepoints] / identifier[distance_between_timepoints] identifier[total_time] = identifier[next_timepoint] . identifier[GetTimeSecs] ()- identifier[cur_timepoint] . identifier[GetTimeSecs] () identifier[time_estimate] = identifier[distance_percent] * identifier[total_time] + identifier[cur_timepoint] . identifier[GetTimeSecs] () identifier[rv] . identifier[append] (( identifier[int] ( identifier[round] ( identifier[time_estimate] )), identifier[st] , keyword[False] )) keyword[return] identifier[rv]
def GetTimeInterpolatedStops(self): """Return a list of (secs, stoptime, is_timepoint) tuples. secs will always be an int. If the StopTime object does not have explict times this method guesses using distance. stoptime is a StopTime object and is_timepoint is a bool. Raises: ValueError if this trip does not have the times needed to interpolate """ rv = [] stoptimes = self.GetStopTimes() # If there are no stoptimes [] is the correct return value but if the start # or end are missing times there is no correct return value. if not stoptimes: return [] # depends on [control=['if'], data=[]] if stoptimes[0].GetTimeSecs() is None or stoptimes[-1].GetTimeSecs() is None: raise ValueError('%s must have time at first and last stop' % self) # depends on [control=['if'], data=[]] cur_timepoint = None next_timepoint = None distance_between_timepoints = 0 distance_traveled_between_timepoints = 0 for (i, st) in enumerate(stoptimes): if st.GetTimeSecs() != None: cur_timepoint = st distance_between_timepoints = 0 distance_traveled_between_timepoints = 0 if i + 1 < len(stoptimes): k = i + 1 distance_between_timepoints += util.ApproximateDistanceBetweenStops(stoptimes[k - 1].stop, stoptimes[k].stop) while stoptimes[k].GetTimeSecs() == None: k += 1 distance_between_timepoints += util.ApproximateDistanceBetweenStops(stoptimes[k - 1].stop, stoptimes[k].stop) # depends on [control=['while'], data=[]] next_timepoint = stoptimes[k] # depends on [control=['if'], data=[]] rv.append((st.GetTimeSecs(), st, True)) # depends on [control=['if'], data=[]] else: distance_traveled_between_timepoints += util.ApproximateDistanceBetweenStops(stoptimes[i - 1].stop, st.stop) distance_percent = distance_traveled_between_timepoints / distance_between_timepoints total_time = next_timepoint.GetTimeSecs() - cur_timepoint.GetTimeSecs() time_estimate = distance_percent * total_time + cur_timepoint.GetTimeSecs() rv.append((int(round(time_estimate)), st, False)) # depends on [control=['for'], data=[]] return rv
def __parse_enabled_plugins(self):
    """
    :returns: [(plugin_name, plugin_package, plugin_config), ...]
    :rtype: list of tuple
    """
    enabled = []
    for name, config in self.raw_config_dict.items():
        # Skip base-schema keys: they are core settings, not plugins.
        if name in self.BASE_SCHEMA.keys():
            continue
        # A plugin entry must be a dict with a truthy 'enabled' flag.
        if not isinstance(config, dict):
            continue
        if not config.get('enabled'):
            continue
        enabled.append((name, config['package'], config))
    return enabled
def function[__parse_enabled_plugins, parameter[self]]: constant[ :returns: [(plugin_name, plugin_package, plugin_config), ...] :rtype: list of tuple ] return[<ast.ListComp object at 0x7da1b03948e0>]
keyword[def] identifier[__parse_enabled_plugins] ( identifier[self] ): literal[string] keyword[return] [ ( identifier[plugin_name] , identifier[plugin] [ literal[string] ], identifier[plugin] ) keyword[for] identifier[plugin_name] , identifier[plugin] keyword[in] identifier[self] . identifier[raw_config_dict] . identifier[items] () keyword[if] ( identifier[plugin_name] keyword[not] keyword[in] identifier[self] . identifier[BASE_SCHEMA] . identifier[keys] ()) keyword[and] identifier[isinstance] ( identifier[plugin] , identifier[dict] ) keyword[and] identifier[plugin] . identifier[get] ( literal[string] )]
def __parse_enabled_plugins(self): """ :returns: [(plugin_name, plugin_package, plugin_config), ...] :rtype: list of tuple """ return [(plugin_name, plugin['package'], plugin) for (plugin_name, plugin) in self.raw_config_dict.items() if plugin_name not in self.BASE_SCHEMA.keys() and isinstance(plugin, dict) and plugin.get('enabled')]
def normalize_mesh(mesh):
    """Scale mesh positions to fit into the -1..1 cube.

    The ``'position'`` array is truncated to its first three columns
    (dropping e.g. a homogeneous w coordinate), recentred on the midpoint
    of its axis-aligned bounding box, and scaled so the largest absolute
    coordinate becomes 1.

    Parameters
    ----------
    mesh : mapping
        Mesh attributes; must contain a ``'position'`` array of shape
        (N, >=3).

    Returns
    -------
    dict
        Shallow copy of *mesh* with the normalized ``'position'``; the
        input mesh and its arrays are left untouched.
    """
    mesh = dict(mesh)
    pos = mesh['position'][:, :3].copy()
    # Center on the bounding-box midpoint.
    pos -= (pos.max(0) + pos.min(0)) / 2.0
    extent = np.abs(pos).max()
    # Guard against a degenerate mesh (all vertices coincident): the
    # unconditional division produced NaNs via 0/0.
    if extent > 0:
        pos /= extent
    mesh['position'] = pos
    return mesh
def function[normalize_mesh, parameter[mesh]]: constant[Scale mesh to fit into -1..1 cube] variable[mesh] assign[=] call[name[dict], parameter[name[mesh]]] variable[pos] assign[=] call[call[call[name[mesh]][constant[position]]][tuple[[<ast.Slice object at 0x7da1b1fe4b80>, <ast.Slice object at 0x7da1b1fe47f0>]]].copy, parameter[]] <ast.AugAssign object at 0x7da1b1fe4f10> <ast.AugAssign object at 0x7da1b1fe46d0> call[name[mesh]][constant[position]] assign[=] name[pos] return[name[mesh]]
keyword[def] identifier[normalize_mesh] ( identifier[mesh] ): literal[string] identifier[mesh] = identifier[dict] ( identifier[mesh] ) identifier[pos] = identifier[mesh] [ literal[string] ][:,: literal[int] ]. identifier[copy] () identifier[pos] -=( identifier[pos] . identifier[max] ( literal[int] )+ identifier[pos] . identifier[min] ( literal[int] ))/ literal[int] identifier[pos] /= identifier[np] . identifier[abs] ( identifier[pos] ). identifier[max] () identifier[mesh] [ literal[string] ]= identifier[pos] keyword[return] identifier[mesh]
def normalize_mesh(mesh): """Scale mesh to fit into -1..1 cube""" mesh = dict(mesh) pos = mesh['position'][:, :3].copy() pos -= (pos.max(0) + pos.min(0)) / 2.0 pos /= np.abs(pos).max() mesh['position'] = pos return mesh
def _Matches(path, pattern_list):
    """Returns true if path matches any patten found in pattern_list.

    Args:
      path: A dot separated path to a package, class, method or variable
      pattern_list: A list of wildcard patterns

    Returns:
      True if path matches any wildcard found in pattern_list.
    """
    # Note: linear scan per call; does not scale to large pattern_list sizes.
    for pattern in pattern_list:
        if fnmatch.fnmatchcase(path, pattern):
            return True
    return False
def function[_Matches, parameter[path, pattern_list]]: constant[Returns true if path matches any patten found in pattern_list. Args: path: A dot separated path to a package, class, method or variable pattern_list: A list of wildcard patterns Returns: True if path matches any wildcard found in pattern_list. ] return[call[name[any], parameter[<ast.GeneratorExp object at 0x7da18eb550c0>]]]
keyword[def] identifier[_Matches] ( identifier[path] , identifier[pattern_list] ): literal[string] keyword[return] identifier[any] ( identifier[fnmatch] . identifier[fnmatchcase] ( identifier[path] , identifier[pattern] ) keyword[for] identifier[pattern] keyword[in] identifier[pattern_list] )
def _Matches(path, pattern_list): """Returns true if path matches any patten found in pattern_list. Args: path: A dot separated path to a package, class, method or variable pattern_list: A list of wildcard patterns Returns: True if path matches any wildcard found in pattern_list. """ # Note: This code does not scale to large pattern_list sizes. return any((fnmatch.fnmatchcase(path, pattern) for pattern in pattern_list))
def format_index(data):
    """Create DatetimeIndex for the Dataframe localized to the timezone
    provided as the label of the second (time) column.

    Parameters
    ----------
    data: Dataframe
        Must contain 'DATE (MM/DD/YYYY)' column, second column must be
        labeled with the timezone and contain times in 'HH:MM' format.

    Returns
    -------
    data: Dataframe
        Dataframe with DatetimeIndex localized to the provided timezone.
    """
    time_col = data.columns[1]
    # The column label is the raw timezone; TZ_MAP translates known
    # abbreviations, otherwise the label is used verbatim.
    tz = TZ_MAP.get(time_col, time_col)
    stamps = pd.to_datetime(
        data['DATE (MM/DD/YYYY)'] + data[time_col],
        format='%m/%d/%Y%H:%M',
    )
    return data.set_index(stamps).tz_localize(tz)
def function[format_index, parameter[data]]: constant[Create DatetimeIndex for the Dataframe localized to the timezone provided as the label of the second (time) column. Parameters ---------- data: Dataframe Must contain 'DATE (MM/DD/YYYY)' column, second column must be labeled with the timezone and contain times in 'HH:MM' format. Returns ------- data: Dataframe Dataframe with DatetimeIndex localized to the provided timezone. ] variable[tz_raw] assign[=] call[name[data].columns][constant[1]] variable[timezone] assign[=] call[name[TZ_MAP].get, parameter[name[tz_raw], name[tz_raw]]] variable[datetime] assign[=] binary_operation[call[name[data]][constant[DATE (MM/DD/YYYY)]] + call[name[data]][name[tz_raw]]] variable[datetime] assign[=] call[name[pd].to_datetime, parameter[name[datetime]]] variable[data] assign[=] call[name[data].set_index, parameter[name[datetime]]] variable[data] assign[=] call[name[data].tz_localize, parameter[name[timezone]]] return[name[data]]
keyword[def] identifier[format_index] ( identifier[data] ): literal[string] identifier[tz_raw] = identifier[data] . identifier[columns] [ literal[int] ] identifier[timezone] = identifier[TZ_MAP] . identifier[get] ( identifier[tz_raw] , identifier[tz_raw] ) identifier[datetime] = identifier[data] [ literal[string] ]+ identifier[data] [ identifier[tz_raw] ] identifier[datetime] = identifier[pd] . identifier[to_datetime] ( identifier[datetime] , identifier[format] = literal[string] ) identifier[data] = identifier[data] . identifier[set_index] ( identifier[datetime] ) identifier[data] = identifier[data] . identifier[tz_localize] ( identifier[timezone] ) keyword[return] identifier[data]
def format_index(data): """Create DatetimeIndex for the Dataframe localized to the timezone provided as the label of the second (time) column. Parameters ---------- data: Dataframe Must contain 'DATE (MM/DD/YYYY)' column, second column must be labeled with the timezone and contain times in 'HH:MM' format. Returns ------- data: Dataframe Dataframe with DatetimeIndex localized to the provided timezone. """ tz_raw = data.columns[1] timezone = TZ_MAP.get(tz_raw, tz_raw) datetime = data['DATE (MM/DD/YYYY)'] + data[tz_raw] datetime = pd.to_datetime(datetime, format='%m/%d/%Y%H:%M') data = data.set_index(datetime) data = data.tz_localize(timezone) return data
def open(self, file, mode='r', perm=0o0644):
    """
    Opens a file on the node

    :param file: file path to open
    :param mode: open mode; 'r' read only, 'w' write only (truncate),
                 '+' read/write, 'x' create if not exist, 'a' append
    :param perm: file permission bits, given in octal form
    :return: a file descriptor
    """
    request = {
        'file': file,
        'mode': mode,
        'perm': perm,
    }
    return self._client.json('filesystem.open', request)
def function[open, parameter[self, file, mode, perm]]: constant[ Opens a file on the node :param file: file path to open :param mode: open mode :param perm: file permission in octet form mode: 'r' read only 'w' write only (truncate) '+' read/write 'x' create if not exist 'a' append :return: a file descriptor ] variable[args] assign[=] dictionary[[<ast.Constant object at 0x7da1b031e050>, <ast.Constant object at 0x7da1b031c040>, <ast.Constant object at 0x7da1b031c1c0>], [<ast.Name object at 0x7da1b031d780>, <ast.Name object at 0x7da1b031dbd0>, <ast.Name object at 0x7da1b031eaa0>]] return[call[name[self]._client.json, parameter[constant[filesystem.open], name[args]]]]
keyword[def] identifier[open] ( identifier[self] , identifier[file] , identifier[mode] = literal[string] , identifier[perm] = literal[int] ): literal[string] identifier[args] ={ literal[string] : identifier[file] , literal[string] : identifier[mode] , literal[string] : identifier[perm] , } keyword[return] identifier[self] . identifier[_client] . identifier[json] ( literal[string] , identifier[args] )
def open(self, file, mode='r', perm=420): """ Opens a file on the node :param file: file path to open :param mode: open mode :param perm: file permission in octet form mode: 'r' read only 'w' write only (truncate) '+' read/write 'x' create if not exist 'a' append :return: a file descriptor """ args = {'file': file, 'mode': mode, 'perm': perm} return self._client.json('filesystem.open', args)
def preprocess_value(self, value, default=tuple()):
    """Normalize *value* for set-style storage.

    Falsy values — and single-element sequences whose only item is falsy —
    collapse to *default*; bare scalars are wrapped in a one-tuple; other
    sequences pass through unchanged.
    """
    # falsy input (None, '', 0, empty sequence) -> default
    if not value:
        return default
    if isinstance(value, (list, tuple)):
        # a one-element sequence holding a falsy item also counts as empty
        if len(value) == 1 and not value[0]:
            return default
        return value
    # wrap a bare scalar so callers always get a sequence back
    return (value,)
def function[preprocess_value, parameter[self, value, default]]: constant[Preprocess the value for set ] if <ast.UnaryOp object at 0x7da2054a66e0> begin[:] return[name[default]] if call[name[isinstance], parameter[name[value], tuple[[<ast.Name object at 0x7da2054a5030>, <ast.Name object at 0x7da2054a4fa0>]]]] begin[:] if <ast.BoolOp object at 0x7da2054a6050> begin[:] return[name[default]] if <ast.UnaryOp object at 0x7da2054a5ab0> begin[:] variable[value] assign[=] tuple[[<ast.Name object at 0x7da2054a6110>]] return[name[value]]
keyword[def] identifier[preprocess_value] ( identifier[self] , identifier[value] , identifier[default] = identifier[tuple] ()): literal[string] keyword[if] keyword[not] identifier[value] : keyword[return] identifier[default] keyword[if] identifier[isinstance] ( identifier[value] ,( identifier[list] , identifier[tuple] )): keyword[if] identifier[len] ( identifier[value] )== literal[int] keyword[and] keyword[not] identifier[value] [ literal[int] ]: keyword[return] identifier[default] keyword[if] keyword[not] identifier[isinstance] ( identifier[value] ,( identifier[list] , identifier[tuple] )): identifier[value] = identifier[value] , keyword[return] identifier[value]
def preprocess_value(self, value, default=tuple()): """Preprocess the value for set """ # empty value if not value: return default # depends on [control=['if'], data=[]] # list with one empty item if isinstance(value, (list, tuple)): if len(value) == 1 and (not value[0]): return default # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if not isinstance(value, (list, tuple)): value = (value,) # depends on [control=['if'], data=[]] return value
def _ltu32(ins):
    """ Compares & pops top 2 operands out of the stack, and checks
        if the 1st operand < 2nd operand (top of the stack).
        Pushes 0 if False, 1 if True.

        32 bit unsigned version
    """
    op1, op2 = ins.quad[2], ins.quad[3]
    # Operands must be swapped when op2 is the temporary and op1 is a
    # non-temporary, non-integer-literal value.
    swapped = op1[0] != 't' and not is_int(op1) and op2[0] == 't'
    output = _32bit_oper(op1, op2, swapped)
    # Borrow flag after the 32-bit subtraction encodes op1 < op2.
    for asm in ('call __SUB32', 'sbc a, a', 'push af'):
        output.append(asm)
    REQUIRES.add('sub32.asm')
    return output
def function[_ltu32, parameter[ins]]: constant[ Compares & pops top 2 operands out of the stack, and checks if the 1st operand < 2nd operand (top of the stack). Pushes 0 if False, 1 if True. 32 bit unsigned version ] <ast.Tuple object at 0x7da204567640> assign[=] call[name[tuple], parameter[call[name[ins].quad][<ast.Slice object at 0x7da2045641f0>]]] variable[rev] assign[=] <ast.BoolOp object at 0x7da2045665c0> variable[output] assign[=] call[name[_32bit_oper], parameter[name[op1], name[op2], name[rev]]] call[name[output].append, parameter[constant[call __SUB32]]] call[name[output].append, parameter[constant[sbc a, a]]] call[name[output].append, parameter[constant[push af]]] call[name[REQUIRES].add, parameter[constant[sub32.asm]]] return[name[output]]
keyword[def] identifier[_ltu32] ( identifier[ins] ): literal[string] identifier[op1] , identifier[op2] = identifier[tuple] ( identifier[ins] . identifier[quad] [ literal[int] :]) identifier[rev] = identifier[op1] [ literal[int] ]!= literal[string] keyword[and] keyword[not] identifier[is_int] ( identifier[op1] ) keyword[and] identifier[op2] [ literal[int] ]== literal[string] identifier[output] = identifier[_32bit_oper] ( identifier[op1] , identifier[op2] , identifier[rev] ) identifier[output] . identifier[append] ( literal[string] ) identifier[output] . identifier[append] ( literal[string] ) identifier[output] . identifier[append] ( literal[string] ) identifier[REQUIRES] . identifier[add] ( literal[string] ) keyword[return] identifier[output]
def _ltu32(ins): """ Compares & pops top 2 operands out of the stack, and checks if the 1st operand < 2nd operand (top of the stack). Pushes 0 if False, 1 if True. 32 bit unsigned version """ (op1, op2) = tuple(ins.quad[2:]) rev = op1[0] != 't' and (not is_int(op1)) and (op2[0] == 't') output = _32bit_oper(op1, op2, rev) output.append('call __SUB32') output.append('sbc a, a') output.append('push af') REQUIRES.add('sub32.asm') return output
def get_last_value_from_timeseries(timeseries):
    """Return the most recent non-zero ``'y'`` value from the first series
    in *timeseries*, or 0 for empty/missing data.

    :param timeseries: mapping of metric name -> list of point dicts, each
        with a numeric ``'y'`` entry; points are assumed oldest-first.
    :return: the last point whose ``'y'`` is > 0, else 0.
    """
    if not timeseries:
        return 0
    # Only the first series matters: the original loop returned on its very
    # first iteration (and never used the metric name), so make that explicit
    # instead of a loop that never loops.
    points = next(iter(timeseries.values()))
    return next((p['y'] for p in reversed(points) if p['y'] > 0), 0)
def function[get_last_value_from_timeseries, parameter[timeseries]]: constant[Gets the most recent non-zero value for a .last metric or zero for empty data.] if <ast.UnaryOp object at 0x7da20e962bc0> begin[:] return[constant[0]] for taget[tuple[[<ast.Name object at 0x7da20e963160>, <ast.Name object at 0x7da20e963640>]]] in starred[call[name[timeseries].items, parameter[]]] begin[:] return[call[name[next], parameter[<ast.GeneratorExp object at 0x7da20e960ca0>, constant[0]]]]
keyword[def] identifier[get_last_value_from_timeseries] ( identifier[timeseries] ): literal[string] keyword[if] keyword[not] identifier[timeseries] : keyword[return] literal[int] keyword[for] identifier[metric] , identifier[points] keyword[in] identifier[timeseries] . identifier[items] (): keyword[return] identifier[next] (( identifier[p] [ literal[string] ] keyword[for] identifier[p] keyword[in] identifier[reversed] ( identifier[points] ) keyword[if] identifier[p] [ literal[string] ]> literal[int] ), literal[int] )
def get_last_value_from_timeseries(timeseries): """Gets the most recent non-zero value for a .last metric or zero for empty data.""" if not timeseries: return 0 # depends on [control=['if'], data=[]] for (metric, points) in timeseries.items(): return next((p['y'] for p in reversed(points) if p['y'] > 0), 0) # depends on [control=['for'], data=[]]
def _output_type_by_input_path(inpaths, itype, fmsg):
    """
    :param inpaths: List of input file paths
    :param itype: Input type or None
    :param fmsg: message if it cannot detect otype by 'inpath'

    :return: Output type :: str
    """
    msg = ("Specify inpath and/or outpath type[s] with -I/--itype "
           "or -O/--otype option explicitly")
    if itype is not None:
        otype = itype
    else:
        # No explicit type given: sniff it from the first input path.
        try:
            otype = API.find(inpaths[0]).type()
        except API.UnknownFileTypeError:
            _exit_with_output((fmsg % inpaths[0]) + msg, 1)
        except (ValueError, IndexError):
            # inpaths was empty, or the path could not be processed.
            _exit_with_output(msg, 1)
    return otype
def function[_output_type_by_input_path, parameter[inpaths, itype, fmsg]]: constant[ :param inpaths: List of input file paths :param itype: Input type or None :param fmsg: message if it cannot detect otype by 'inpath' :return: Output type :: str ] variable[msg] assign[=] constant[Specify inpath and/or outpath type[s] with -I/--itype or -O/--otype option explicitly] if compare[name[itype] is constant[None]] begin[:] <ast.Try object at 0x7da20c794b50> return[name[otype]]
keyword[def] identifier[_output_type_by_input_path] ( identifier[inpaths] , identifier[itype] , identifier[fmsg] ): literal[string] identifier[msg] =( literal[string] literal[string] ) keyword[if] identifier[itype] keyword[is] keyword[None] : keyword[try] : identifier[otype] = identifier[API] . identifier[find] ( identifier[inpaths] [ literal[int] ]). identifier[type] () keyword[except] identifier[API] . identifier[UnknownFileTypeError] : identifier[_exit_with_output] (( identifier[fmsg] % identifier[inpaths] [ literal[int] ])+ identifier[msg] , literal[int] ) keyword[except] ( identifier[ValueError] , identifier[IndexError] ): identifier[_exit_with_output] ( identifier[msg] , literal[int] ) keyword[else] : identifier[otype] = identifier[itype] keyword[return] identifier[otype]
def _output_type_by_input_path(inpaths, itype, fmsg): """ :param inpaths: List of input file paths :param itype: Input type or None :param fmsg: message if it cannot detect otype by 'inpath' :return: Output type :: str """ msg = 'Specify inpath and/or outpath type[s] with -I/--itype or -O/--otype option explicitly' if itype is None: try: otype = API.find(inpaths[0]).type() # depends on [control=['try'], data=[]] except API.UnknownFileTypeError: _exit_with_output(fmsg % inpaths[0] + msg, 1) # depends on [control=['except'], data=[]] except (ValueError, IndexError): _exit_with_output(msg, 1) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] else: otype = itype return otype
def get_vnetwork_portgroups_output_vnetwork_pgs_vlan(self, **kwargs):
    """Auto Generated Code

    Build the ``get_vnetwork_portgroups`` request tree with an
    output/vnetwork-pgs/vlan element and pass it to the callback.

    Keyword Args:
        vlan: text placed in the ``<vlan>`` element (required).
        callback: optional callable receiving the XML root; defaults to
            ``self._callback``.

    Returns:
        Whatever the callback returns for the assembled element tree.
    """
    # The generated original assigned ET.Element("config") and immediately
    # overwrote it with the RPC root; that dead store is removed here.
    config = ET.Element("get_vnetwork_portgroups")
    output = ET.SubElement(config, "output")
    vnetwork_pgs = ET.SubElement(output, "vnetwork-pgs")
    vlan = ET.SubElement(vnetwork_pgs, "vlan")
    vlan.text = kwargs.pop('vlan')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def function[get_vnetwork_portgroups_output_vnetwork_pgs_vlan, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[get_vnetwork_portgroups] assign[=] call[name[ET].Element, parameter[constant[get_vnetwork_portgroups]]] variable[config] assign[=] name[get_vnetwork_portgroups] variable[output] assign[=] call[name[ET].SubElement, parameter[name[get_vnetwork_portgroups], constant[output]]] variable[vnetwork_pgs] assign[=] call[name[ET].SubElement, parameter[name[output], constant[vnetwork-pgs]]] variable[vlan] assign[=] call[name[ET].SubElement, parameter[name[vnetwork_pgs], constant[vlan]]] name[vlan].text assign[=] call[name[kwargs].pop, parameter[constant[vlan]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[get_vnetwork_portgroups_output_vnetwork_pgs_vlan] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[get_vnetwork_portgroups] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[config] = identifier[get_vnetwork_portgroups] identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[get_vnetwork_portgroups] , literal[string] ) identifier[vnetwork_pgs] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] ) identifier[vlan] = identifier[ET] . identifier[SubElement] ( identifier[vnetwork_pgs] , literal[string] ) identifier[vlan] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def get_vnetwork_portgroups_output_vnetwork_pgs_vlan(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') get_vnetwork_portgroups = ET.Element('get_vnetwork_portgroups') config = get_vnetwork_portgroups output = ET.SubElement(get_vnetwork_portgroups, 'output') vnetwork_pgs = ET.SubElement(output, 'vnetwork-pgs') vlan = ET.SubElement(vnetwork_pgs, 'vlan') vlan.text = kwargs.pop('vlan') callback = kwargs.pop('callback', self._callback) return callback(config)
def hints(self, **kwargs):
    """
    Use this method to update hints value of the underlying query

    example: queryset.hints(permissive=False)
    """
    # Work on a clone so the current queryset's query stays untouched.
    cloned = self.query.clone()
    for key, value in kwargs.items():
        cloned.hints[key] = value
    return self._clone(query=cloned)
def function[hints, parameter[self]]: constant[ Use this method to update hints value of the underlying query example: queryset.hints(permissive=False) ] variable[new_query] assign[=] call[name[self].query.clone, parameter[]] call[name[new_query].hints.update, parameter[name[kwargs]]] return[call[name[self]._clone, parameter[]]]
keyword[def] identifier[hints] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[new_query] = identifier[self] . identifier[query] . identifier[clone] () identifier[new_query] . identifier[hints] . identifier[update] ( identifier[kwargs] ) keyword[return] identifier[self] . identifier[_clone] ( identifier[query] = identifier[new_query] )
def hints(self, **kwargs): """ Use this method to update hints value of the underlying query example: queryset.hints(permissive=False) """ new_query = self.query.clone() new_query.hints.update(kwargs) return self._clone(query=new_query)
def get_config_map(self, name):
    """
    Get a ConfigMap object from the server
    Raises exception on error

    :param name: str, name of configMap to get from the server
    :returns: ConfigMapResponse containing the ConfigMap with the requested name
    """
    raw = self.os.get_config_map(name)
    return ConfigMapResponse(raw.json())
def function[get_config_map, parameter[self, name]]: constant[ Get a ConfigMap object from the server Raises exception on error :param name: str, name of configMap to get from the server :returns: ConfigMapResponse containing the ConfigMap with the requested name ] variable[response] assign[=] call[name[self].os.get_config_map, parameter[name[name]]] variable[config_map_response] assign[=] call[name[ConfigMapResponse], parameter[call[name[response].json, parameter[]]]] return[name[config_map_response]]
keyword[def] identifier[get_config_map] ( identifier[self] , identifier[name] ): literal[string] identifier[response] = identifier[self] . identifier[os] . identifier[get_config_map] ( identifier[name] ) identifier[config_map_response] = identifier[ConfigMapResponse] ( identifier[response] . identifier[json] ()) keyword[return] identifier[config_map_response]
def get_config_map(self, name): """ Get a ConfigMap object from the server Raises exception on error :param name: str, name of configMap to get from the server :returns: ConfigMapResponse containing the ConfigMap with the requested name """ response = self.os.get_config_map(name) config_map_response = ConfigMapResponse(response.json()) return config_map_response
def focus_last_reply(self):
    """move focus to last reply to currently focussed message"""
    selected = self.get_selected_mid()
    target = self._tree.last_child_position(selected)
    # No replies: leave focus where it is.
    if target is None:
        return
    self.body.set_focus(self._sanitize_position((target,)))
def function[focus_last_reply, parameter[self]]: constant[move focus to last reply to currently focussed message] variable[mid] assign[=] call[name[self].get_selected_mid, parameter[]] variable[newpos] assign[=] call[name[self]._tree.last_child_position, parameter[name[mid]]] if compare[name[newpos] is_not constant[None]] begin[:] variable[newpos] assign[=] call[name[self]._sanitize_position, parameter[tuple[[<ast.Name object at 0x7da1b0718c10>]]]] call[name[self].body.set_focus, parameter[name[newpos]]]
keyword[def] identifier[focus_last_reply] ( identifier[self] ): literal[string] identifier[mid] = identifier[self] . identifier[get_selected_mid] () identifier[newpos] = identifier[self] . identifier[_tree] . identifier[last_child_position] ( identifier[mid] ) keyword[if] identifier[newpos] keyword[is] keyword[not] keyword[None] : identifier[newpos] = identifier[self] . identifier[_sanitize_position] (( identifier[newpos] ,)) identifier[self] . identifier[body] . identifier[set_focus] ( identifier[newpos] )
def focus_last_reply(self): """move focus to last reply to currently focussed message""" mid = self.get_selected_mid() newpos = self._tree.last_child_position(mid) if newpos is not None: newpos = self._sanitize_position((newpos,)) self.body.set_focus(newpos) # depends on [control=['if'], data=['newpos']]
def unregisterChecker(self, checker):
    """
    Remove a checker from the list of registered checkers.

    @param checker: the checker to remove
    """
    linter = self.linter
    # Drop it from the per-name registry; removal from _reports and
    # options_providers is conditional because it may not be present there.
    linter._checkers[checker.name].remove(checker)
    if checker in linter._reports:
        del linter._reports[checker]
    if checker in linter.options_providers:
        linter.options_providers.remove(checker)
def function[unregisterChecker, parameter[self, checker]]: constant[ Remove a checker from the list of registered checkers. @param checker: the checker to remove ] call[call[name[self].linter._checkers][name[checker].name].remove, parameter[name[checker]]] if compare[name[checker] in name[self].linter._reports] begin[:] <ast.Delete object at 0x7da20cabc250> if compare[name[checker] in name[self].linter.options_providers] begin[:] call[name[self].linter.options_providers.remove, parameter[name[checker]]]
keyword[def] identifier[unregisterChecker] ( identifier[self] , identifier[checker] ): literal[string] identifier[self] . identifier[linter] . identifier[_checkers] [ identifier[checker] . identifier[name] ]. identifier[remove] ( identifier[checker] ) keyword[if] identifier[checker] keyword[in] identifier[self] . identifier[linter] . identifier[_reports] : keyword[del] identifier[self] . identifier[linter] . identifier[_reports] [ identifier[checker] ] keyword[if] identifier[checker] keyword[in] identifier[self] . identifier[linter] . identifier[options_providers] : identifier[self] . identifier[linter] . identifier[options_providers] . identifier[remove] ( identifier[checker] )
def unregisterChecker(self, checker): """ Remove a checker from the list of registered checkers. @param checker: the checker to remove """ self.linter._checkers[checker.name].remove(checker) if checker in self.linter._reports: del self.linter._reports[checker] # depends on [control=['if'], data=['checker']] if checker in self.linter.options_providers: self.linter.options_providers.remove(checker) # depends on [control=['if'], data=['checker']]
def _update_limits_from_api(self):
    """
    Call the service's API action to retrieve limit/quota information, and
    update AwsLimit objects in ``self.limits`` with this information.
    """
    try:
        self.connect()
        quota = self.conn.get_send_quota()
    except EndpointConnectionError as ex:
        # Region/endpoint does not offer SES; skip quietly.
        logger.warning('Skipping SES: %s', str(ex))
        return
    except ClientError as ex:
        # Only access/availability errors are skippable; anything else
        # propagates to the caller.
        if ex.response['Error']['Code'] not in ('AccessDenied', '503'):
            raise
        logger.warning('Skipping SES: %s', ex)
        return
    self.limits['Daily sending quota']._set_api_limit(quota['Max24HourSend'])
def function[_update_limits_from_api, parameter[self]]: constant[ Call the service's API action to retrieve limit/quota information, and update AwsLimit objects in ``self.limits`` with this information. ] <ast.Try object at 0x7da20c7c8760> call[call[name[self].limits][constant[Daily sending quota]]._set_api_limit, parameter[call[name[resp]][constant[Max24HourSend]]]]
keyword[def] identifier[_update_limits_from_api] ( identifier[self] ): literal[string] keyword[try] : identifier[self] . identifier[connect] () identifier[resp] = identifier[self] . identifier[conn] . identifier[get_send_quota] () keyword[except] identifier[EndpointConnectionError] keyword[as] identifier[ex] : identifier[logger] . identifier[warning] ( literal[string] , identifier[str] ( identifier[ex] )) keyword[return] keyword[except] identifier[ClientError] keyword[as] identifier[ex] : keyword[if] identifier[ex] . identifier[response] [ literal[string] ][ literal[string] ] keyword[in] [ literal[string] , literal[string] ]: identifier[logger] . identifier[warning] ( literal[string] , identifier[ex] ) keyword[return] keyword[raise] identifier[self] . identifier[limits] [ literal[string] ]. identifier[_set_api_limit] ( identifier[resp] [ literal[string] ])
def _update_limits_from_api(self): """ Call the service's API action to retrieve limit/quota information, and update AwsLimit objects in ``self.limits`` with this information. """ try: self.connect() resp = self.conn.get_send_quota() # depends on [control=['try'], data=[]] except EndpointConnectionError as ex: logger.warning('Skipping SES: %s', str(ex)) return # depends on [control=['except'], data=['ex']] except ClientError as ex: if ex.response['Error']['Code'] in ['AccessDenied', '503']: logger.warning('Skipping SES: %s', ex) return # depends on [control=['if'], data=[]] raise # depends on [control=['except'], data=['ex']] self.limits['Daily sending quota']._set_api_limit(resp['Max24HourSend'])
def all_elements_by_type(name):
    """
    Get specified elements based on the entry point verb from SMC api
    To get the entry points available, you can get these from the session::

        session.cache.entry_points

    Execution will get the entry point for the element type, then
    get all elements that match.

    For example::

        search.all_elements_by_type('host')

    :param name: top level entry point name
    :raises: `smc.api.exceptions.UnsupportedEntryPoint`
    :return: list with json representation of name match, else None
    """
    if not name:
        return None
    entry = element_entry_point(name)
    # An invalid entry point yields a falsy result -> return None.
    if not entry:
        return None
    return element_by_href_as_json(entry)
def function[all_elements_by_type, parameter[name]]: constant[ Get specified elements based on the entry point verb from SMC api To get the entry points available, you can get these from the session:: session.cache.entry_points Execution will get the entry point for the element type, then get all elements that match. For example:: search.all_elements_by_type('host') :param name: top level entry point name :raises: `smc.api.exceptions.UnsupportedEntryPoint` :return: list with json representation of name match, else None ] if name[name] begin[:] variable[entry] assign[=] call[name[element_entry_point], parameter[name[name]]] if name[entry] begin[:] variable[result] assign[=] call[name[element_by_href_as_json], parameter[name[entry]]] return[name[result]]
keyword[def] identifier[all_elements_by_type] ( identifier[name] ): literal[string] keyword[if] identifier[name] : identifier[entry] = identifier[element_entry_point] ( identifier[name] ) keyword[if] identifier[entry] : identifier[result] = identifier[element_by_href_as_json] ( identifier[entry] ) keyword[return] identifier[result]
def all_elements_by_type(name): """ Get specified elements based on the entry point verb from SMC api To get the entry points available, you can get these from the session:: session.cache.entry_points Execution will get the entry point for the element type, then get all elements that match. For example:: search.all_elements_by_type('host') :param name: top level entry point name :raises: `smc.api.exceptions.UnsupportedEntryPoint` :return: list with json representation of name match, else None """ if name: entry = element_entry_point(name) if entry: # in case an invalid entry point is specified result = element_by_href_as_json(entry) return result # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def repo_commits(self, repo):
    """ Get the commit IDs for all of the branches of a repository

    For every branch reported by ``self.repo_branches`` this runs
    ``git rev-list origin/<branch>`` and collects one commit id per output
    line, with a trailing 'HEAD' sentinel appended.

    :param repo: repository identifier passed to ``path_dirs.apply_path``
                 and ``repo_branches``
    :return: ``(True, [(branch, [commit_id, ..., 'HEAD']), ...])`` on
             success, otherwise ``(False, error)`` or the failing status
             tuple from a helper
    """
    commits = []
    try:
        # NOTE(review): apply_path appears to prepare/enter the repo's
        # directory and return (ok, path) — confirm against its definition.
        status = self.path_dirs.apply_path(repo)
        # switch to directory where repo will be cloned to
        if status[0]:
            cwd = status[1]
        else:
            self.logger.error('apply_path failed. Exiting repo_commits with'
                              ' status: ' + str(status))
            return status
        status = self.repo_branches(repo)
        if status[0]:
            branches = status[1]
            for branch in branches:
                try:
                    # One commit id per line of rev-list output; the final
                    # newline leaves an empty last element, dropped by [:-1].
                    branch_output = check_output(shlex
                                                 .split('git rev-list origin/' +
                                                        branch),
                                                 stderr=STDOUT,
                                                 close_fds=True).decode('utf-8')
                    branch_output = branch_output.split('\n')[:-1]
                    branch_output += ['HEAD']
                    commits.append((branch, branch_output))
                except Exception as e:  # pragma: no cover
                    self.logger.error('repo_commits failed with error: ' +
                                      str(e) + ' on branch: ' +
                                      str(branch))
                    status = (False, e)
                    return status
        else:
            self.logger.error('repo_branches failed. Exiting repo_commits'
                              ' with status: ' + str(status))
            return status
        # Re-enter the directory returned by apply_path before reporting.
        chdir(cwd)
        status = (True, commits)
    except Exception as e:  # pragma: no cover
        self.logger.error('repo_commits failed with error: ' + str(e))
        status = (False, e)
    return status
def function[repo_commits, parameter[self, repo]]: constant[ Get the commit IDs for all of the branches of a repository ] variable[commits] assign[=] list[[]] <ast.Try object at 0x7da18bcc8d60> return[name[status]]
keyword[def] identifier[repo_commits] ( identifier[self] , identifier[repo] ): literal[string] identifier[commits] =[] keyword[try] : identifier[status] = identifier[self] . identifier[path_dirs] . identifier[apply_path] ( identifier[repo] ) keyword[if] identifier[status] [ literal[int] ]: identifier[cwd] = identifier[status] [ literal[int] ] keyword[else] : identifier[self] . identifier[logger] . identifier[error] ( literal[string] literal[string] + identifier[str] ( identifier[status] )) keyword[return] identifier[status] identifier[status] = identifier[self] . identifier[repo_branches] ( identifier[repo] ) keyword[if] identifier[status] [ literal[int] ]: identifier[branches] = identifier[status] [ literal[int] ] keyword[for] identifier[branch] keyword[in] identifier[branches] : keyword[try] : identifier[branch_output] = identifier[check_output] ( identifier[shlex] . identifier[split] ( literal[string] + identifier[branch] ), identifier[stderr] = identifier[STDOUT] , identifier[close_fds] = keyword[True] ). identifier[decode] ( literal[string] ) identifier[branch_output] = identifier[branch_output] . identifier[split] ( literal[string] )[:- literal[int] ] identifier[branch_output] +=[ literal[string] ] identifier[commits] . identifier[append] (( identifier[branch] , identifier[branch_output] )) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[self] . identifier[logger] . identifier[error] ( literal[string] + identifier[str] ( identifier[e] )+ literal[string] + identifier[str] ( identifier[branch] )) identifier[status] =( keyword[False] , identifier[e] ) keyword[return] identifier[status] keyword[else] : identifier[self] . identifier[logger] . 
identifier[error] ( literal[string] literal[string] + identifier[str] ( identifier[status] )) keyword[return] identifier[status] identifier[chdir] ( identifier[cwd] ) identifier[status] =( keyword[True] , identifier[commits] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[self] . identifier[logger] . identifier[error] ( literal[string] + identifier[str] ( identifier[e] )) identifier[status] =( keyword[False] , identifier[e] ) keyword[return] identifier[status]
def repo_commits(self, repo): """ Get the commit IDs for all of the branches of a repository """ commits = [] try: status = self.path_dirs.apply_path(repo) # switch to directory where repo will be cloned to if status[0]: cwd = status[1] # depends on [control=['if'], data=[]] else: self.logger.error('apply_path failed. Exiting repo_commits with status: ' + str(status)) return status status = self.repo_branches(repo) if status[0]: branches = status[1] for branch in branches: try: branch_output = check_output(shlex.split('git rev-list origin/' + branch), stderr=STDOUT, close_fds=True).decode('utf-8') branch_output = branch_output.split('\n')[:-1] branch_output += ['HEAD'] commits.append((branch, branch_output)) # depends on [control=['try'], data=[]] except Exception as e: # pragma: no cover self.logger.error('repo_commits failed with error: ' + str(e) + ' on branch: ' + str(branch)) status = (False, e) return status # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['branch']] # depends on [control=['if'], data=[]] else: self.logger.error('repo_branches failed. Exiting repo_commits with status: ' + str(status)) return status chdir(cwd) status = (True, commits) # depends on [control=['try'], data=[]] except Exception as e: # pragma: no cover self.logger.error('repo_commits failed with error: ' + str(e)) status = (False, e) # depends on [control=['except'], data=['e']] return status
def _removeUnlikelyPredictions(cls, likelihoodsDict, minLikelihoodThreshold,
                                  maxPredictionsPerStep):
    """Remove entries with 0 likelihood or likelihood less than
    minLikelihoodThreshold, but don't leave an empty dict.

    The dict is pruned in place while tracking the running maximum so that
    at least one entry always survives, then rebuilt keeping only the
    maxPredictionsPerStep highest-likelihood entries.

    NOTE(review): this is Python 2 code (``iteritems``); the in-loop
    deletions during ``items()`` iteration rely on Python 2's ``items()``
    returning a list snapshot, and would raise under Python 3.
    """
    maxVal = (None, None)
    # Single pass: delete sub-threshold entries, but keep the running max
    # alive until it is superseded (so the dict never becomes empty).
    for (k, v) in likelihoodsDict.items():
      if len(likelihoodsDict) <= 1:
        # Never prune the last remaining entry.
        break
      if maxVal[0] is None or v >= maxVal[1]:
        # New running max; the previous max may now be safely deleted
        # if it was only kept around as the provisional survivor.
        if maxVal[0] is not None and maxVal[1] < minLikelihoodThreshold:
          del likelihoodsDict[maxVal[0]]
        maxVal = (k, v)
      elif v < minLikelihoodThreshold:
        del likelihoodsDict[k]
    # Limit the number of predictions to include.
    likelihoodsDict = dict(sorted(likelihoodsDict.iteritems(),
                                  key=itemgetter(1),
                                  reverse=True)[:maxPredictionsPerStep])
    return likelihoodsDict
def function[_removeUnlikelyPredictions, parameter[cls, likelihoodsDict, minLikelihoodThreshold, maxPredictionsPerStep]]: constant[Remove entries with 0 likelihood or likelihood less than minLikelihoodThreshold, but don't leave an empty dict. ] variable[maxVal] assign[=] tuple[[<ast.Constant object at 0x7da18dc990c0>, <ast.Constant object at 0x7da18dc9b040>]] for taget[tuple[[<ast.Name object at 0x7da18dc99c60>, <ast.Name object at 0x7da18dc983d0>]]] in starred[call[name[likelihoodsDict].items, parameter[]]] begin[:] if compare[call[name[len], parameter[name[likelihoodsDict]]] less_or_equal[<=] constant[1]] begin[:] break if <ast.BoolOp object at 0x7da18dc9a620> begin[:] if <ast.BoolOp object at 0x7da18dc9acb0> begin[:] <ast.Delete object at 0x7da18dc9b100> variable[maxVal] assign[=] tuple[[<ast.Name object at 0x7da20c6a8220>, <ast.Name object at 0x7da20c6a8b20>]] variable[likelihoodsDict] assign[=] call[name[dict], parameter[call[call[name[sorted], parameter[call[name[likelihoodsDict].iteritems, parameter[]]]]][<ast.Slice object at 0x7da20c6a8490>]]] return[name[likelihoodsDict]]
keyword[def] identifier[_removeUnlikelyPredictions] ( identifier[cls] , identifier[likelihoodsDict] , identifier[minLikelihoodThreshold] , identifier[maxPredictionsPerStep] ): literal[string] identifier[maxVal] =( keyword[None] , keyword[None] ) keyword[for] ( identifier[k] , identifier[v] ) keyword[in] identifier[likelihoodsDict] . identifier[items] (): keyword[if] identifier[len] ( identifier[likelihoodsDict] )<= literal[int] : keyword[break] keyword[if] identifier[maxVal] [ literal[int] ] keyword[is] keyword[None] keyword[or] identifier[v] >= identifier[maxVal] [ literal[int] ]: keyword[if] identifier[maxVal] [ literal[int] ] keyword[is] keyword[not] keyword[None] keyword[and] identifier[maxVal] [ literal[int] ]< identifier[minLikelihoodThreshold] : keyword[del] identifier[likelihoodsDict] [ identifier[maxVal] [ literal[int] ]] identifier[maxVal] =( identifier[k] , identifier[v] ) keyword[elif] identifier[v] < identifier[minLikelihoodThreshold] : keyword[del] identifier[likelihoodsDict] [ identifier[k] ] identifier[likelihoodsDict] = identifier[dict] ( identifier[sorted] ( identifier[likelihoodsDict] . identifier[iteritems] (), identifier[key] = identifier[itemgetter] ( literal[int] ), identifier[reverse] = keyword[True] )[: identifier[maxPredictionsPerStep] ]) keyword[return] identifier[likelihoodsDict]
def _removeUnlikelyPredictions(cls, likelihoodsDict, minLikelihoodThreshold, maxPredictionsPerStep): """Remove entries with 0 likelihood or likelihood less than minLikelihoodThreshold, but don't leave an empty dict. """ maxVal = (None, None) for (k, v) in likelihoodsDict.items(): if len(likelihoodsDict) <= 1: break # depends on [control=['if'], data=[]] if maxVal[0] is None or v >= maxVal[1]: if maxVal[0] is not None and maxVal[1] < minLikelihoodThreshold: del likelihoodsDict[maxVal[0]] # depends on [control=['if'], data=[]] maxVal = (k, v) # depends on [control=['if'], data=[]] elif v < minLikelihoodThreshold: del likelihoodsDict[k] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # Limit the number of predictions to include. likelihoodsDict = dict(sorted(likelihoodsDict.iteritems(), key=itemgetter(1), reverse=True)[:maxPredictionsPerStep]) return likelihoodsDict
def delete_scheme(self): """Deletes the currently selected custom color scheme.""" scheme_name = self.current_scheme answer = QMessageBox.warning(self, _("Warning"), _("Are you sure you want to delete " "this scheme?"), QMessageBox.Yes | QMessageBox.No) if answer == QMessageBox.Yes: # Put the combobox in Spyder by default, when deleting a scheme names = self.get_option('names') self.set_scheme('spyder') self.schemes_combobox.setCurrentIndex(names.index('spyder')) self.set_option('selected', 'spyder') # Delete from custom_names custom_names = self.get_option('custom_names', []) if scheme_name in custom_names: custom_names.remove(scheme_name) self.set_option('custom_names', custom_names) # Delete config options for key in syntaxhighlighters.COLOR_SCHEME_KEYS: option = "{0}/{1}".format(scheme_name, key) CONF.remove_option(self.CONF_SECTION, option) CONF.remove_option(self.CONF_SECTION, "{0}/name".format(scheme_name)) self.update_combobox() self.update_preview()
def function[delete_scheme, parameter[self]]: constant[Deletes the currently selected custom color scheme.] variable[scheme_name] assign[=] name[self].current_scheme variable[answer] assign[=] call[name[QMessageBox].warning, parameter[name[self], call[name[_], parameter[constant[Warning]]], call[name[_], parameter[constant[Are you sure you want to delete this scheme?]]], binary_operation[name[QMessageBox].Yes <ast.BitOr object at 0x7da2590d6aa0> name[QMessageBox].No]]] if compare[name[answer] equal[==] name[QMessageBox].Yes] begin[:] variable[names] assign[=] call[name[self].get_option, parameter[constant[names]]] call[name[self].set_scheme, parameter[constant[spyder]]] call[name[self].schemes_combobox.setCurrentIndex, parameter[call[name[names].index, parameter[constant[spyder]]]]] call[name[self].set_option, parameter[constant[selected], constant[spyder]]] variable[custom_names] assign[=] call[name[self].get_option, parameter[constant[custom_names], list[[]]]] if compare[name[scheme_name] in name[custom_names]] begin[:] call[name[custom_names].remove, parameter[name[scheme_name]]] call[name[self].set_option, parameter[constant[custom_names], name[custom_names]]] for taget[name[key]] in starred[name[syntaxhighlighters].COLOR_SCHEME_KEYS] begin[:] variable[option] assign[=] call[constant[{0}/{1}].format, parameter[name[scheme_name], name[key]]] call[name[CONF].remove_option, parameter[name[self].CONF_SECTION, name[option]]] call[name[CONF].remove_option, parameter[name[self].CONF_SECTION, call[constant[{0}/name].format, parameter[name[scheme_name]]]]] call[name[self].update_combobox, parameter[]] call[name[self].update_preview, parameter[]]
keyword[def] identifier[delete_scheme] ( identifier[self] ): literal[string] identifier[scheme_name] = identifier[self] . identifier[current_scheme] identifier[answer] = identifier[QMessageBox] . identifier[warning] ( identifier[self] , identifier[_] ( literal[string] ), identifier[_] ( literal[string] literal[string] ), identifier[QMessageBox] . identifier[Yes] | identifier[QMessageBox] . identifier[No] ) keyword[if] identifier[answer] == identifier[QMessageBox] . identifier[Yes] : identifier[names] = identifier[self] . identifier[get_option] ( literal[string] ) identifier[self] . identifier[set_scheme] ( literal[string] ) identifier[self] . identifier[schemes_combobox] . identifier[setCurrentIndex] ( identifier[names] . identifier[index] ( literal[string] )) identifier[self] . identifier[set_option] ( literal[string] , literal[string] ) identifier[custom_names] = identifier[self] . identifier[get_option] ( literal[string] ,[]) keyword[if] identifier[scheme_name] keyword[in] identifier[custom_names] : identifier[custom_names] . identifier[remove] ( identifier[scheme_name] ) identifier[self] . identifier[set_option] ( literal[string] , identifier[custom_names] ) keyword[for] identifier[key] keyword[in] identifier[syntaxhighlighters] . identifier[COLOR_SCHEME_KEYS] : identifier[option] = literal[string] . identifier[format] ( identifier[scheme_name] , identifier[key] ) identifier[CONF] . identifier[remove_option] ( identifier[self] . identifier[CONF_SECTION] , identifier[option] ) identifier[CONF] . identifier[remove_option] ( identifier[self] . identifier[CONF_SECTION] , literal[string] . identifier[format] ( identifier[scheme_name] )) identifier[self] . identifier[update_combobox] () identifier[self] . identifier[update_preview] ()
def delete_scheme(self): """Deletes the currently selected custom color scheme.""" scheme_name = self.current_scheme answer = QMessageBox.warning(self, _('Warning'), _('Are you sure you want to delete this scheme?'), QMessageBox.Yes | QMessageBox.No) if answer == QMessageBox.Yes: # Put the combobox in Spyder by default, when deleting a scheme names = self.get_option('names') self.set_scheme('spyder') self.schemes_combobox.setCurrentIndex(names.index('spyder')) self.set_option('selected', 'spyder') # Delete from custom_names custom_names = self.get_option('custom_names', []) if scheme_name in custom_names: custom_names.remove(scheme_name) # depends on [control=['if'], data=['scheme_name', 'custom_names']] self.set_option('custom_names', custom_names) # Delete config options for key in syntaxhighlighters.COLOR_SCHEME_KEYS: option = '{0}/{1}'.format(scheme_name, key) CONF.remove_option(self.CONF_SECTION, option) # depends on [control=['for'], data=['key']] CONF.remove_option(self.CONF_SECTION, '{0}/name'.format(scheme_name)) self.update_combobox() self.update_preview() # depends on [control=['if'], data=[]]
def bulleted_list(items, max_count=None, indent=2): """Format a bulleted list of values. """ if max_count is not None and len(items) > max_count: item_list = list(items) items = item_list[:max_count - 1] items.append('...') items.append(item_list[-1]) line_template = (" " * indent) + "- {}" return "\n".join(map(line_template.format, items))
def function[bulleted_list, parameter[items, max_count, indent]]: constant[Format a bulleted list of values. ] if <ast.BoolOp object at 0x7da1b2042e00> begin[:] variable[item_list] assign[=] call[name[list], parameter[name[items]]] variable[items] assign[=] call[name[item_list]][<ast.Slice object at 0x7da1b1e8fe80>] call[name[items].append, parameter[constant[...]]] call[name[items].append, parameter[call[name[item_list]][<ast.UnaryOp object at 0x7da1b1e8f040>]]] variable[line_template] assign[=] binary_operation[binary_operation[constant[ ] * name[indent]] + constant[- {}]] return[call[constant[ ].join, parameter[call[name[map], parameter[name[line_template].format, name[items]]]]]]
keyword[def] identifier[bulleted_list] ( identifier[items] , identifier[max_count] = keyword[None] , identifier[indent] = literal[int] ): literal[string] keyword[if] identifier[max_count] keyword[is] keyword[not] keyword[None] keyword[and] identifier[len] ( identifier[items] )> identifier[max_count] : identifier[item_list] = identifier[list] ( identifier[items] ) identifier[items] = identifier[item_list] [: identifier[max_count] - literal[int] ] identifier[items] . identifier[append] ( literal[string] ) identifier[items] . identifier[append] ( identifier[item_list] [- literal[int] ]) identifier[line_template] =( literal[string] * identifier[indent] )+ literal[string] keyword[return] literal[string] . identifier[join] ( identifier[map] ( identifier[line_template] . identifier[format] , identifier[items] ))
def bulleted_list(items, max_count=None, indent=2): """Format a bulleted list of values. """ if max_count is not None and len(items) > max_count: item_list = list(items) items = item_list[:max_count - 1] items.append('...') items.append(item_list[-1]) # depends on [control=['if'], data=[]] line_template = ' ' * indent + '- {}' return '\n'.join(map(line_template.format, items))
def Read(self, length=None): """Read from the file.""" if self.progress_callback: self.progress_callback() available_to_read = max(0, (self.size or 0) - self.offset) if length is None: to_read = available_to_read else: to_read = min(length, available_to_read) with FileHandleManager(self.filename) as fd: offset = self.file_offset + self.offset pre_padding = offset % self.alignment # Due to alignment we read some more data than we need to. aligned_offset = offset - pre_padding fd.Seek(aligned_offset) data = fd.Read(to_read + pre_padding) self.offset += len(data) - pre_padding return data[pre_padding:]
def function[Read, parameter[self, length]]: constant[Read from the file.] if name[self].progress_callback begin[:] call[name[self].progress_callback, parameter[]] variable[available_to_read] assign[=] call[name[max], parameter[constant[0], binary_operation[<ast.BoolOp object at 0x7da1b1cecbe0> - name[self].offset]]] if compare[name[length] is constant[None]] begin[:] variable[to_read] assign[=] name[available_to_read] with call[name[FileHandleManager], parameter[name[self].filename]] begin[:] variable[offset] assign[=] binary_operation[name[self].file_offset + name[self].offset] variable[pre_padding] assign[=] binary_operation[name[offset] <ast.Mod object at 0x7da2590d6920> name[self].alignment] variable[aligned_offset] assign[=] binary_operation[name[offset] - name[pre_padding]] call[name[fd].Seek, parameter[name[aligned_offset]]] variable[data] assign[=] call[name[fd].Read, parameter[binary_operation[name[to_read] + name[pre_padding]]]] <ast.AugAssign object at 0x7da1b1ceeec0> return[call[name[data]][<ast.Slice object at 0x7da1b1ceea40>]]
keyword[def] identifier[Read] ( identifier[self] , identifier[length] = keyword[None] ): literal[string] keyword[if] identifier[self] . identifier[progress_callback] : identifier[self] . identifier[progress_callback] () identifier[available_to_read] = identifier[max] ( literal[int] ,( identifier[self] . identifier[size] keyword[or] literal[int] )- identifier[self] . identifier[offset] ) keyword[if] identifier[length] keyword[is] keyword[None] : identifier[to_read] = identifier[available_to_read] keyword[else] : identifier[to_read] = identifier[min] ( identifier[length] , identifier[available_to_read] ) keyword[with] identifier[FileHandleManager] ( identifier[self] . identifier[filename] ) keyword[as] identifier[fd] : identifier[offset] = identifier[self] . identifier[file_offset] + identifier[self] . identifier[offset] identifier[pre_padding] = identifier[offset] % identifier[self] . identifier[alignment] identifier[aligned_offset] = identifier[offset] - identifier[pre_padding] identifier[fd] . identifier[Seek] ( identifier[aligned_offset] ) identifier[data] = identifier[fd] . identifier[Read] ( identifier[to_read] + identifier[pre_padding] ) identifier[self] . identifier[offset] += identifier[len] ( identifier[data] )- identifier[pre_padding] keyword[return] identifier[data] [ identifier[pre_padding] :]
def Read(self, length=None): """Read from the file.""" if self.progress_callback: self.progress_callback() # depends on [control=['if'], data=[]] available_to_read = max(0, (self.size or 0) - self.offset) if length is None: to_read = available_to_read # depends on [control=['if'], data=[]] else: to_read = min(length, available_to_read) with FileHandleManager(self.filename) as fd: offset = self.file_offset + self.offset pre_padding = offset % self.alignment # Due to alignment we read some more data than we need to. aligned_offset = offset - pre_padding fd.Seek(aligned_offset) data = fd.Read(to_read + pre_padding) self.offset += len(data) - pre_padding return data[pre_padding:] # depends on [control=['with'], data=['fd']]
def matches_factor_conditions(s, env): """"Returns True if py{33, 34} expanded is contained in env.name.""" env_labels = set(env.name.split('-')) labels = set(bash_expand(s)) return bool(labels & env_labels)
def function[matches_factor_conditions, parameter[s, env]]: constant["Returns True if py{33, 34} expanded is contained in env.name.] variable[env_labels] assign[=] call[name[set], parameter[call[name[env].name.split, parameter[constant[-]]]]] variable[labels] assign[=] call[name[set], parameter[call[name[bash_expand], parameter[name[s]]]]] return[call[name[bool], parameter[binary_operation[name[labels] <ast.BitAnd object at 0x7da2590d6b60> name[env_labels]]]]]
keyword[def] identifier[matches_factor_conditions] ( identifier[s] , identifier[env] ): literal[string] identifier[env_labels] = identifier[set] ( identifier[env] . identifier[name] . identifier[split] ( literal[string] )) identifier[labels] = identifier[set] ( identifier[bash_expand] ( identifier[s] )) keyword[return] identifier[bool] ( identifier[labels] & identifier[env_labels] )
def matches_factor_conditions(s, env): """"Returns True if py{33, 34} expanded is contained in env.name.""" env_labels = set(env.name.split('-')) labels = set(bash_expand(s)) return bool(labels & env_labels)
def _handle_typename(self, node, scope, ctxt, stream): """TODO: Docstring for _handle_typename :node: TODO :scope: TODO :ctxt: TODO :stream: TODO :returns: TODO """ self._dlog("handling typename") return self._handle_node(node.type, scope, ctxt, stream)
def function[_handle_typename, parameter[self, node, scope, ctxt, stream]]: constant[TODO: Docstring for _handle_typename :node: TODO :scope: TODO :ctxt: TODO :stream: TODO :returns: TODO ] call[name[self]._dlog, parameter[constant[handling typename]]] return[call[name[self]._handle_node, parameter[name[node].type, name[scope], name[ctxt], name[stream]]]]
keyword[def] identifier[_handle_typename] ( identifier[self] , identifier[node] , identifier[scope] , identifier[ctxt] , identifier[stream] ): literal[string] identifier[self] . identifier[_dlog] ( literal[string] ) keyword[return] identifier[self] . identifier[_handle_node] ( identifier[node] . identifier[type] , identifier[scope] , identifier[ctxt] , identifier[stream] )
def _handle_typename(self, node, scope, ctxt, stream): """TODO: Docstring for _handle_typename :node: TODO :scope: TODO :ctxt: TODO :stream: TODO :returns: TODO """ self._dlog('handling typename') return self._handle_node(node.type, scope, ctxt, stream)
def _process_outgoing(self, xmlstream, token): """ Process the current outgoing stanza `token` and also any other outgoing stanza which is currently in the active queue. After all stanzas have been processed, use :meth:`_send_ping` to allow an opportunistic ping to be sent. """ self._send_stanza(xmlstream, token) # try to send a bulk while True: try: token = self._active_queue.get_nowait() except asyncio.QueueEmpty: break self._send_stanza(xmlstream, token) if self._sm_enabled: self._logger.debug("sending SM req") xmlstream.send_xso(nonza.SMRequest())
def function[_process_outgoing, parameter[self, xmlstream, token]]: constant[ Process the current outgoing stanza `token` and also any other outgoing stanza which is currently in the active queue. After all stanzas have been processed, use :meth:`_send_ping` to allow an opportunistic ping to be sent. ] call[name[self]._send_stanza, parameter[name[xmlstream], name[token]]] while constant[True] begin[:] <ast.Try object at 0x7da204566a10> call[name[self]._send_stanza, parameter[name[xmlstream], name[token]]] if name[self]._sm_enabled begin[:] call[name[self]._logger.debug, parameter[constant[sending SM req]]] call[name[xmlstream].send_xso, parameter[call[name[nonza].SMRequest, parameter[]]]]
keyword[def] identifier[_process_outgoing] ( identifier[self] , identifier[xmlstream] , identifier[token] ): literal[string] identifier[self] . identifier[_send_stanza] ( identifier[xmlstream] , identifier[token] ) keyword[while] keyword[True] : keyword[try] : identifier[token] = identifier[self] . identifier[_active_queue] . identifier[get_nowait] () keyword[except] identifier[asyncio] . identifier[QueueEmpty] : keyword[break] identifier[self] . identifier[_send_stanza] ( identifier[xmlstream] , identifier[token] ) keyword[if] identifier[self] . identifier[_sm_enabled] : identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] ) identifier[xmlstream] . identifier[send_xso] ( identifier[nonza] . identifier[SMRequest] ())
def _process_outgoing(self, xmlstream, token): """ Process the current outgoing stanza `token` and also any other outgoing stanza which is currently in the active queue. After all stanzas have been processed, use :meth:`_send_ping` to allow an opportunistic ping to be sent. """ self._send_stanza(xmlstream, token) # try to send a bulk while True: try: token = self._active_queue.get_nowait() # depends on [control=['try'], data=[]] except asyncio.QueueEmpty: break # depends on [control=['except'], data=[]] self._send_stanza(xmlstream, token) # depends on [control=['while'], data=[]] if self._sm_enabled: self._logger.debug('sending SM req') xmlstream.send_xso(nonza.SMRequest()) # depends on [control=['if'], data=[]]
def populateFromHeader(self, readGroupHeader): """ Populate the instance variables using the specified SAM header. """ self._sampleName = readGroupHeader.get('SM', None) self._description = readGroupHeader.get('DS', None) if 'PI' in readGroupHeader: self._predictedInsertSize = int(readGroupHeader['PI']) self._instrumentModel = readGroupHeader.get('PL', None) self._sequencingCenter = readGroupHeader.get('CN', None) self._experimentDescription = readGroupHeader.get('DS', None) self._library = readGroupHeader.get('LB', None) self._platformUnit = readGroupHeader.get('PU', None) self._runTime = readGroupHeader.get('DT', None)
def function[populateFromHeader, parameter[self, readGroupHeader]]: constant[ Populate the instance variables using the specified SAM header. ] name[self]._sampleName assign[=] call[name[readGroupHeader].get, parameter[constant[SM], constant[None]]] name[self]._description assign[=] call[name[readGroupHeader].get, parameter[constant[DS], constant[None]]] if compare[constant[PI] in name[readGroupHeader]] begin[:] name[self]._predictedInsertSize assign[=] call[name[int], parameter[call[name[readGroupHeader]][constant[PI]]]] name[self]._instrumentModel assign[=] call[name[readGroupHeader].get, parameter[constant[PL], constant[None]]] name[self]._sequencingCenter assign[=] call[name[readGroupHeader].get, parameter[constant[CN], constant[None]]] name[self]._experimentDescription assign[=] call[name[readGroupHeader].get, parameter[constant[DS], constant[None]]] name[self]._library assign[=] call[name[readGroupHeader].get, parameter[constant[LB], constant[None]]] name[self]._platformUnit assign[=] call[name[readGroupHeader].get, parameter[constant[PU], constant[None]]] name[self]._runTime assign[=] call[name[readGroupHeader].get, parameter[constant[DT], constant[None]]]
keyword[def] identifier[populateFromHeader] ( identifier[self] , identifier[readGroupHeader] ): literal[string] identifier[self] . identifier[_sampleName] = identifier[readGroupHeader] . identifier[get] ( literal[string] , keyword[None] ) identifier[self] . identifier[_description] = identifier[readGroupHeader] . identifier[get] ( literal[string] , keyword[None] ) keyword[if] literal[string] keyword[in] identifier[readGroupHeader] : identifier[self] . identifier[_predictedInsertSize] = identifier[int] ( identifier[readGroupHeader] [ literal[string] ]) identifier[self] . identifier[_instrumentModel] = identifier[readGroupHeader] . identifier[get] ( literal[string] , keyword[None] ) identifier[self] . identifier[_sequencingCenter] = identifier[readGroupHeader] . identifier[get] ( literal[string] , keyword[None] ) identifier[self] . identifier[_experimentDescription] = identifier[readGroupHeader] . identifier[get] ( literal[string] , keyword[None] ) identifier[self] . identifier[_library] = identifier[readGroupHeader] . identifier[get] ( literal[string] , keyword[None] ) identifier[self] . identifier[_platformUnit] = identifier[readGroupHeader] . identifier[get] ( literal[string] , keyword[None] ) identifier[self] . identifier[_runTime] = identifier[readGroupHeader] . identifier[get] ( literal[string] , keyword[None] )
def populateFromHeader(self, readGroupHeader): """ Populate the instance variables using the specified SAM header. """ self._sampleName = readGroupHeader.get('SM', None) self._description = readGroupHeader.get('DS', None) if 'PI' in readGroupHeader: self._predictedInsertSize = int(readGroupHeader['PI']) # depends on [control=['if'], data=['readGroupHeader']] self._instrumentModel = readGroupHeader.get('PL', None) self._sequencingCenter = readGroupHeader.get('CN', None) self._experimentDescription = readGroupHeader.get('DS', None) self._library = readGroupHeader.get('LB', None) self._platformUnit = readGroupHeader.get('PU', None) self._runTime = readGroupHeader.get('DT', None)
def _get_broker_offsets(self, instance, topics): """ Fetch highwater offsets for each topic/partition from Kafka cluster. Do this for all partitions in the cluster because even if it has no consumers, we may want to measure whether producers are successfully producing. No need to limit this for performance because fetching broker offsets from Kafka is a relatively inexpensive operation. Sends one OffsetRequest per broker to get offsets for all partitions where that broker is the leader: https://cwiki.apache.org/confluence/display/KAFKA/A+Guide+To+The+Kafka+Protocol#AGuideToTheKafkaProtocol-OffsetAPI(AKAListOffset) Can we cleanup connections on agent restart? Brokers before 0.9 - accumulate stale connections on restarts. In 0.9 Kafka added connections.max.idle.ms https://issues.apache.org/jira/browse/KAFKA-1282 """ # Connect to Kafka highwater_offsets = {} topic_partitions_without_a_leader = [] topics_to_fetch = defaultdict(set) cli = self._get_kafka_client(instance) for topic, partitions in iteritems(topics): # if no partitions are provided # we're falling back to all available partitions (?) 
if len(partitions) == 0: partitions = cli.cluster.available_partitions_for_topic(topic) topics_to_fetch[topic].update(partitions) leader_tp = defaultdict(lambda: defaultdict(set)) for topic, partitions in iteritems(topics_to_fetch): for partition in partitions: partition_leader = cli.cluster.leader_for_partition(TopicPartition(topic, partition)) if partition_leader is not None and partition_leader >= 0: leader_tp[partition_leader][topic].add(partition) max_offsets = 1 for node_id, tps in iteritems(leader_tp): # Construct the OffsetRequest request = OffsetRequest[0]( replica_id=-1, topics=[ (topic, [(partition, OffsetResetStrategy.LATEST, max_offsets) for partition in partitions]) for topic, partitions in iteritems(tps) ], ) response = self._make_blocking_req(cli, request, node_id=node_id) offsets, unled = self._process_highwater_offsets(response) highwater_offsets.update(offsets) topic_partitions_without_a_leader.extend(unled) return highwater_offsets, list(set(topic_partitions_without_a_leader))
def function[_get_broker_offsets, parameter[self, instance, topics]]: constant[ Fetch highwater offsets for each topic/partition from Kafka cluster. Do this for all partitions in the cluster because even if it has no consumers, we may want to measure whether producers are successfully producing. No need to limit this for performance because fetching broker offsets from Kafka is a relatively inexpensive operation. Sends one OffsetRequest per broker to get offsets for all partitions where that broker is the leader: https://cwiki.apache.org/confluence/display/KAFKA/A+Guide+To+The+Kafka+Protocol#AGuideToTheKafkaProtocol-OffsetAPI(AKAListOffset) Can we cleanup connections on agent restart? Brokers before 0.9 - accumulate stale connections on restarts. In 0.9 Kafka added connections.max.idle.ms https://issues.apache.org/jira/browse/KAFKA-1282 ] variable[highwater_offsets] assign[=] dictionary[[], []] variable[topic_partitions_without_a_leader] assign[=] list[[]] variable[topics_to_fetch] assign[=] call[name[defaultdict], parameter[name[set]]] variable[cli] assign[=] call[name[self]._get_kafka_client, parameter[name[instance]]] for taget[tuple[[<ast.Name object at 0x7da1b21bb910>, <ast.Name object at 0x7da1b21bad40>]]] in starred[call[name[iteritems], parameter[name[topics]]]] begin[:] if compare[call[name[len], parameter[name[partitions]]] equal[==] constant[0]] begin[:] variable[partitions] assign[=] call[name[cli].cluster.available_partitions_for_topic, parameter[name[topic]]] call[call[name[topics_to_fetch]][name[topic]].update, parameter[name[partitions]]] variable[leader_tp] assign[=] call[name[defaultdict], parameter[<ast.Lambda object at 0x7da1b21b9030>]] for taget[tuple[[<ast.Name object at 0x7da207f01e70>, <ast.Name object at 0x7da207f00d60>]]] in starred[call[name[iteritems], parameter[name[topics_to_fetch]]]] begin[:] for taget[name[partition]] in starred[name[partitions]] begin[:] variable[partition_leader] assign[=] 
call[name[cli].cluster.leader_for_partition, parameter[call[name[TopicPartition], parameter[name[topic], name[partition]]]]] if <ast.BoolOp object at 0x7da1b26ada50> begin[:] call[call[call[name[leader_tp]][name[partition_leader]]][name[topic]].add, parameter[name[partition]]] variable[max_offsets] assign[=] constant[1] for taget[tuple[[<ast.Name object at 0x7da1b26aceb0>, <ast.Name object at 0x7da1b26aeb90>]]] in starred[call[name[iteritems], parameter[name[leader_tp]]]] begin[:] variable[request] assign[=] call[call[name[OffsetRequest]][constant[0]], parameter[]] variable[response] assign[=] call[name[self]._make_blocking_req, parameter[name[cli], name[request]]] <ast.Tuple object at 0x7da20c7962f0> assign[=] call[name[self]._process_highwater_offsets, parameter[name[response]]] call[name[highwater_offsets].update, parameter[name[offsets]]] call[name[topic_partitions_without_a_leader].extend, parameter[name[unled]]] return[tuple[[<ast.Name object at 0x7da18f812830>, <ast.Call object at 0x7da18f812aa0>]]]
keyword[def] identifier[_get_broker_offsets] ( identifier[self] , identifier[instance] , identifier[topics] ): literal[string] identifier[highwater_offsets] ={} identifier[topic_partitions_without_a_leader] =[] identifier[topics_to_fetch] = identifier[defaultdict] ( identifier[set] ) identifier[cli] = identifier[self] . identifier[_get_kafka_client] ( identifier[instance] ) keyword[for] identifier[topic] , identifier[partitions] keyword[in] identifier[iteritems] ( identifier[topics] ): keyword[if] identifier[len] ( identifier[partitions] )== literal[int] : identifier[partitions] = identifier[cli] . identifier[cluster] . identifier[available_partitions_for_topic] ( identifier[topic] ) identifier[topics_to_fetch] [ identifier[topic] ]. identifier[update] ( identifier[partitions] ) identifier[leader_tp] = identifier[defaultdict] ( keyword[lambda] : identifier[defaultdict] ( identifier[set] )) keyword[for] identifier[topic] , identifier[partitions] keyword[in] identifier[iteritems] ( identifier[topics_to_fetch] ): keyword[for] identifier[partition] keyword[in] identifier[partitions] : identifier[partition_leader] = identifier[cli] . identifier[cluster] . identifier[leader_for_partition] ( identifier[TopicPartition] ( identifier[topic] , identifier[partition] )) keyword[if] identifier[partition_leader] keyword[is] keyword[not] keyword[None] keyword[and] identifier[partition_leader] >= literal[int] : identifier[leader_tp] [ identifier[partition_leader] ][ identifier[topic] ]. identifier[add] ( identifier[partition] ) identifier[max_offsets] = literal[int] keyword[for] identifier[node_id] , identifier[tps] keyword[in] identifier[iteritems] ( identifier[leader_tp] ): identifier[request] = identifier[OffsetRequest] [ literal[int] ]( identifier[replica_id] =- literal[int] , identifier[topics] =[ ( identifier[topic] ,[( identifier[partition] , identifier[OffsetResetStrategy] . 
identifier[LATEST] , identifier[max_offsets] ) keyword[for] identifier[partition] keyword[in] identifier[partitions] ]) keyword[for] identifier[topic] , identifier[partitions] keyword[in] identifier[iteritems] ( identifier[tps] ) ], ) identifier[response] = identifier[self] . identifier[_make_blocking_req] ( identifier[cli] , identifier[request] , identifier[node_id] = identifier[node_id] ) identifier[offsets] , identifier[unled] = identifier[self] . identifier[_process_highwater_offsets] ( identifier[response] ) identifier[highwater_offsets] . identifier[update] ( identifier[offsets] ) identifier[topic_partitions_without_a_leader] . identifier[extend] ( identifier[unled] ) keyword[return] identifier[highwater_offsets] , identifier[list] ( identifier[set] ( identifier[topic_partitions_without_a_leader] ))
def _get_broker_offsets(self, instance, topics): """ Fetch highwater offsets for each topic/partition from Kafka cluster. Do this for all partitions in the cluster because even if it has no consumers, we may want to measure whether producers are successfully producing. No need to limit this for performance because fetching broker offsets from Kafka is a relatively inexpensive operation. Sends one OffsetRequest per broker to get offsets for all partitions where that broker is the leader: https://cwiki.apache.org/confluence/display/KAFKA/A+Guide+To+The+Kafka+Protocol#AGuideToTheKafkaProtocol-OffsetAPI(AKAListOffset) Can we cleanup connections on agent restart? Brokers before 0.9 - accumulate stale connections on restarts. In 0.9 Kafka added connections.max.idle.ms https://issues.apache.org/jira/browse/KAFKA-1282 """ # Connect to Kafka highwater_offsets = {} topic_partitions_without_a_leader = [] topics_to_fetch = defaultdict(set) cli = self._get_kafka_client(instance) for (topic, partitions) in iteritems(topics): # if no partitions are provided # we're falling back to all available partitions (?) 
if len(partitions) == 0: partitions = cli.cluster.available_partitions_for_topic(topic) # depends on [control=['if'], data=[]] topics_to_fetch[topic].update(partitions) # depends on [control=['for'], data=[]] leader_tp = defaultdict(lambda : defaultdict(set)) for (topic, partitions) in iteritems(topics_to_fetch): for partition in partitions: partition_leader = cli.cluster.leader_for_partition(TopicPartition(topic, partition)) if partition_leader is not None and partition_leader >= 0: leader_tp[partition_leader][topic].add(partition) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['partition']] # depends on [control=['for'], data=[]] max_offsets = 1 for (node_id, tps) in iteritems(leader_tp): # Construct the OffsetRequest request = OffsetRequest[0](replica_id=-1, topics=[(topic, [(partition, OffsetResetStrategy.LATEST, max_offsets) for partition in partitions]) for (topic, partitions) in iteritems(tps)]) response = self._make_blocking_req(cli, request, node_id=node_id) (offsets, unled) = self._process_highwater_offsets(response) highwater_offsets.update(offsets) topic_partitions_without_a_leader.extend(unled) # depends on [control=['for'], data=[]] return (highwater_offsets, list(set(topic_partitions_without_a_leader)))
def check_next(self, tag): """ If next tag is link with same href, combine them. """ if (type(tag.next_sibling) == element.Tag and tag.next_sibling.name == 'a'): next_tag = tag.next_sibling if tag.get('href') and next_tag.get('href'): href = self._parse_href(tag.get('href')) next_href = self._parse_href(next_tag.get('href')) if href == next_href: next_text = next_tag.get_text() tag.append(next_text) self.tags_blacklist.append(next_tag)
def function[check_next, parameter[self, tag]]: constant[ If next tag is link with same href, combine them. ] if <ast.BoolOp object at 0x7da20cabde70> begin[:] variable[next_tag] assign[=] name[tag].next_sibling if <ast.BoolOp object at 0x7da20cabc940> begin[:] variable[href] assign[=] call[name[self]._parse_href, parameter[call[name[tag].get, parameter[constant[href]]]]] variable[next_href] assign[=] call[name[self]._parse_href, parameter[call[name[next_tag].get, parameter[constant[href]]]]] if compare[name[href] equal[==] name[next_href]] begin[:] variable[next_text] assign[=] call[name[next_tag].get_text, parameter[]] call[name[tag].append, parameter[name[next_text]]] call[name[self].tags_blacklist.append, parameter[name[next_tag]]]
keyword[def] identifier[check_next] ( identifier[self] , identifier[tag] ): literal[string] keyword[if] ( identifier[type] ( identifier[tag] . identifier[next_sibling] )== identifier[element] . identifier[Tag] keyword[and] identifier[tag] . identifier[next_sibling] . identifier[name] == literal[string] ): identifier[next_tag] = identifier[tag] . identifier[next_sibling] keyword[if] identifier[tag] . identifier[get] ( literal[string] ) keyword[and] identifier[next_tag] . identifier[get] ( literal[string] ): identifier[href] = identifier[self] . identifier[_parse_href] ( identifier[tag] . identifier[get] ( literal[string] )) identifier[next_href] = identifier[self] . identifier[_parse_href] ( identifier[next_tag] . identifier[get] ( literal[string] )) keyword[if] identifier[href] == identifier[next_href] : identifier[next_text] = identifier[next_tag] . identifier[get_text] () identifier[tag] . identifier[append] ( identifier[next_text] ) identifier[self] . identifier[tags_blacklist] . identifier[append] ( identifier[next_tag] )
def check_next(self, tag): """ If next tag is link with same href, combine them. """ if type(tag.next_sibling) == element.Tag and tag.next_sibling.name == 'a': next_tag = tag.next_sibling if tag.get('href') and next_tag.get('href'): href = self._parse_href(tag.get('href')) next_href = self._parse_href(next_tag.get('href')) if href == next_href: next_text = next_tag.get_text() tag.append(next_text) self.tags_blacklist.append(next_tag) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def first_timestamp(self, event_key=None): """Obtain the first timestamp. Args: event_key: the type key of the sought events (e.g., constants.NAN_KEY). If None, includes all event type keys. Returns: First (earliest) timestamp of all the events of the given type (or all event types if event_key is None). """ if event_key is None: timestamps = [self._trackers[key].first_timestamp for key in self._trackers] return min(timestamp for timestamp in timestamps if timestamp >= 0) else: return self._trackers[event_key].first_timestamp
def function[first_timestamp, parameter[self, event_key]]: constant[Obtain the first timestamp. Args: event_key: the type key of the sought events (e.g., constants.NAN_KEY). If None, includes all event type keys. Returns: First (earliest) timestamp of all the events of the given type (or all event types if event_key is None). ] if compare[name[event_key] is constant[None]] begin[:] variable[timestamps] assign[=] <ast.ListComp object at 0x7da1b2169bd0> return[call[name[min], parameter[<ast.GeneratorExp object at 0x7da1b2169ed0>]]]
keyword[def] identifier[first_timestamp] ( identifier[self] , identifier[event_key] = keyword[None] ): literal[string] keyword[if] identifier[event_key] keyword[is] keyword[None] : identifier[timestamps] =[ identifier[self] . identifier[_trackers] [ identifier[key] ]. identifier[first_timestamp] keyword[for] identifier[key] keyword[in] identifier[self] . identifier[_trackers] ] keyword[return] identifier[min] ( identifier[timestamp] keyword[for] identifier[timestamp] keyword[in] identifier[timestamps] keyword[if] identifier[timestamp] >= literal[int] ) keyword[else] : keyword[return] identifier[self] . identifier[_trackers] [ identifier[event_key] ]. identifier[first_timestamp]
def first_timestamp(self, event_key=None): """Obtain the first timestamp. Args: event_key: the type key of the sought events (e.g., constants.NAN_KEY). If None, includes all event type keys. Returns: First (earliest) timestamp of all the events of the given type (or all event types if event_key is None). """ if event_key is None: timestamps = [self._trackers[key].first_timestamp for key in self._trackers] return min((timestamp for timestamp in timestamps if timestamp >= 0)) # depends on [control=['if'], data=[]] else: return self._trackers[event_key].first_timestamp
def transformations(self, relationship="all"): """Get all the transformations of this info. Return a list of transformations involving this info. ``relationship`` can be "parent" (in which case only transformations where the info is the ``info_in`` are returned), "child" (in which case only transformations where the info is the ``info_out`` are returned) or ``all`` (in which case any transformations where the info is the ``info_out`` or the ``info_in`` are returned). The default is ``all`` """ if relationship not in ["all", "parent", "child"]: raise ValueError( "You cannot get transformations of relationship {}".format(relationship) + "Relationship can only be parent, child or all." ) if relationship == "all": return Transformation.query.filter( and_( Transformation.failed == false(), or_( Transformation.info_in == self, Transformation.info_out == self ), ) ).all() if relationship == "parent": return Transformation.query.filter_by( info_in_id=self.id, failed=False ).all() if relationship == "child": return Transformation.query.filter_by( info_out_id=self.id, failed=False ).all()
def function[transformations, parameter[self, relationship]]: constant[Get all the transformations of this info. Return a list of transformations involving this info. ``relationship`` can be "parent" (in which case only transformations where the info is the ``info_in`` are returned), "child" (in which case only transformations where the info is the ``info_out`` are returned) or ``all`` (in which case any transformations where the info is the ``info_out`` or the ``info_in`` are returned). The default is ``all`` ] if compare[name[relationship] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da1b030bd00>, <ast.Constant object at 0x7da1b030a0b0>, <ast.Constant object at 0x7da1b030bf40>]]] begin[:] <ast.Raise object at 0x7da1b0308f40> if compare[name[relationship] equal[==] constant[all]] begin[:] return[call[call[name[Transformation].query.filter, parameter[call[name[and_], parameter[compare[name[Transformation].failed equal[==] call[name[false], parameter[]]], call[name[or_], parameter[compare[name[Transformation].info_in equal[==] name[self]], compare[name[Transformation].info_out equal[==] name[self]]]]]]]].all, parameter[]]] if compare[name[relationship] equal[==] constant[parent]] begin[:] return[call[call[name[Transformation].query.filter_by, parameter[]].all, parameter[]]] if compare[name[relationship] equal[==] constant[child]] begin[:] return[call[call[name[Transformation].query.filter_by, parameter[]].all, parameter[]]]
keyword[def] identifier[transformations] ( identifier[self] , identifier[relationship] = literal[string] ): literal[string] keyword[if] identifier[relationship] keyword[not] keyword[in] [ literal[string] , literal[string] , literal[string] ]: keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[relationship] ) + literal[string] ) keyword[if] identifier[relationship] == literal[string] : keyword[return] identifier[Transformation] . identifier[query] . identifier[filter] ( identifier[and_] ( identifier[Transformation] . identifier[failed] == identifier[false] (), identifier[or_] ( identifier[Transformation] . identifier[info_in] == identifier[self] , identifier[Transformation] . identifier[info_out] == identifier[self] ), ) ). identifier[all] () keyword[if] identifier[relationship] == literal[string] : keyword[return] identifier[Transformation] . identifier[query] . identifier[filter_by] ( identifier[info_in_id] = identifier[self] . identifier[id] , identifier[failed] = keyword[False] ). identifier[all] () keyword[if] identifier[relationship] == literal[string] : keyword[return] identifier[Transformation] . identifier[query] . identifier[filter_by] ( identifier[info_out_id] = identifier[self] . identifier[id] , identifier[failed] = keyword[False] ). identifier[all] ()
def transformations(self, relationship='all'): """Get all the transformations of this info. Return a list of transformations involving this info. ``relationship`` can be "parent" (in which case only transformations where the info is the ``info_in`` are returned), "child" (in which case only transformations where the info is the ``info_out`` are returned) or ``all`` (in which case any transformations where the info is the ``info_out`` or the ``info_in`` are returned). The default is ``all`` """ if relationship not in ['all', 'parent', 'child']: raise ValueError('You cannot get transformations of relationship {}'.format(relationship) + 'Relationship can only be parent, child or all.') # depends on [control=['if'], data=['relationship']] if relationship == 'all': return Transformation.query.filter(and_(Transformation.failed == false(), or_(Transformation.info_in == self, Transformation.info_out == self))).all() # depends on [control=['if'], data=[]] if relationship == 'parent': return Transformation.query.filter_by(info_in_id=self.id, failed=False).all() # depends on [control=['if'], data=[]] if relationship == 'child': return Transformation.query.filter_by(info_out_id=self.id, failed=False).all() # depends on [control=['if'], data=[]]
def _absolute_spike_times_from_labels(self,time_dimension=0,*args,**kwargs): """ internal function that gives absolute_spike_times_from_labels, as well as the factor (maximum range) """ x = self[time_dimension].copy() #x = self.spike_times.get_converted(time_dimension,kwargs.get('units',None)) factor = self.len(time_dimension,units=kwargs.get('units',None),resolution=kwargs.get('resolution',1.0)) for a in args: if a in self: x += factor * self[a] factor *= self.len(a) for k in kwargs.keys(): if k in self: x += factor * self[k] if kwargs[k] is not None: factor *= kwargs[k] else: factor *= self.len(k) return x,factor
def function[_absolute_spike_times_from_labels, parameter[self, time_dimension]]: constant[ internal function that gives absolute_spike_times_from_labels, as well as the factor (maximum range) ] variable[x] assign[=] call[call[name[self]][name[time_dimension]].copy, parameter[]] variable[factor] assign[=] call[name[self].len, parameter[name[time_dimension]]] for taget[name[a]] in starred[name[args]] begin[:] if compare[name[a] in name[self]] begin[:] <ast.AugAssign object at 0x7da2044c34c0> <ast.AugAssign object at 0x7da2044c2a40> for taget[name[k]] in starred[call[name[kwargs].keys, parameter[]]] begin[:] if compare[name[k] in name[self]] begin[:] <ast.AugAssign object at 0x7da2044c1d80> if compare[call[name[kwargs]][name[k]] is_not constant[None]] begin[:] <ast.AugAssign object at 0x7da2044c3880> return[tuple[[<ast.Name object at 0x7da2044c1d20>, <ast.Name object at 0x7da2044c3be0>]]]
keyword[def] identifier[_absolute_spike_times_from_labels] ( identifier[self] , identifier[time_dimension] = literal[int] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[x] = identifier[self] [ identifier[time_dimension] ]. identifier[copy] () identifier[factor] = identifier[self] . identifier[len] ( identifier[time_dimension] , identifier[units] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ), identifier[resolution] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] )) keyword[for] identifier[a] keyword[in] identifier[args] : keyword[if] identifier[a] keyword[in] identifier[self] : identifier[x] += identifier[factor] * identifier[self] [ identifier[a] ] identifier[factor] *= identifier[self] . identifier[len] ( identifier[a] ) keyword[for] identifier[k] keyword[in] identifier[kwargs] . identifier[keys] (): keyword[if] identifier[k] keyword[in] identifier[self] : identifier[x] += identifier[factor] * identifier[self] [ identifier[k] ] keyword[if] identifier[kwargs] [ identifier[k] ] keyword[is] keyword[not] keyword[None] : identifier[factor] *= identifier[kwargs] [ identifier[k] ] keyword[else] : identifier[factor] *= identifier[self] . identifier[len] ( identifier[k] ) keyword[return] identifier[x] , identifier[factor]
def _absolute_spike_times_from_labels(self, time_dimension=0, *args, **kwargs): """ internal function that gives absolute_spike_times_from_labels, as well as the factor (maximum range) """ x = self[time_dimension].copy() #x = self.spike_times.get_converted(time_dimension,kwargs.get('units',None)) factor = self.len(time_dimension, units=kwargs.get('units', None), resolution=kwargs.get('resolution', 1.0)) for a in args: if a in self: x += factor * self[a] factor *= self.len(a) # depends on [control=['if'], data=['a', 'self']] # depends on [control=['for'], data=['a']] for k in kwargs.keys(): if k in self: x += factor * self[k] if kwargs[k] is not None: factor *= kwargs[k] # depends on [control=['if'], data=[]] else: factor *= self.len(k) # depends on [control=['if'], data=['k', 'self']] # depends on [control=['for'], data=['k']] return (x, factor)
def parse(self, stream): """ Parse the given stream """ lines = re.sub("[\r\n]+", "\n", stream.read()).split("\n") for line in lines: self.parseline(line)
def function[parse, parameter[self, stream]]: constant[ Parse the given stream ] variable[lines] assign[=] call[call[name[re].sub, parameter[constant[[ ]+], constant[ ], call[name[stream].read, parameter[]]]].split, parameter[constant[ ]]] for taget[name[line]] in starred[name[lines]] begin[:] call[name[self].parseline, parameter[name[line]]]
keyword[def] identifier[parse] ( identifier[self] , identifier[stream] ): literal[string] identifier[lines] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[stream] . identifier[read] ()). identifier[split] ( literal[string] ) keyword[for] identifier[line] keyword[in] identifier[lines] : identifier[self] . identifier[parseline] ( identifier[line] )
def parse(self, stream): """ Parse the given stream """ lines = re.sub('[\r\n]+', '\n', stream.read()).split('\n') for line in lines: self.parseline(line) # depends on [control=['for'], data=['line']]
def table(columns, names, page_size=None, format_strings=None): """ Return an html table of this data Parameters ---------- columns : list of numpy arrays names : list of strings The list of columns names page_size : {int, None}, optional The number of items to show on each page of the table format_strings : {lists of strings, None}, optional The ICU format string for this column, None for no formatting. All columns must have a format string if provided. Returns ------- html_table : str A str containing the html code to display a table of this data """ if page_size is None: page = 'disable' else: page = 'enable' div_id = uuid.uuid4() column_descriptions = [] for column, name in zip(columns, names): if column.dtype.kind == 'S': ctype = 'string' else: ctype = 'number' column_descriptions.append((ctype, name)) data = [] for item in zip(*columns): data.append(list(item)) return google_table_template.render(div_id=div_id, page_enable=page, column_descriptions = column_descriptions, page_size=page_size, data=data, format_strings=format_strings, )
def function[table, parameter[columns, names, page_size, format_strings]]: constant[ Return an html table of this data Parameters ---------- columns : list of numpy arrays names : list of strings The list of columns names page_size : {int, None}, optional The number of items to show on each page of the table format_strings : {lists of strings, None}, optional The ICU format string for this column, None for no formatting. All columns must have a format string if provided. Returns ------- html_table : str A str containing the html code to display a table of this data ] if compare[name[page_size] is constant[None]] begin[:] variable[page] assign[=] constant[disable] variable[div_id] assign[=] call[name[uuid].uuid4, parameter[]] variable[column_descriptions] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da2054a44c0>, <ast.Name object at 0x7da2054a4550>]]] in starred[call[name[zip], parameter[name[columns], name[names]]]] begin[:] if compare[name[column].dtype.kind equal[==] constant[S]] begin[:] variable[ctype] assign[=] constant[string] call[name[column_descriptions].append, parameter[tuple[[<ast.Name object at 0x7da2054a4700>, <ast.Name object at 0x7da2054a7f70>]]]] variable[data] assign[=] list[[]] for taget[name[item]] in starred[call[name[zip], parameter[<ast.Starred object at 0x7da2054a7a00>]]] begin[:] call[name[data].append, parameter[call[name[list], parameter[name[item]]]]] return[call[name[google_table_template].render, parameter[]]]
keyword[def] identifier[table] ( identifier[columns] , identifier[names] , identifier[page_size] = keyword[None] , identifier[format_strings] = keyword[None] ): literal[string] keyword[if] identifier[page_size] keyword[is] keyword[None] : identifier[page] = literal[string] keyword[else] : identifier[page] = literal[string] identifier[div_id] = identifier[uuid] . identifier[uuid4] () identifier[column_descriptions] =[] keyword[for] identifier[column] , identifier[name] keyword[in] identifier[zip] ( identifier[columns] , identifier[names] ): keyword[if] identifier[column] . identifier[dtype] . identifier[kind] == literal[string] : identifier[ctype] = literal[string] keyword[else] : identifier[ctype] = literal[string] identifier[column_descriptions] . identifier[append] (( identifier[ctype] , identifier[name] )) identifier[data] =[] keyword[for] identifier[item] keyword[in] identifier[zip] (* identifier[columns] ): identifier[data] . identifier[append] ( identifier[list] ( identifier[item] )) keyword[return] identifier[google_table_template] . identifier[render] ( identifier[div_id] = identifier[div_id] , identifier[page_enable] = identifier[page] , identifier[column_descriptions] = identifier[column_descriptions] , identifier[page_size] = identifier[page_size] , identifier[data] = identifier[data] , identifier[format_strings] = identifier[format_strings] , )
def table(columns, names, page_size=None, format_strings=None): """ Return an html table of this data Parameters ---------- columns : list of numpy arrays names : list of strings The list of columns names page_size : {int, None}, optional The number of items to show on each page of the table format_strings : {lists of strings, None}, optional The ICU format string for this column, None for no formatting. All columns must have a format string if provided. Returns ------- html_table : str A str containing the html code to display a table of this data """ if page_size is None: page = 'disable' # depends on [control=['if'], data=[]] else: page = 'enable' div_id = uuid.uuid4() column_descriptions = [] for (column, name) in zip(columns, names): if column.dtype.kind == 'S': ctype = 'string' # depends on [control=['if'], data=[]] else: ctype = 'number' column_descriptions.append((ctype, name)) # depends on [control=['for'], data=[]] data = [] for item in zip(*columns): data.append(list(item)) # depends on [control=['for'], data=['item']] return google_table_template.render(div_id=div_id, page_enable=page, column_descriptions=column_descriptions, page_size=page_size, data=data, format_strings=format_strings)
def parse_config_file(job, ids, input_args): """ Launches pipeline for each sample. shared_ids: dict Dictionary of fileStore IDs input_args: dict Dictionary of input arguments """ samples = [] config = input_args['config'] with open(config, 'r') as f: for line in f.readlines(): if not line.isspace(): sample = line.strip().split(',') samples.append(sample) for sample in samples: job.addChildJobFn(download_sample, ids, input_args, sample)
def function[parse_config_file, parameter[job, ids, input_args]]: constant[ Launches pipeline for each sample. shared_ids: dict Dictionary of fileStore IDs input_args: dict Dictionary of input arguments ] variable[samples] assign[=] list[[]] variable[config] assign[=] call[name[input_args]][constant[config]] with call[name[open], parameter[name[config], constant[r]]] begin[:] for taget[name[line]] in starred[call[name[f].readlines, parameter[]]] begin[:] if <ast.UnaryOp object at 0x7da18f813400> begin[:] variable[sample] assign[=] call[call[name[line].strip, parameter[]].split, parameter[constant[,]]] call[name[samples].append, parameter[name[sample]]] for taget[name[sample]] in starred[name[samples]] begin[:] call[name[job].addChildJobFn, parameter[name[download_sample], name[ids], name[input_args], name[sample]]]
keyword[def] identifier[parse_config_file] ( identifier[job] , identifier[ids] , identifier[input_args] ): literal[string] identifier[samples] =[] identifier[config] = identifier[input_args] [ literal[string] ] keyword[with] identifier[open] ( identifier[config] , literal[string] ) keyword[as] identifier[f] : keyword[for] identifier[line] keyword[in] identifier[f] . identifier[readlines] (): keyword[if] keyword[not] identifier[line] . identifier[isspace] (): identifier[sample] = identifier[line] . identifier[strip] (). identifier[split] ( literal[string] ) identifier[samples] . identifier[append] ( identifier[sample] ) keyword[for] identifier[sample] keyword[in] identifier[samples] : identifier[job] . identifier[addChildJobFn] ( identifier[download_sample] , identifier[ids] , identifier[input_args] , identifier[sample] )
def parse_config_file(job, ids, input_args): """ Launches pipeline for each sample. shared_ids: dict Dictionary of fileStore IDs input_args: dict Dictionary of input arguments """ samples = [] config = input_args['config'] with open(config, 'r') as f: for line in f.readlines(): if not line.isspace(): sample = line.strip().split(',') samples.append(sample) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['f']] for sample in samples: job.addChildJobFn(download_sample, ids, input_args, sample) # depends on [control=['for'], data=['sample']]
def shell_out(cmd, stderr=STDOUT, cwd=None): """Friendlier version of check_output.""" if cwd is None: from os import getcwd cwd = getcwd() # TODO do I need to normalize this on Windows out = check_output(cmd, cwd=cwd, stderr=stderr, universal_newlines=True) return _clean_output(out)
def function[shell_out, parameter[cmd, stderr, cwd]]: constant[Friendlier version of check_output.] if compare[name[cwd] is constant[None]] begin[:] from relative_module[os] import module[getcwd] variable[cwd] assign[=] call[name[getcwd], parameter[]] variable[out] assign[=] call[name[check_output], parameter[name[cmd]]] return[call[name[_clean_output], parameter[name[out]]]]
keyword[def] identifier[shell_out] ( identifier[cmd] , identifier[stderr] = identifier[STDOUT] , identifier[cwd] = keyword[None] ): literal[string] keyword[if] identifier[cwd] keyword[is] keyword[None] : keyword[from] identifier[os] keyword[import] identifier[getcwd] identifier[cwd] = identifier[getcwd] () identifier[out] = identifier[check_output] ( identifier[cmd] , identifier[cwd] = identifier[cwd] , identifier[stderr] = identifier[stderr] , identifier[universal_newlines] = keyword[True] ) keyword[return] identifier[_clean_output] ( identifier[out] )
def shell_out(cmd, stderr=STDOUT, cwd=None): """Friendlier version of check_output.""" if cwd is None: from os import getcwd cwd = getcwd() # TODO do I need to normalize this on Windows # depends on [control=['if'], data=['cwd']] out = check_output(cmd, cwd=cwd, stderr=stderr, universal_newlines=True) return _clean_output(out)
def sync_account(self, sync_message): """同步账户 Arguments: sync_message {[type]} -- [description] """ self.init_hold = sync_message['hold_available'] self.init_cash = sync_message['cash_available'] self.sell_available = copy.deepcopy(self.init_hold) self.history = [] self.cash = [self.init_cash] self.cash_available = self.cash[-1]
def function[sync_account, parameter[self, sync_message]]: constant[同步账户 Arguments: sync_message {[type]} -- [description] ] name[self].init_hold assign[=] call[name[sync_message]][constant[hold_available]] name[self].init_cash assign[=] call[name[sync_message]][constant[cash_available]] name[self].sell_available assign[=] call[name[copy].deepcopy, parameter[name[self].init_hold]] name[self].history assign[=] list[[]] name[self].cash assign[=] list[[<ast.Attribute object at 0x7da1b20c9c00>]] name[self].cash_available assign[=] call[name[self].cash][<ast.UnaryOp object at 0x7da1b20cb040>]
keyword[def] identifier[sync_account] ( identifier[self] , identifier[sync_message] ): literal[string] identifier[self] . identifier[init_hold] = identifier[sync_message] [ literal[string] ] identifier[self] . identifier[init_cash] = identifier[sync_message] [ literal[string] ] identifier[self] . identifier[sell_available] = identifier[copy] . identifier[deepcopy] ( identifier[self] . identifier[init_hold] ) identifier[self] . identifier[history] =[] identifier[self] . identifier[cash] =[ identifier[self] . identifier[init_cash] ] identifier[self] . identifier[cash_available] = identifier[self] . identifier[cash] [- literal[int] ]
def sync_account(self, sync_message): """同步账户 Arguments: sync_message {[type]} -- [description] """ self.init_hold = sync_message['hold_available'] self.init_cash = sync_message['cash_available'] self.sell_available = copy.deepcopy(self.init_hold) self.history = [] self.cash = [self.init_cash] self.cash_available = self.cash[-1]
def index_missing_documents(self, documents, request=None): """ Index documents that are missing from ES index. Determines which documents are missing using ES `mget` call which returns a list of document IDs as `documents`. Then missing `documents` from that list are indexed. """ log.info('Trying to index documents of type `{}` missing from ' '`{}` index'.format(self.doc_type, self.index_name)) if not documents: log.info('No documents to index') return query_kwargs = dict( index=self.index_name, doc_type=self.doc_type, fields=['_id'], body={'ids': [d['_pk'] for d in documents]}, ) try: response = self.api.mget(**query_kwargs) except IndexNotFoundException: indexed_ids = set() else: indexed_ids = set( d['_id'] for d in response['docs'] if d.get('found')) documents = [d for d in documents if str(d['_pk']) not in indexed_ids] if not documents: log.info('No documents of type `{}` are missing from ' 'index `{}`'.format(self.doc_type, self.index_name)) return self._bulk('index', documents, request)
def function[index_missing_documents, parameter[self, documents, request]]: constant[ Index documents that are missing from ES index. Determines which documents are missing using ES `mget` call which returns a list of document IDs as `documents`. Then missing `documents` from that list are indexed. ] call[name[log].info, parameter[call[constant[Trying to index documents of type `{}` missing from `{}` index].format, parameter[name[self].doc_type, name[self].index_name]]]] if <ast.UnaryOp object at 0x7da18f58e1a0> begin[:] call[name[log].info, parameter[constant[No documents to index]]] return[None] variable[query_kwargs] assign[=] call[name[dict], parameter[]] <ast.Try object at 0x7da18f58ef20> variable[documents] assign[=] <ast.ListComp object at 0x7da1b0ff1780> if <ast.UnaryOp object at 0x7da1b0ff1810> begin[:] call[name[log].info, parameter[call[constant[No documents of type `{}` are missing from index `{}`].format, parameter[name[self].doc_type, name[self].index_name]]]] return[None] call[name[self]._bulk, parameter[constant[index], name[documents], name[request]]]
keyword[def] identifier[index_missing_documents] ( identifier[self] , identifier[documents] , identifier[request] = keyword[None] ): literal[string] identifier[log] . identifier[info] ( literal[string] literal[string] . identifier[format] ( identifier[self] . identifier[doc_type] , identifier[self] . identifier[index_name] )) keyword[if] keyword[not] identifier[documents] : identifier[log] . identifier[info] ( literal[string] ) keyword[return] identifier[query_kwargs] = identifier[dict] ( identifier[index] = identifier[self] . identifier[index_name] , identifier[doc_type] = identifier[self] . identifier[doc_type] , identifier[fields] =[ literal[string] ], identifier[body] ={ literal[string] :[ identifier[d] [ literal[string] ] keyword[for] identifier[d] keyword[in] identifier[documents] ]}, ) keyword[try] : identifier[response] = identifier[self] . identifier[api] . identifier[mget] (** identifier[query_kwargs] ) keyword[except] identifier[IndexNotFoundException] : identifier[indexed_ids] = identifier[set] () keyword[else] : identifier[indexed_ids] = identifier[set] ( identifier[d] [ literal[string] ] keyword[for] identifier[d] keyword[in] identifier[response] [ literal[string] ] keyword[if] identifier[d] . identifier[get] ( literal[string] )) identifier[documents] =[ identifier[d] keyword[for] identifier[d] keyword[in] identifier[documents] keyword[if] identifier[str] ( identifier[d] [ literal[string] ]) keyword[not] keyword[in] identifier[indexed_ids] ] keyword[if] keyword[not] identifier[documents] : identifier[log] . identifier[info] ( literal[string] literal[string] . identifier[format] ( identifier[self] . identifier[doc_type] , identifier[self] . identifier[index_name] )) keyword[return] identifier[self] . identifier[_bulk] ( literal[string] , identifier[documents] , identifier[request] )
def index_missing_documents(self, documents, request=None): """ Index documents that are missing from ES index. Determines which documents are missing using ES `mget` call which returns a list of document IDs as `documents`. Then missing `documents` from that list are indexed. """ log.info('Trying to index documents of type `{}` missing from `{}` index'.format(self.doc_type, self.index_name)) if not documents: log.info('No documents to index') return # depends on [control=['if'], data=[]] query_kwargs = dict(index=self.index_name, doc_type=self.doc_type, fields=['_id'], body={'ids': [d['_pk'] for d in documents]}) try: response = self.api.mget(**query_kwargs) # depends on [control=['try'], data=[]] except IndexNotFoundException: indexed_ids = set() # depends on [control=['except'], data=[]] else: indexed_ids = set((d['_id'] for d in response['docs'] if d.get('found'))) documents = [d for d in documents if str(d['_pk']) not in indexed_ids] if not documents: log.info('No documents of type `{}` are missing from index `{}`'.format(self.doc_type, self.index_name)) return # depends on [control=['if'], data=[]] self._bulk('index', documents, request)
def _descend_cashed(self, curr, s): """ Спуск из вершины curr по строке s с кэшированием """ if s == "": return curr curr_cash = self._descendance_cash[curr] answer = curr_cash.get(s, None) if answer is not None: return answer # для оптимизации дублируем код res = curr for a in s: res = self.graph[res][self.alphabet_codes[a]] # res = self.graph[res][a] if res == Trie.NO_NODE: break curr_cash[s] = res return res
def function[_descend_cashed, parameter[self, curr, s]]: constant[ Спуск из вершины curr по строке s с кэшированием ] if compare[name[s] equal[==] constant[]] begin[:] return[name[curr]] variable[curr_cash] assign[=] call[name[self]._descendance_cash][name[curr]] variable[answer] assign[=] call[name[curr_cash].get, parameter[name[s], constant[None]]] if compare[name[answer] is_not constant[None]] begin[:] return[name[answer]] variable[res] assign[=] name[curr] for taget[name[a]] in starred[name[s]] begin[:] variable[res] assign[=] call[call[name[self].graph][name[res]]][call[name[self].alphabet_codes][name[a]]] if compare[name[res] equal[==] name[Trie].NO_NODE] begin[:] break call[name[curr_cash]][name[s]] assign[=] name[res] return[name[res]]
keyword[def] identifier[_descend_cashed] ( identifier[self] , identifier[curr] , identifier[s] ): literal[string] keyword[if] identifier[s] == literal[string] : keyword[return] identifier[curr] identifier[curr_cash] = identifier[self] . identifier[_descendance_cash] [ identifier[curr] ] identifier[answer] = identifier[curr_cash] . identifier[get] ( identifier[s] , keyword[None] ) keyword[if] identifier[answer] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[answer] identifier[res] = identifier[curr] keyword[for] identifier[a] keyword[in] identifier[s] : identifier[res] = identifier[self] . identifier[graph] [ identifier[res] ][ identifier[self] . identifier[alphabet_codes] [ identifier[a] ]] keyword[if] identifier[res] == identifier[Trie] . identifier[NO_NODE] : keyword[break] identifier[curr_cash] [ identifier[s] ]= identifier[res] keyword[return] identifier[res]
def _descend_cashed(self, curr, s): """ Спуск из вершины curr по строке s с кэшированием """ if s == '': return curr # depends on [control=['if'], data=[]] curr_cash = self._descendance_cash[curr] answer = curr_cash.get(s, None) if answer is not None: return answer # depends on [control=['if'], data=['answer']] # для оптимизации дублируем код res = curr for a in s: res = self.graph[res][self.alphabet_codes[a]] # res = self.graph[res][a] if res == Trie.NO_NODE: break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['a']] curr_cash[s] = res return res
def serialize_rules(self, rules): """Creates a payload for the redis server.""" # TODO(mdietz): If/when we support other rule types, this comment # will have to be revised. # Action and direction are static, for now. The implementation may # support 'deny' and 'egress' respectively in the future. We allow # the direction to be set to something else, technically, but current # plugin level call actually raises. It's supported here for unit # test purposes at this time serialized = [] for rule in rules: direction = rule["direction"] source = '' destination = '' if rule.get("remote_ip_prefix"): prefix = rule["remote_ip_prefix"] if direction == "ingress": source = self._convert_remote_network(prefix) else: if (Capabilities.EGRESS not in CONF.QUARK.environment_capabilities): raise q_exc.EgressSecurityGroupRulesNotEnabled() else: destination = self._convert_remote_network(prefix) optional_fields = {} # NOTE(mdietz): this will expand as we add more protocols protocol_map = protocols.PROTOCOL_MAP[rule["ethertype"]] if rule["protocol"] == protocol_map["icmp"]: optional_fields["icmp type"] = rule["port_range_min"] optional_fields["icmp code"] = rule["port_range_max"] else: optional_fields["port start"] = rule["port_range_min"] optional_fields["port end"] = rule["port_range_max"] payload = {"ethertype": rule["ethertype"], "protocol": rule["protocol"], "source network": source, "destination network": destination, "action": "allow", "direction": direction} payload.update(optional_fields) serialized.append(payload) return serialized
def function[serialize_rules, parameter[self, rules]]: constant[Creates a payload for the redis server.] variable[serialized] assign[=] list[[]] for taget[name[rule]] in starred[name[rules]] begin[:] variable[direction] assign[=] call[name[rule]][constant[direction]] variable[source] assign[=] constant[] variable[destination] assign[=] constant[] if call[name[rule].get, parameter[constant[remote_ip_prefix]]] begin[:] variable[prefix] assign[=] call[name[rule]][constant[remote_ip_prefix]] if compare[name[direction] equal[==] constant[ingress]] begin[:] variable[source] assign[=] call[name[self]._convert_remote_network, parameter[name[prefix]]] variable[optional_fields] assign[=] dictionary[[], []] variable[protocol_map] assign[=] call[name[protocols].PROTOCOL_MAP][call[name[rule]][constant[ethertype]]] if compare[call[name[rule]][constant[protocol]] equal[==] call[name[protocol_map]][constant[icmp]]] begin[:] call[name[optional_fields]][constant[icmp type]] assign[=] call[name[rule]][constant[port_range_min]] call[name[optional_fields]][constant[icmp code]] assign[=] call[name[rule]][constant[port_range_max]] variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b00f94b0>, <ast.Constant object at 0x7da1b00f92a0>, <ast.Constant object at 0x7da1b00f8be0>, <ast.Constant object at 0x7da1b00fab90>, <ast.Constant object at 0x7da1b00fb280>, <ast.Constant object at 0x7da1b00fa5c0>], [<ast.Subscript object at 0x7da1b00f9450>, <ast.Subscript object at 0x7da1b00f8ee0>, <ast.Name object at 0x7da1b00fa2f0>, <ast.Name object at 0x7da1b00fbfa0>, <ast.Constant object at 0x7da1b00f8a00>, <ast.Name object at 0x7da1b00fa440>]] call[name[payload].update, parameter[name[optional_fields]]] call[name[serialized].append, parameter[name[payload]]] return[name[serialized]]
keyword[def] identifier[serialize_rules] ( identifier[self] , identifier[rules] ): literal[string] identifier[serialized] =[] keyword[for] identifier[rule] keyword[in] identifier[rules] : identifier[direction] = identifier[rule] [ literal[string] ] identifier[source] = literal[string] identifier[destination] = literal[string] keyword[if] identifier[rule] . identifier[get] ( literal[string] ): identifier[prefix] = identifier[rule] [ literal[string] ] keyword[if] identifier[direction] == literal[string] : identifier[source] = identifier[self] . identifier[_convert_remote_network] ( identifier[prefix] ) keyword[else] : keyword[if] ( identifier[Capabilities] . identifier[EGRESS] keyword[not] keyword[in] identifier[CONF] . identifier[QUARK] . identifier[environment_capabilities] ): keyword[raise] identifier[q_exc] . identifier[EgressSecurityGroupRulesNotEnabled] () keyword[else] : identifier[destination] = identifier[self] . identifier[_convert_remote_network] ( identifier[prefix] ) identifier[optional_fields] ={} identifier[protocol_map] = identifier[protocols] . identifier[PROTOCOL_MAP] [ identifier[rule] [ literal[string] ]] keyword[if] identifier[rule] [ literal[string] ]== identifier[protocol_map] [ literal[string] ]: identifier[optional_fields] [ literal[string] ]= identifier[rule] [ literal[string] ] identifier[optional_fields] [ literal[string] ]= identifier[rule] [ literal[string] ] keyword[else] : identifier[optional_fields] [ literal[string] ]= identifier[rule] [ literal[string] ] identifier[optional_fields] [ literal[string] ]= identifier[rule] [ literal[string] ] identifier[payload] ={ literal[string] : identifier[rule] [ literal[string] ], literal[string] : identifier[rule] [ literal[string] ], literal[string] : identifier[source] , literal[string] : identifier[destination] , literal[string] : literal[string] , literal[string] : identifier[direction] } identifier[payload] . identifier[update] ( identifier[optional_fields] ) identifier[serialized] . 
identifier[append] ( identifier[payload] ) keyword[return] identifier[serialized]
def serialize_rules(self, rules): """Creates a payload for the redis server.""" # TODO(mdietz): If/when we support other rule types, this comment # will have to be revised. # Action and direction are static, for now. The implementation may # support 'deny' and 'egress' respectively in the future. We allow # the direction to be set to something else, technically, but current # plugin level call actually raises. It's supported here for unit # test purposes at this time serialized = [] for rule in rules: direction = rule['direction'] source = '' destination = '' if rule.get('remote_ip_prefix'): prefix = rule['remote_ip_prefix'] if direction == 'ingress': source = self._convert_remote_network(prefix) # depends on [control=['if'], data=[]] elif Capabilities.EGRESS not in CONF.QUARK.environment_capabilities: raise q_exc.EgressSecurityGroupRulesNotEnabled() # depends on [control=['if'], data=[]] else: destination = self._convert_remote_network(prefix) # depends on [control=['if'], data=[]] optional_fields = {} # NOTE(mdietz): this will expand as we add more protocols protocol_map = protocols.PROTOCOL_MAP[rule['ethertype']] if rule['protocol'] == protocol_map['icmp']: optional_fields['icmp type'] = rule['port_range_min'] optional_fields['icmp code'] = rule['port_range_max'] # depends on [control=['if'], data=[]] else: optional_fields['port start'] = rule['port_range_min'] optional_fields['port end'] = rule['port_range_max'] payload = {'ethertype': rule['ethertype'], 'protocol': rule['protocol'], 'source network': source, 'destination network': destination, 'action': 'allow', 'direction': direction} payload.update(optional_fields) serialized.append(payload) # depends on [control=['for'], data=['rule']] return serialized
def getDebt(self):
    """Return the total outstanding debt of the account.

    The debt is the sum of the principal, interest, fees and penalty
    balances (each stored as a string/number convertible to float).
    """
    principal = float(self['principalBalance'])
    interest = float(self['interestBalance'])
    fees = float(self['feesBalance'])
    penalty = float(self['penaltyBalance'])
    # Grouped exactly as the balances are accumulated: (p + i) + (f + pen).
    return (principal + interest) + (fees + penalty)
def function[getDebt, parameter[self]]: constant[Sums up all the balances of the account and returns them. ] variable[debt] assign[=] binary_operation[call[name[float], parameter[call[name[self]][constant[principalBalance]]]] + call[name[float], parameter[call[name[self]][constant[interestBalance]]]]] <ast.AugAssign object at 0x7da2041daa70> return[name[debt]]
keyword[def] identifier[getDebt] ( identifier[self] ): literal[string] identifier[debt] = identifier[float] ( identifier[self] [ literal[string] ])+ identifier[float] ( identifier[self] [ literal[string] ]) identifier[debt] += identifier[float] ( identifier[self] [ literal[string] ])+ identifier[float] ( identifier[self] [ literal[string] ]) keyword[return] identifier[debt]
def getDebt(self): """Sums up all the balances of the account and returns them. """ debt = float(self['principalBalance']) + float(self['interestBalance']) debt += float(self['feesBalance']) + float(self['penaltyBalance']) return debt
def bleu_advanced(y_true: List[Any], y_predicted: List[Any],
                  weights: Tuple = (1,), smoothing_function=SMOOTH.method1,
                  auto_reweigh=False, penalty=True) -> float:
    """Compute the BLEU score of a hypothesis against a single reference.

    Parameters:
        y_true: list of reference tokens
        y_predicted: list of query (hypothesis) tokens
        weights: n-gram weights, one entry per n-gram order
        smoothing_function: NLTK SmoothingFunction method to apply
        auto_reweigh: option to re-normalize the weights uniformly
        penalty: keep the brevity penalty in the score (True) or divide
            it back out (False)

    Return:
        BLEU score
    """
    score = sentence_bleu([y_true], y_predicted, weights,
                          smoothing_function, auto_reweigh)
    # Recompute the brevity penalty so it can optionally be factored out.
    hyp_len = len(y_predicted)
    ref_len = closest_ref_length([y_true], hyp_len)
    bp = brevity_penalty(ref_len, hyp_len)
    # A zero penalty cannot be divided out, so return the raw score.
    if penalty is True or bp == 0:
        return score
    return score / bp
def function[bleu_advanced, parameter[y_true, y_predicted, weights, smoothing_function, auto_reweigh, penalty]]: constant[Calculate BLEU score Parameters: y_true: list of reference tokens y_predicted: list of query tokens weights: n-gram weights smoothing_function: SmoothingFunction auto_reweigh: Option to re-normalize the weights uniformly penalty: either enable brevity penalty or not Return: BLEU score ] variable[bleu_measure] assign[=] call[name[sentence_bleu], parameter[list[[<ast.Name object at 0x7da1b0355570>]], name[y_predicted], name[weights], name[smoothing_function], name[auto_reweigh]]] variable[hyp_len] assign[=] call[name[len], parameter[name[y_predicted]]] variable[hyp_lengths] assign[=] name[hyp_len] variable[ref_lengths] assign[=] call[name[closest_ref_length], parameter[list[[<ast.Name object at 0x7da1b03554e0>]], name[hyp_len]]] variable[bpenalty] assign[=] call[name[brevity_penalty], parameter[name[ref_lengths], name[hyp_lengths]]] if <ast.BoolOp object at 0x7da1b0356710> begin[:] return[name[bleu_measure]] return[binary_operation[name[bleu_measure] / name[bpenalty]]]
keyword[def] identifier[bleu_advanced] ( identifier[y_true] : identifier[List] [ identifier[Any] ], identifier[y_predicted] : identifier[List] [ identifier[Any] ], identifier[weights] : identifier[Tuple] =( literal[int] ,), identifier[smoothing_function] = identifier[SMOOTH] . identifier[method1] , identifier[auto_reweigh] = keyword[False] , identifier[penalty] = keyword[True] )-> identifier[float] : literal[string] identifier[bleu_measure] = identifier[sentence_bleu] ([ identifier[y_true] ], identifier[y_predicted] , identifier[weights] , identifier[smoothing_function] , identifier[auto_reweigh] ) identifier[hyp_len] = identifier[len] ( identifier[y_predicted] ) identifier[hyp_lengths] = identifier[hyp_len] identifier[ref_lengths] = identifier[closest_ref_length] ([ identifier[y_true] ], identifier[hyp_len] ) identifier[bpenalty] = identifier[brevity_penalty] ( identifier[ref_lengths] , identifier[hyp_lengths] ) keyword[if] identifier[penalty] keyword[is] keyword[True] keyword[or] identifier[bpenalty] == literal[int] : keyword[return] identifier[bleu_measure] keyword[return] identifier[bleu_measure] / identifier[bpenalty]
def bleu_advanced(y_true: List[Any], y_predicted: List[Any], weights: Tuple=(1,), smoothing_function=SMOOTH.method1, auto_reweigh=False, penalty=True) -> float: """Calculate BLEU score Parameters: y_true: list of reference tokens y_predicted: list of query tokens weights: n-gram weights smoothing_function: SmoothingFunction auto_reweigh: Option to re-normalize the weights uniformly penalty: either enable brevity penalty or not Return: BLEU score """ bleu_measure = sentence_bleu([y_true], y_predicted, weights, smoothing_function, auto_reweigh) hyp_len = len(y_predicted) hyp_lengths = hyp_len ref_lengths = closest_ref_length([y_true], hyp_len) bpenalty = brevity_penalty(ref_lengths, hyp_lengths) if penalty is True or bpenalty == 0: return bleu_measure # depends on [control=['if'], data=[]] return bleu_measure / bpenalty
def get_phi_ss(imt, mag, params):
    """
    Returns the single station phi (or its variance) for a given magnitude
    and intensity measure type according to equation 5.14 of Al Atik (2015):
    constant below M 5.0, constant above M 6.5, linear in between.
    """
    coeffs = params[imt]
    low, high = coeffs["a"], coeffs["b"]
    if mag <= 5.0:
        return low
    if mag > 6.5:
        return high
    # Linear interpolation across the 5.0–6.5 magnitude range.
    return low + (mag - 5.0) * ((high - low) / 1.5)
def function[get_phi_ss, parameter[imt, mag, params]]: constant[ Returns the single station phi (or it's variance) for a given magnitude and intensity measure type according to equation 5.14 of Al Atik (2015) ] variable[C] assign[=] call[name[params]][name[imt]] if compare[name[mag] less_or_equal[<=] constant[5.0]] begin[:] variable[phi] assign[=] call[name[C]][constant[a]] return[name[phi]]
keyword[def] identifier[get_phi_ss] ( identifier[imt] , identifier[mag] , identifier[params] ): literal[string] identifier[C] = identifier[params] [ identifier[imt] ] keyword[if] identifier[mag] <= literal[int] : identifier[phi] = identifier[C] [ literal[string] ] keyword[elif] identifier[mag] > literal[int] : identifier[phi] = identifier[C] [ literal[string] ] keyword[else] : identifier[phi] = identifier[C] [ literal[string] ]+( identifier[mag] - literal[int] )*(( identifier[C] [ literal[string] ]- identifier[C] [ literal[string] ])/ literal[int] ) keyword[return] identifier[phi]
def get_phi_ss(imt, mag, params): """ Returns the single station phi (or it's variance) for a given magnitude and intensity measure type according to equation 5.14 of Al Atik (2015) """ C = params[imt] if mag <= 5.0: phi = C['a'] # depends on [control=['if'], data=[]] elif mag > 6.5: phi = C['b'] # depends on [control=['if'], data=[]] else: phi = C['a'] + (mag - 5.0) * ((C['b'] - C['a']) / 1.5) return phi
def get_group_names(self):
    """
    Returns the set of Django group names that this user belongs to by
    virtue of LDAP group memberships.

    The result is computed at most once: a cached value is tried first,
    and a freshly computed set is written back to the cache.
    """
    if self._group_names is None:
        self._load_cached_attr("_group_names")
    if self._group_names is None:
        # Cache miss: derive the names from the raw LDAP group infos.
        infos = self._get_group_infos()
        name_of = self._group_type.group_name_from_info
        self._group_names = {name_of(info) for info in infos}
        self._cache_attr("_group_names")
    return self._group_names
def function[get_group_names, parameter[self]]: constant[ Returns the set of Django group names that this user belongs to by virtue of LDAP group memberships. ] if compare[name[self]._group_names is constant[None]] begin[:] call[name[self]._load_cached_attr, parameter[constant[_group_names]]] if compare[name[self]._group_names is constant[None]] begin[:] variable[group_infos] assign[=] call[name[self]._get_group_infos, parameter[]] name[self]._group_names assign[=] <ast.SetComp object at 0x7da1b23464d0> call[name[self]._cache_attr, parameter[constant[_group_names]]] return[name[self]._group_names]
keyword[def] identifier[get_group_names] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_group_names] keyword[is] keyword[None] : identifier[self] . identifier[_load_cached_attr] ( literal[string] ) keyword[if] identifier[self] . identifier[_group_names] keyword[is] keyword[None] : identifier[group_infos] = identifier[self] . identifier[_get_group_infos] () identifier[self] . identifier[_group_names] ={ identifier[self] . identifier[_group_type] . identifier[group_name_from_info] ( identifier[group_info] ) keyword[for] identifier[group_info] keyword[in] identifier[group_infos] } identifier[self] . identifier[_cache_attr] ( literal[string] ) keyword[return] identifier[self] . identifier[_group_names]
def get_group_names(self): """ Returns the set of Django group names that this user belongs to by virtue of LDAP group memberships. """ if self._group_names is None: self._load_cached_attr('_group_names') # depends on [control=['if'], data=[]] if self._group_names is None: group_infos = self._get_group_infos() self._group_names = {self._group_type.group_name_from_info(group_info) for group_info in group_infos} self._cache_attr('_group_names') # depends on [control=['if'], data=[]] return self._group_names
def assertNotNone(expr, message=None):
    """
    Assert that expr is not None.

    :param expr: expression.
    :param message: Message set to raised Exception
    :raises: TestStepFail if expr is None.
    """
    if expr is not None:
        return
    # Fall back to a generated message when the caller supplied none.
    if message is not None:
        raise TestStepFail(format_message(message))
    raise TestStepFail("Assert: %s == None" % str(expr))
def function[assertNotNone, parameter[expr, message]]: constant[ Assert that expr is not None. :param expr: expression. :param message: Message set to raised Exception :raises: TestStepFail if expr is None. ] if compare[name[expr] is constant[None]] begin[:] <ast.Raise object at 0x7da1b0e651e0>
keyword[def] identifier[assertNotNone] ( identifier[expr] , identifier[message] = keyword[None] ): literal[string] keyword[if] identifier[expr] keyword[is] keyword[None] : keyword[raise] identifier[TestStepFail] ( identifier[format_message] ( identifier[message] ) keyword[if] identifier[message] keyword[is] keyword[not] keyword[None] keyword[else] literal[string] % identifier[str] ( identifier[expr] ))
def assertNotNone(expr, message=None): """ Assert that expr is not None. :param expr: expression. :param message: Message set to raised Exception :raises: TestStepFail if expr is None. """ if expr is None: raise TestStepFail(format_message(message) if message is not None else 'Assert: %s == None' % str(expr)) # depends on [control=['if'], data=['expr']]
def profile_prior_model_dict(self):
    """
    Returns
    -------
    profile_prior_model_dict: {str: PriorModel}
        A dictionary mapping instance-variable names to those PriorModels
        whose wrapped class is a profile class.
    """
    selected = {}
    for name, model in self.__dict__.items():
        if isinstance(model, pm.PriorModel) and is_profile_class(model.cls):
            selected[name] = model
    return selected
def function[profile_prior_model_dict, parameter[self]]: constant[ Returns ------- profile_prior_model_dict: {str: PriorModel} A dictionary mapping_matrix instance variable names to variable profiles. ] return[<ast.DictComp object at 0x7da20c76f970>]
keyword[def] identifier[profile_prior_model_dict] ( identifier[self] ): literal[string] keyword[return] { identifier[key] : identifier[value] keyword[for] identifier[key] , identifier[value] keyword[in] identifier[filter] ( keyword[lambda] identifier[t] : identifier[isinstance] ( identifier[t] [ literal[int] ], identifier[pm] . identifier[PriorModel] ) keyword[and] identifier[is_profile_class] ( identifier[t] [ literal[int] ]. identifier[cls] ), identifier[self] . identifier[__dict__] . identifier[items] ())}
def profile_prior_model_dict(self): """ Returns ------- profile_prior_model_dict: {str: PriorModel} A dictionary mapping_matrix instance variable names to variable profiles. """ return {key: value for (key, value) in filter(lambda t: isinstance(t[1], pm.PriorModel) and is_profile_class(t[1].cls), self.__dict__.items())}
def get_parameters(self):
    """
    Method to estimate the model parameters (CPDs) using Maximum Likelihood
    Estimation.

    Returns
    -------
    parameters: list
        List of TabularCPDs, one for each variable of the model, in sorted
        node-name order.

    Examples
    --------
    >>> import numpy as np
    >>> import pandas as pd
    >>> from pgmpy.models import BayesianModel
    >>> from pgmpy.estimators import MaximumLikelihoodEstimator
    >>> values = pd.DataFrame(np.random.randint(low=0, high=2, size=(1000, 4)),
    ...                       columns=['A', 'B', 'C', 'D'])
    >>> model = BayesianModel([('A', 'B'), ('C', 'B'), ('C', 'D')])
    >>> estimator = MaximumLikelihoodEstimator(model, values)
    >>> estimator.get_parameters()
    [<TabularCPD representing P(C:2) at 0x7f7b534251d0>,
    <TabularCPD representing P(B:2 | C:2, A:2) at 0x7f7b4dfd4da0>,
    <TabularCPD representing P(A:2) at 0x7f7b4dfd4fd0>,
    <TabularCPD representing P(D:2 | C:2) at 0x7f7b4df822b0>]
    """
    # One CPD per node, estimated independently.
    return [self.estimate_cpd(node) for node in sorted(self.model.nodes())]
def function[get_parameters, parameter[self]]: constant[ Method to estimate the model parameters (CPDs) using Maximum Likelihood Estimation. Returns ------- parameters: list List of TabularCPDs, one for each variable of the model Examples -------- >>> import numpy as np >>> import pandas as pd >>> from pgmpy.models import BayesianModel >>> from pgmpy.estimators import MaximumLikelihoodEstimator >>> values = pd.DataFrame(np.random.randint(low=0, high=2, size=(1000, 4)), ... columns=['A', 'B', 'C', 'D']) >>> model = BayesianModel([('A', 'B'), ('C', 'B'), ('C', 'D')) >>> estimator = MaximumLikelihoodEstimator(model, values) >>> estimator.get_parameters() [<TabularCPD representing P(C:2) at 0x7f7b534251d0>, <TabularCPD representing P(B:2 | C:2, A:2) at 0x7f7b4dfd4da0>, <TabularCPD representing P(A:2) at 0x7f7b4dfd4fd0>, <TabularCPD representing P(D:2 | C:2) at 0x7f7b4df822b0>] ] variable[parameters] assign[=] list[[]] for taget[name[node]] in starred[call[name[sorted], parameter[call[name[self].model.nodes, parameter[]]]]] begin[:] variable[cpd] assign[=] call[name[self].estimate_cpd, parameter[name[node]]] call[name[parameters].append, parameter[name[cpd]]] return[name[parameters]]
keyword[def] identifier[get_parameters] ( identifier[self] ): literal[string] identifier[parameters] =[] keyword[for] identifier[node] keyword[in] identifier[sorted] ( identifier[self] . identifier[model] . identifier[nodes] ()): identifier[cpd] = identifier[self] . identifier[estimate_cpd] ( identifier[node] ) identifier[parameters] . identifier[append] ( identifier[cpd] ) keyword[return] identifier[parameters]
def get_parameters(self): """ Method to estimate the model parameters (CPDs) using Maximum Likelihood Estimation. Returns ------- parameters: list List of TabularCPDs, one for each variable of the model Examples -------- >>> import numpy as np >>> import pandas as pd >>> from pgmpy.models import BayesianModel >>> from pgmpy.estimators import MaximumLikelihoodEstimator >>> values = pd.DataFrame(np.random.randint(low=0, high=2, size=(1000, 4)), ... columns=['A', 'B', 'C', 'D']) >>> model = BayesianModel([('A', 'B'), ('C', 'B'), ('C', 'D')) >>> estimator = MaximumLikelihoodEstimator(model, values) >>> estimator.get_parameters() [<TabularCPD representing P(C:2) at 0x7f7b534251d0>, <TabularCPD representing P(B:2 | C:2, A:2) at 0x7f7b4dfd4da0>, <TabularCPD representing P(A:2) at 0x7f7b4dfd4fd0>, <TabularCPD representing P(D:2 | C:2) at 0x7f7b4df822b0>] """ parameters = [] for node in sorted(self.model.nodes()): cpd = self.estimate_cpd(node) parameters.append(cpd) # depends on [control=['for'], data=['node']] return parameters
def get_cosmo(self, Dd, Ds_Dds):
    """
    Return the values of H0 and omega_m computed with an interpolation.

    :param Dd: float
    :param Ds_Dds: float
    :return: (H0, omega_m) when the interpolated cosmology reproduces the
        input (Dd, Ds_Dds) to within 1%; ([-1], [-1]) otherwise, flagging
        an inconsistent interpolation
    """
    # Build the interpolators lazily on first use.
    if not hasattr(self, '_f_H0') or not hasattr(self, '_f_omega_m'):
        self._make_interpolation()
    H0 = self._f_H0(Dd, Ds_Dds)
    omega_m = self._f_omega_m(Dd, Ds_Dds)
    # Consistency check: the forward model must reproduce the requested
    # values; otherwise the interpolation is rejected as out of range.
    # (A leftover debug print of H0 was removed here.)
    Dd_new, Ds_Dds_new = self.cosmo2Dd_Ds_Dds(H0[0], omega_m[0])
    if abs(Dd - Dd_new)/Dd > 0.01 or abs(Ds_Dds - Ds_Dds_new)/Ds_Dds > 0.01:
        return [-1], [-1]
    else:
        return H0[0], omega_m[0]
def function[get_cosmo, parameter[self, Dd, Ds_Dds]]: constant[ return the values of H0 and omega_m computed with an interpolation :param Dd: flat :param Ds_Dds: float :return: ] if <ast.BoolOp object at 0x7da18bcc9330> begin[:] call[name[self]._make_interpolation, parameter[]] variable[H0] assign[=] call[name[self]._f_H0, parameter[name[Dd], name[Ds_Dds]]] call[name[print], parameter[name[H0], constant[H0]]] variable[omega_m] assign[=] call[name[self]._f_omega_m, parameter[name[Dd], name[Ds_Dds]]] <ast.Tuple object at 0x7da18bcc9f60> assign[=] call[name[self].cosmo2Dd_Ds_Dds, parameter[call[name[H0]][constant[0]], call[name[omega_m]][constant[0]]]] if <ast.BoolOp object at 0x7da18bccb580> begin[:] return[tuple[[<ast.List object at 0x7da18bccb6d0>, <ast.List object at 0x7da18bccb250>]]]
keyword[def] identifier[get_cosmo] ( identifier[self] , identifier[Dd] , identifier[Ds_Dds] ): literal[string] keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[or] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_make_interpolation] () identifier[H0] = identifier[self] . identifier[_f_H0] ( identifier[Dd] , identifier[Ds_Dds] ) identifier[print] ( identifier[H0] , literal[string] ) identifier[omega_m] = identifier[self] . identifier[_f_omega_m] ( identifier[Dd] , identifier[Ds_Dds] ) identifier[Dd_new] , identifier[Ds_Dds_new] = identifier[self] . identifier[cosmo2Dd_Ds_Dds] ( identifier[H0] [ literal[int] ], identifier[omega_m] [ literal[int] ]) keyword[if] identifier[abs] ( identifier[Dd] - identifier[Dd_new] )/ identifier[Dd] > literal[int] keyword[or] identifier[abs] ( identifier[Ds_Dds] - identifier[Ds_Dds_new] )/ identifier[Ds_Dds] > literal[int] : keyword[return] [- literal[int] ],[- literal[int] ] keyword[else] : keyword[return] identifier[H0] [ literal[int] ], identifier[omega_m] [ literal[int] ]
def get_cosmo(self, Dd, Ds_Dds): """ return the values of H0 and omega_m computed with an interpolation :param Dd: flat :param Ds_Dds: float :return: """ if not hasattr(self, '_f_H0') or not hasattr(self, '_f_omega_m'): self._make_interpolation() # depends on [control=['if'], data=[]] H0 = self._f_H0(Dd, Ds_Dds) print(H0, 'H0') omega_m = self._f_omega_m(Dd, Ds_Dds) (Dd_new, Ds_Dds_new) = self.cosmo2Dd_Ds_Dds(H0[0], omega_m[0]) if abs(Dd - Dd_new) / Dd > 0.01 or abs(Ds_Dds - Ds_Dds_new) / Ds_Dds > 0.01: return ([-1], [-1]) # depends on [control=['if'], data=[]] else: return (H0[0], omega_m[0])
def contribute_to_class(cls, main_cls, name): """ Handle the inner 'Translation' class. """ # delay the creation of the *Translation until the master model is # fully created signals.class_prepared.connect(cls.finish_multilingual_class, sender=main_cls, weak=False) # connect the post_save signal on master class to a handler # that saves translations signals.post_save.connect(translation_save_translated_fields, sender=main_cls)
def function[contribute_to_class, parameter[cls, main_cls, name]]: constant[ Handle the inner 'Translation' class. ] call[name[signals].class_prepared.connect, parameter[name[cls].finish_multilingual_class]] call[name[signals].post_save.connect, parameter[name[translation_save_translated_fields]]]
keyword[def] identifier[contribute_to_class] ( identifier[cls] , identifier[main_cls] , identifier[name] ): literal[string] identifier[signals] . identifier[class_prepared] . identifier[connect] ( identifier[cls] . identifier[finish_multilingual_class] , identifier[sender] = identifier[main_cls] , identifier[weak] = keyword[False] ) identifier[signals] . identifier[post_save] . identifier[connect] ( identifier[translation_save_translated_fields] , identifier[sender] = identifier[main_cls] )
def contribute_to_class(cls, main_cls, name): """ Handle the inner 'Translation' class. """ # delay the creation of the *Translation until the master model is # fully created signals.class_prepared.connect(cls.finish_multilingual_class, sender=main_cls, weak=False) # connect the post_save signal on master class to a handler # that saves translations signals.post_save.connect(translation_save_translated_fields, sender=main_cls)
def update_attachment(self, volumeID, attachmentID, metadata):
    '''update an existing attachment

    the given metadata dict will be merged with the old one.
    only the following fields could be updated:
    [name, mime, notes, download_count]

    :param volumeID: id of the volume containing the attachment
    :param attachmentID: id of the attachment to update
    :param metadata: dict of fields to merge into the attachment
    :raises ValueError: if a non-modifiable field or a badly typed value
        is given
    :raises NotFoundException: if the attachment is not in the volume
    '''
    log.debug('updating metadata of attachment {} from volume {}'.format(attachmentID, volumeID))
    # (field, accepted type, human-readable type name), in check order.
    field_specs = (('name', basestring, 'string'),
                   ('mime', basestring, 'string'),
                   ('notes', basestring, 'string'),
                   ('download_count', Integral, 'number'))
    modifiable_fields = frozenset(spec[0] for spec in field_specs)
    for k in metadata:
        if k not in modifiable_fields:
            raise ValueError('Not modifiable field given: {}'.format(k))
    # Data-driven type validation replaces four copy-pasted checks.
    for field, accepted_type, type_name in field_specs:
        if field in metadata and not isinstance(metadata[field], accepted_type):
            raise ValueError("'{}' must be a {}".format(field, type_name))
    rawVolume = self._req_raw_volume(volumeID)
    for attachment in rawVolume['_source']['_attachments']:
        if attachment['id'] == attachmentID:
            attachment.update(metadata)
            self._db.modify_book(id=volumeID,
                                 body=rawVolume['_source'],
                                 version=rawVolume['_version'])
            return
    # Fixed grammar of the error message ("Could not found" -> "find").
    raise NotFoundException('Could not find attachment with id {} in volume {}'.format(attachmentID, volumeID))
def function[update_attachment, parameter[self, volumeID, attachmentID, metadata]]: constant[update an existing attachment the given metadata dict will be merged with the old one. only the following fields could be updated: [name, mime, notes, download_count] ] call[name[log].debug, parameter[call[constant[updating metadata of attachment {} from volume {}].format, parameter[name[attachmentID], name[volumeID]]]]] variable[modifiable_fields] assign[=] list[[<ast.Constant object at 0x7da1b2631720>, <ast.Constant object at 0x7da1b26304c0>, <ast.Constant object at 0x7da1b2630070>, <ast.Constant object at 0x7da1b2632f80>]] for taget[name[k]] in starred[call[name[metadata].keys, parameter[]]] begin[:] if compare[name[k] <ast.NotIn object at 0x7da2590d7190> name[modifiable_fields]] begin[:] <ast.Raise object at 0x7da1b2632cb0> if <ast.BoolOp object at 0x7da1b2632140> begin[:] <ast.Raise object at 0x7da1b2632080> if <ast.BoolOp object at 0x7da1b2632ef0> begin[:] <ast.Raise object at 0x7da1b2633040> if <ast.BoolOp object at 0x7da1b2633190> begin[:] <ast.Raise object at 0x7da1b2631990> if <ast.BoolOp object at 0x7da1b2632c80> begin[:] <ast.Raise object at 0x7da1b26310c0> variable[rawVolume] assign[=] call[name[self]._req_raw_volume, parameter[name[volumeID]]] for taget[name[attachment]] in starred[call[call[name[rawVolume]][constant[_source]]][constant[_attachments]]] begin[:] if compare[call[name[attachment]][constant[id]] equal[==] name[attachmentID]] begin[:] call[name[attachment].update, parameter[name[metadata]]] call[name[self]._db.modify_book, parameter[]] return[None] <ast.Raise object at 0x7da1b26331c0>
keyword[def] identifier[update_attachment] ( identifier[self] , identifier[volumeID] , identifier[attachmentID] , identifier[metadata] ): literal[string] identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[attachmentID] , identifier[volumeID] )) identifier[modifiable_fields] =[ literal[string] , literal[string] , literal[string] , literal[string] ] keyword[for] identifier[k] keyword[in] identifier[metadata] . identifier[keys] (): keyword[if] identifier[k] keyword[not] keyword[in] identifier[modifiable_fields] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[k] )) keyword[if] literal[string] keyword[in] identifier[metadata] keyword[and] keyword[not] identifier[isinstance] ( identifier[metadata] [ literal[string] ], identifier[basestring] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[metadata] keyword[and] keyword[not] identifier[isinstance] ( identifier[metadata] [ literal[string] ], identifier[basestring] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[metadata] keyword[and] keyword[not] identifier[isinstance] ( identifier[metadata] [ literal[string] ], identifier[basestring] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[metadata] keyword[and] keyword[not] identifier[isinstance] ( identifier[metadata] [ literal[string] ], identifier[Integral] ): keyword[raise] identifier[ValueError] ( literal[string] ) identifier[rawVolume] = identifier[self] . identifier[_req_raw_volume] ( identifier[volumeID] ) keyword[for] identifier[attachment] keyword[in] identifier[rawVolume] [ literal[string] ][ literal[string] ]: keyword[if] identifier[attachment] [ literal[string] ]== identifier[attachmentID] : identifier[attachment] . identifier[update] ( identifier[metadata] ) identifier[self] . identifier[_db] . 
identifier[modify_book] ( identifier[id] = identifier[volumeID] , identifier[body] = identifier[rawVolume] [ literal[string] ], identifier[version] = identifier[rawVolume] [ literal[string] ]) keyword[return] keyword[raise] identifier[NotFoundException] ( literal[string] . identifier[format] ( identifier[attachmentID] , identifier[volumeID] ))
def update_attachment(self, volumeID, attachmentID, metadata): """update an existing attachment the given metadata dict will be merged with the old one. only the following fields could be updated: [name, mime, notes, download_count] """ log.debug('updating metadata of attachment {} from volume {}'.format(attachmentID, volumeID)) modifiable_fields = ['name', 'mime', 'notes', 'download_count'] for k in metadata.keys(): if k not in modifiable_fields: raise ValueError('Not modifiable field given: {}'.format(k)) # depends on [control=['if'], data=['k']] # depends on [control=['for'], data=['k']] if 'name' in metadata and (not isinstance(metadata['name'], basestring)): raise ValueError("'name' must be a string") # depends on [control=['if'], data=[]] if 'mime' in metadata and (not isinstance(metadata['mime'], basestring)): raise ValueError("'mime' must be a string") # depends on [control=['if'], data=[]] if 'notes' in metadata and (not isinstance(metadata['notes'], basestring)): raise ValueError("'notes' must be a string") # depends on [control=['if'], data=[]] if 'download_count' in metadata and (not isinstance(metadata['download_count'], Integral)): raise ValueError("'download_count' must be a number") # depends on [control=['if'], data=[]] rawVolume = self._req_raw_volume(volumeID) for attachment in rawVolume['_source']['_attachments']: if attachment['id'] == attachmentID: attachment.update(metadata) self._db.modify_book(id=volumeID, body=rawVolume['_source'], version=rawVolume['_version']) return # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['attachment']] raise NotFoundException('Could not found attachment with id {} in volume {}'.format(attachmentID, volumeID))
def application_list(request): """ a user wants to see all applications possible. """ if util.is_admin(request): queryset = Application.objects.all() else: queryset = Application.objects.get_for_applicant(request.user) q_filter = ApplicationFilter(request.GET, queryset=queryset) table = ApplicationTable(q_filter.qs.order_by("-expires")) tables.RequestConfig(request).configure(table) spec = [] for name, value in six.iteritems(q_filter.form.cleaned_data): if value is not None and value != "": name = name.replace('_', ' ').capitalize() spec.append((name, value)) return render( template_name="kgapplications/application_list.html", context={ 'table': table, 'filter': q_filter, 'spec': spec, 'title': "Application list", }, request=request)
def function[application_list, parameter[request]]: constant[ a user wants to see all applications possible. ] if call[name[util].is_admin, parameter[name[request]]] begin[:] variable[queryset] assign[=] call[name[Application].objects.all, parameter[]] variable[q_filter] assign[=] call[name[ApplicationFilter], parameter[name[request].GET]] variable[table] assign[=] call[name[ApplicationTable], parameter[call[name[q_filter].qs.order_by, parameter[constant[-expires]]]]] call[call[name[tables].RequestConfig, parameter[name[request]]].configure, parameter[name[table]]] variable[spec] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da20c794f70>, <ast.Name object at 0x7da20c795e70>]]] in starred[call[name[six].iteritems, parameter[name[q_filter].form.cleaned_data]]] begin[:] if <ast.BoolOp object at 0x7da20c795090> begin[:] variable[name] assign[=] call[call[name[name].replace, parameter[constant[_], constant[ ]]].capitalize, parameter[]] call[name[spec].append, parameter[tuple[[<ast.Name object at 0x7da20c796aa0>, <ast.Name object at 0x7da20c795900>]]]] return[call[name[render], parameter[]]]
keyword[def] identifier[application_list] ( identifier[request] ): literal[string] keyword[if] identifier[util] . identifier[is_admin] ( identifier[request] ): identifier[queryset] = identifier[Application] . identifier[objects] . identifier[all] () keyword[else] : identifier[queryset] = identifier[Application] . identifier[objects] . identifier[get_for_applicant] ( identifier[request] . identifier[user] ) identifier[q_filter] = identifier[ApplicationFilter] ( identifier[request] . identifier[GET] , identifier[queryset] = identifier[queryset] ) identifier[table] = identifier[ApplicationTable] ( identifier[q_filter] . identifier[qs] . identifier[order_by] ( literal[string] )) identifier[tables] . identifier[RequestConfig] ( identifier[request] ). identifier[configure] ( identifier[table] ) identifier[spec] =[] keyword[for] identifier[name] , identifier[value] keyword[in] identifier[six] . identifier[iteritems] ( identifier[q_filter] . identifier[form] . identifier[cleaned_data] ): keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] keyword[and] identifier[value] != literal[string] : identifier[name] = identifier[name] . identifier[replace] ( literal[string] , literal[string] ). identifier[capitalize] () identifier[spec] . identifier[append] (( identifier[name] , identifier[value] )) keyword[return] identifier[render] ( identifier[template_name] = literal[string] , identifier[context] ={ literal[string] : identifier[table] , literal[string] : identifier[q_filter] , literal[string] : identifier[spec] , literal[string] : literal[string] , }, identifier[request] = identifier[request] )
def application_list(request): """ a user wants to see all applications possible. """ if util.is_admin(request): queryset = Application.objects.all() # depends on [control=['if'], data=[]] else: queryset = Application.objects.get_for_applicant(request.user) q_filter = ApplicationFilter(request.GET, queryset=queryset) table = ApplicationTable(q_filter.qs.order_by('-expires')) tables.RequestConfig(request).configure(table) spec = [] for (name, value) in six.iteritems(q_filter.form.cleaned_data): if value is not None and value != '': name = name.replace('_', ' ').capitalize() spec.append((name, value)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return render(template_name='kgapplications/application_list.html', context={'table': table, 'filter': q_filter, 'spec': spec, 'title': 'Application list'}, request=request)
def process_stats(mv_districts, n_of_districts, source, mode, critical, filename, output): '''Generates stats dataframes for districts in mv_districts. If source=='ding0', then runned districts are saved to a pickle named filename+str(n_of_districts[0])+'_to_'+str(n_of_districts[-1])+'.pkl' Parameters ---------- districts_list: list of int List with all districts to be run. n_of_districts: int Number of districts to be run in each cluster source: str If 'pkl', pickle files are read. If 'ding0', ding0 is run over the districts. mode: str If 'MV', medium voltage stats are calculated. If 'LV', low voltage stats are calculated. If empty, medium and low voltage stats are calculated. critical: bool If True, critical nodes and branches are returned filename: str filename prefix for saving pickles output: outer variable where the output is stored as a tuple of 6 lists:: * mv_stats: MV stats DataFrames. If mode=='LV', then DataFrame is empty. * lv_stats: LV stats DataFrames. If mode=='MV', then DataFrame is empty. * mv_crit_nodes: MV critical nodes stats DataFrames. If mode=='LV', then DataFrame is empty. If critical==False, then DataFrame is empty. * mv_crit_edges: MV critical edges stats DataFrames. If mode=='LV', then DataFrame is empty. If critical==False, then DataFrame is empty. * lv_crit_nodes: LV critical nodes stats DataFrames. If mode=='MV', then DataFrame is empty. If critical==False, then DataFrame is empty. * lv_crit_edges: LV critical edges stats DataFrames. If mode=='MV', then DataFrame is empty. If critical==False, then DataFrame is empty. 
''' ####################################################################### # decide what exactly to do with MV LV if mode == 'MV': calc_mv = True calc_lv = False elif mode == 'LV': calc_mv = False calc_lv = True else: calc_mv = True calc_lv = True ####################################################################### clusters = [mv_districts[x:x + n_of_districts] for x in range(0, len(mv_districts), n_of_districts)] mv_stats = [] lv_stats = [] mv_crit_nodes = [] mv_crit_edges = [] lv_crit_nodes = [] lv_crit_edges = [] ####################################################################### for cl in clusters: nw_name = filename + str(cl[0]) if not cl[0] == cl[-1]: nw_name = nw_name + '_to_' + str(cl[-1]) nw = NetworkDing0(name=nw_name) if source == 'pkl': print('\n########################################') print(' Reading data from pickle district', cl) print('########################################') try: nw = load_nd_from_pickle(nw_name + '.pkl') except Exception: continue else: # database connection/ session engine = db.connection(section='oedb') session = sessionmaker(bind=engine)() print('\n########################################') print(' Running ding0 for district', cl) print('########################################') try: nw.run_ding0(session=session, mv_grid_districts_no=cl) try: save_nd_to_pickle(nw, filename=nw_name + '.pkl') except Exception: continue except Exception: continue # Close database connection if calc_mv: stats = calculate_mvgd_stats(nw) mv_stats.append(stats) if calc_lv: stats = calculate_lvgd_stats(nw) lv_stats.append(stats) if critical and calc_mv: stats = calculate_mvgd_voltage_current_stats(nw) mv_crit_nodes.append(stats[0]) mv_crit_edges.append(stats[1]) if critical and calc_lv: stats = calculate_lvgd_voltage_current_stats(nw) lv_crit_nodes.append(stats[0]) lv_crit_edges.append(stats[1]) ####################################################################### salida = (mv_stats, lv_stats, mv_crit_nodes, mv_crit_edges, lv_crit_nodes, 
lv_crit_edges) output.put(salida)
def function[process_stats, parameter[mv_districts, n_of_districts, source, mode, critical, filename, output]]: constant[Generates stats dataframes for districts in mv_districts. If source=='ding0', then runned districts are saved to a pickle named filename+str(n_of_districts[0])+'_to_'+str(n_of_districts[-1])+'.pkl' Parameters ---------- districts_list: list of int List with all districts to be run. n_of_districts: int Number of districts to be run in each cluster source: str If 'pkl', pickle files are read. If 'ding0', ding0 is run over the districts. mode: str If 'MV', medium voltage stats are calculated. If 'LV', low voltage stats are calculated. If empty, medium and low voltage stats are calculated. critical: bool If True, critical nodes and branches are returned filename: str filename prefix for saving pickles output: outer variable where the output is stored as a tuple of 6 lists:: * mv_stats: MV stats DataFrames. If mode=='LV', then DataFrame is empty. * lv_stats: LV stats DataFrames. If mode=='MV', then DataFrame is empty. * mv_crit_nodes: MV critical nodes stats DataFrames. If mode=='LV', then DataFrame is empty. If critical==False, then DataFrame is empty. * mv_crit_edges: MV critical edges stats DataFrames. If mode=='LV', then DataFrame is empty. If critical==False, then DataFrame is empty. * lv_crit_nodes: LV critical nodes stats DataFrames. If mode=='MV', then DataFrame is empty. If critical==False, then DataFrame is empty. * lv_crit_edges: LV critical edges stats DataFrames. If mode=='MV', then DataFrame is empty. If critical==False, then DataFrame is empty. 
] if compare[name[mode] equal[==] constant[MV]] begin[:] variable[calc_mv] assign[=] constant[True] variable[calc_lv] assign[=] constant[False] variable[clusters] assign[=] <ast.ListComp object at 0x7da2047eb7f0> variable[mv_stats] assign[=] list[[]] variable[lv_stats] assign[=] list[[]] variable[mv_crit_nodes] assign[=] list[[]] variable[mv_crit_edges] assign[=] list[[]] variable[lv_crit_nodes] assign[=] list[[]] variable[lv_crit_edges] assign[=] list[[]] for taget[name[cl]] in starred[name[clusters]] begin[:] variable[nw_name] assign[=] binary_operation[name[filename] + call[name[str], parameter[call[name[cl]][constant[0]]]]] if <ast.UnaryOp object at 0x7da2047e92a0> begin[:] variable[nw_name] assign[=] binary_operation[binary_operation[name[nw_name] + constant[_to_]] + call[name[str], parameter[call[name[cl]][<ast.UnaryOp object at 0x7da18ede5600>]]]] variable[nw] assign[=] call[name[NetworkDing0], parameter[]] if compare[name[source] equal[==] constant[pkl]] begin[:] call[name[print], parameter[constant[ ########################################]]] call[name[print], parameter[constant[ Reading data from pickle district], name[cl]]] call[name[print], parameter[constant[########################################]]] <ast.Try object at 0x7da18ede4e50> if name[calc_mv] begin[:] variable[stats] assign[=] call[name[calculate_mvgd_stats], parameter[name[nw]]] call[name[mv_stats].append, parameter[name[stats]]] if name[calc_lv] begin[:] variable[stats] assign[=] call[name[calculate_lvgd_stats], parameter[name[nw]]] call[name[lv_stats].append, parameter[name[stats]]] if <ast.BoolOp object at 0x7da20c993d90> begin[:] variable[stats] assign[=] call[name[calculate_mvgd_voltage_current_stats], parameter[name[nw]]] call[name[mv_crit_nodes].append, parameter[call[name[stats]][constant[0]]]] call[name[mv_crit_edges].append, parameter[call[name[stats]][constant[1]]]] if <ast.BoolOp object at 0x7da20c990eb0> begin[:] variable[stats] assign[=] 
call[name[calculate_lvgd_voltage_current_stats], parameter[name[nw]]] call[name[lv_crit_nodes].append, parameter[call[name[stats]][constant[0]]]] call[name[lv_crit_edges].append, parameter[call[name[stats]][constant[1]]]] variable[salida] assign[=] tuple[[<ast.Name object at 0x7da20c9926b0>, <ast.Name object at 0x7da20c990880>, <ast.Name object at 0x7da20c991d50>, <ast.Name object at 0x7da20c992f80>, <ast.Name object at 0x7da20c993ee0>, <ast.Name object at 0x7da20c991480>]] call[name[output].put, parameter[name[salida]]]
keyword[def] identifier[process_stats] ( identifier[mv_districts] , identifier[n_of_districts] , identifier[source] , identifier[mode] , identifier[critical] , identifier[filename] , identifier[output] ): literal[string] keyword[if] identifier[mode] == literal[string] : identifier[calc_mv] = keyword[True] identifier[calc_lv] = keyword[False] keyword[elif] identifier[mode] == literal[string] : identifier[calc_mv] = keyword[False] identifier[calc_lv] = keyword[True] keyword[else] : identifier[calc_mv] = keyword[True] identifier[calc_lv] = keyword[True] identifier[clusters] =[ identifier[mv_districts] [ identifier[x] : identifier[x] + identifier[n_of_districts] ] keyword[for] identifier[x] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[mv_districts] ), identifier[n_of_districts] )] identifier[mv_stats] =[] identifier[lv_stats] =[] identifier[mv_crit_nodes] =[] identifier[mv_crit_edges] =[] identifier[lv_crit_nodes] =[] identifier[lv_crit_edges] =[] keyword[for] identifier[cl] keyword[in] identifier[clusters] : identifier[nw_name] = identifier[filename] + identifier[str] ( identifier[cl] [ literal[int] ]) keyword[if] keyword[not] identifier[cl] [ literal[int] ]== identifier[cl] [- literal[int] ]: identifier[nw_name] = identifier[nw_name] + literal[string] + identifier[str] ( identifier[cl] [- literal[int] ]) identifier[nw] = identifier[NetworkDing0] ( identifier[name] = identifier[nw_name] ) keyword[if] identifier[source] == literal[string] : identifier[print] ( literal[string] ) identifier[print] ( literal[string] , identifier[cl] ) identifier[print] ( literal[string] ) keyword[try] : identifier[nw] = identifier[load_nd_from_pickle] ( identifier[nw_name] + literal[string] ) keyword[except] identifier[Exception] : keyword[continue] keyword[else] : identifier[engine] = identifier[db] . 
identifier[connection] ( identifier[section] = literal[string] ) identifier[session] = identifier[sessionmaker] ( identifier[bind] = identifier[engine] )() identifier[print] ( literal[string] ) identifier[print] ( literal[string] , identifier[cl] ) identifier[print] ( literal[string] ) keyword[try] : identifier[nw] . identifier[run_ding0] ( identifier[session] = identifier[session] , identifier[mv_grid_districts_no] = identifier[cl] ) keyword[try] : identifier[save_nd_to_pickle] ( identifier[nw] , identifier[filename] = identifier[nw_name] + literal[string] ) keyword[except] identifier[Exception] : keyword[continue] keyword[except] identifier[Exception] : keyword[continue] keyword[if] identifier[calc_mv] : identifier[stats] = identifier[calculate_mvgd_stats] ( identifier[nw] ) identifier[mv_stats] . identifier[append] ( identifier[stats] ) keyword[if] identifier[calc_lv] : identifier[stats] = identifier[calculate_lvgd_stats] ( identifier[nw] ) identifier[lv_stats] . identifier[append] ( identifier[stats] ) keyword[if] identifier[critical] keyword[and] identifier[calc_mv] : identifier[stats] = identifier[calculate_mvgd_voltage_current_stats] ( identifier[nw] ) identifier[mv_crit_nodes] . identifier[append] ( identifier[stats] [ literal[int] ]) identifier[mv_crit_edges] . identifier[append] ( identifier[stats] [ literal[int] ]) keyword[if] identifier[critical] keyword[and] identifier[calc_lv] : identifier[stats] = identifier[calculate_lvgd_voltage_current_stats] ( identifier[nw] ) identifier[lv_crit_nodes] . identifier[append] ( identifier[stats] [ literal[int] ]) identifier[lv_crit_edges] . identifier[append] ( identifier[stats] [ literal[int] ]) identifier[salida] =( identifier[mv_stats] , identifier[lv_stats] , identifier[mv_crit_nodes] , identifier[mv_crit_edges] , identifier[lv_crit_nodes] , identifier[lv_crit_edges] ) identifier[output] . identifier[put] ( identifier[salida] )
def process_stats(mv_districts, n_of_districts, source, mode, critical, filename, output): """Generates stats dataframes for districts in mv_districts. If source=='ding0', then runned districts are saved to a pickle named filename+str(n_of_districts[0])+'_to_'+str(n_of_districts[-1])+'.pkl' Parameters ---------- districts_list: list of int List with all districts to be run. n_of_districts: int Number of districts to be run in each cluster source: str If 'pkl', pickle files are read. If 'ding0', ding0 is run over the districts. mode: str If 'MV', medium voltage stats are calculated. If 'LV', low voltage stats are calculated. If empty, medium and low voltage stats are calculated. critical: bool If True, critical nodes and branches are returned filename: str filename prefix for saving pickles output: outer variable where the output is stored as a tuple of 6 lists:: * mv_stats: MV stats DataFrames. If mode=='LV', then DataFrame is empty. * lv_stats: LV stats DataFrames. If mode=='MV', then DataFrame is empty. * mv_crit_nodes: MV critical nodes stats DataFrames. If mode=='LV', then DataFrame is empty. If critical==False, then DataFrame is empty. * mv_crit_edges: MV critical edges stats DataFrames. If mode=='LV', then DataFrame is empty. If critical==False, then DataFrame is empty. * lv_crit_nodes: LV critical nodes stats DataFrames. If mode=='MV', then DataFrame is empty. If critical==False, then DataFrame is empty. * lv_crit_edges: LV critical edges stats DataFrames. If mode=='MV', then DataFrame is empty. If critical==False, then DataFrame is empty. 
""" ####################################################################### # decide what exactly to do with MV LV if mode == 'MV': calc_mv = True calc_lv = False # depends on [control=['if'], data=[]] elif mode == 'LV': calc_mv = False calc_lv = True # depends on [control=['if'], data=[]] else: calc_mv = True calc_lv = True ####################################################################### clusters = [mv_districts[x:x + n_of_districts] for x in range(0, len(mv_districts), n_of_districts)] mv_stats = [] lv_stats = [] mv_crit_nodes = [] mv_crit_edges = [] lv_crit_nodes = [] lv_crit_edges = [] ####################################################################### for cl in clusters: nw_name = filename + str(cl[0]) if not cl[0] == cl[-1]: nw_name = nw_name + '_to_' + str(cl[-1]) # depends on [control=['if'], data=[]] nw = NetworkDing0(name=nw_name) if source == 'pkl': print('\n########################################') print(' Reading data from pickle district', cl) print('########################################') try: nw = load_nd_from_pickle(nw_name + '.pkl') # depends on [control=['try'], data=[]] except Exception: continue # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] else: # database connection/ session engine = db.connection(section='oedb') session = sessionmaker(bind=engine)() print('\n########################################') print(' Running ding0 for district', cl) print('########################################') try: nw.run_ding0(session=session, mv_grid_districts_no=cl) try: save_nd_to_pickle(nw, filename=nw_name + '.pkl') # depends on [control=['try'], data=[]] except Exception: continue # depends on [control=['except'], data=[]] # depends on [control=['try'], data=[]] except Exception: continue # depends on [control=['except'], data=[]] # Close database connection if calc_mv: stats = calculate_mvgd_stats(nw) mv_stats.append(stats) # depends on [control=['if'], data=[]] if calc_lv: stats = 
calculate_lvgd_stats(nw) lv_stats.append(stats) # depends on [control=['if'], data=[]] if critical and calc_mv: stats = calculate_mvgd_voltage_current_stats(nw) mv_crit_nodes.append(stats[0]) mv_crit_edges.append(stats[1]) # depends on [control=['if'], data=[]] if critical and calc_lv: stats = calculate_lvgd_voltage_current_stats(nw) lv_crit_nodes.append(stats[0]) lv_crit_edges.append(stats[1]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['cl']] ####################################################################### salida = (mv_stats, lv_stats, mv_crit_nodes, mv_crit_edges, lv_crit_nodes, lv_crit_edges) output.put(salida)
def flat_map(self, func=None, name=None): """ Maps and flatterns each tuple from this stream into 0 or more tuples. For each tuple on this stream ``func(tuple)`` is called. If the result is not `None` then the the result is iterated over with each value from the iterator that is not `None` will be submitted to the return stream. If the result is `None` or an empty iterable then no tuples are submitted to the returned stream. Args: func: A callable that takes a single parameter for the tuple. If not supplied then a function equivalent to ``lambda tuple_ : tuple_`` is used. This is suitable when each tuple on this stream is an iterable to be flattened. name(str): Name of the flattened stream, defaults to a generated name. If invoking ``func`` for a tuple on the stream raises an exception then its processing element will terminate. By default the processing element will automatically restart though tuples may be lost. If ``func`` is a callable object then it may suppress exceptions by return a true value from its ``__exit__`` method. When an exception is suppressed no tuples are submitted to the flattened and mapped stream corresponding to the input tuple that caused the exception. Returns: Stream: A Stream containing flattened and mapped tuples. Raises: TypeError: if `func` does not return an iterator nor None .. versionchanged:: 1.11 `func` is optional. 
""" if func is None: func = streamsx.topology.runtime._identity if name is None: name = 'flatten' sl = _SourceLocation(_source_info(), 'flat_map') _name = self.topology.graph._requested_name(name, action='flat_map', func=func) stateful = self._determine_statefulness(func) op = self.topology.graph.addOperator(self.topology.opnamespace+"::FlatMap", func, name=_name, sl=sl, stateful=stateful) op.addInputPort(outputPort=self.oport) streamsx.topology.schema.StreamSchema._fnop_style(self.oport.schema, op, 'pyStyle') oport = op.addOutputPort(name=_name) return Stream(self.topology, oport)._make_placeable()._layout('FlatMap', name=_name, orig_name=name)
def function[flat_map, parameter[self, func, name]]: constant[ Maps and flatterns each tuple from this stream into 0 or more tuples. For each tuple on this stream ``func(tuple)`` is called. If the result is not `None` then the the result is iterated over with each value from the iterator that is not `None` will be submitted to the return stream. If the result is `None` or an empty iterable then no tuples are submitted to the returned stream. Args: func: A callable that takes a single parameter for the tuple. If not supplied then a function equivalent to ``lambda tuple_ : tuple_`` is used. This is suitable when each tuple on this stream is an iterable to be flattened. name(str): Name of the flattened stream, defaults to a generated name. If invoking ``func`` for a tuple on the stream raises an exception then its processing element will terminate. By default the processing element will automatically restart though tuples may be lost. If ``func`` is a callable object then it may suppress exceptions by return a true value from its ``__exit__`` method. When an exception is suppressed no tuples are submitted to the flattened and mapped stream corresponding to the input tuple that caused the exception. Returns: Stream: A Stream containing flattened and mapped tuples. Raises: TypeError: if `func` does not return an iterator nor None .. versionchanged:: 1.11 `func` is optional. 
] if compare[name[func] is constant[None]] begin[:] variable[func] assign[=] name[streamsx].topology.runtime._identity if compare[name[name] is constant[None]] begin[:] variable[name] assign[=] constant[flatten] variable[sl] assign[=] call[name[_SourceLocation], parameter[call[name[_source_info], parameter[]], constant[flat_map]]] variable[_name] assign[=] call[name[self].topology.graph._requested_name, parameter[name[name]]] variable[stateful] assign[=] call[name[self]._determine_statefulness, parameter[name[func]]] variable[op] assign[=] call[name[self].topology.graph.addOperator, parameter[binary_operation[name[self].topology.opnamespace + constant[::FlatMap]], name[func]]] call[name[op].addInputPort, parameter[]] call[name[streamsx].topology.schema.StreamSchema._fnop_style, parameter[name[self].oport.schema, name[op], constant[pyStyle]]] variable[oport] assign[=] call[name[op].addOutputPort, parameter[]] return[call[call[call[name[Stream], parameter[name[self].topology, name[oport]]]._make_placeable, parameter[]]._layout, parameter[constant[FlatMap]]]]
keyword[def] identifier[flat_map] ( identifier[self] , identifier[func] = keyword[None] , identifier[name] = keyword[None] ): literal[string] keyword[if] identifier[func] keyword[is] keyword[None] : identifier[func] = identifier[streamsx] . identifier[topology] . identifier[runtime] . identifier[_identity] keyword[if] identifier[name] keyword[is] keyword[None] : identifier[name] = literal[string] identifier[sl] = identifier[_SourceLocation] ( identifier[_source_info] (), literal[string] ) identifier[_name] = identifier[self] . identifier[topology] . identifier[graph] . identifier[_requested_name] ( identifier[name] , identifier[action] = literal[string] , identifier[func] = identifier[func] ) identifier[stateful] = identifier[self] . identifier[_determine_statefulness] ( identifier[func] ) identifier[op] = identifier[self] . identifier[topology] . identifier[graph] . identifier[addOperator] ( identifier[self] . identifier[topology] . identifier[opnamespace] + literal[string] , identifier[func] , identifier[name] = identifier[_name] , identifier[sl] = identifier[sl] , identifier[stateful] = identifier[stateful] ) identifier[op] . identifier[addInputPort] ( identifier[outputPort] = identifier[self] . identifier[oport] ) identifier[streamsx] . identifier[topology] . identifier[schema] . identifier[StreamSchema] . identifier[_fnop_style] ( identifier[self] . identifier[oport] . identifier[schema] , identifier[op] , literal[string] ) identifier[oport] = identifier[op] . identifier[addOutputPort] ( identifier[name] = identifier[_name] ) keyword[return] identifier[Stream] ( identifier[self] . identifier[topology] , identifier[oport] ). identifier[_make_placeable] (). identifier[_layout] ( literal[string] , identifier[name] = identifier[_name] , identifier[orig_name] = identifier[name] )
def flat_map(self, func=None, name=None): """ Maps and flatterns each tuple from this stream into 0 or more tuples. For each tuple on this stream ``func(tuple)`` is called. If the result is not `None` then the the result is iterated over with each value from the iterator that is not `None` will be submitted to the return stream. If the result is `None` or an empty iterable then no tuples are submitted to the returned stream. Args: func: A callable that takes a single parameter for the tuple. If not supplied then a function equivalent to ``lambda tuple_ : tuple_`` is used. This is suitable when each tuple on this stream is an iterable to be flattened. name(str): Name of the flattened stream, defaults to a generated name. If invoking ``func`` for a tuple on the stream raises an exception then its processing element will terminate. By default the processing element will automatically restart though tuples may be lost. If ``func`` is a callable object then it may suppress exceptions by return a true value from its ``__exit__`` method. When an exception is suppressed no tuples are submitted to the flattened and mapped stream corresponding to the input tuple that caused the exception. Returns: Stream: A Stream containing flattened and mapped tuples. Raises: TypeError: if `func` does not return an iterator nor None .. versionchanged:: 1.11 `func` is optional. 
""" if func is None: func = streamsx.topology.runtime._identity if name is None: name = 'flatten' # depends on [control=['if'], data=['name']] # depends on [control=['if'], data=['func']] sl = _SourceLocation(_source_info(), 'flat_map') _name = self.topology.graph._requested_name(name, action='flat_map', func=func) stateful = self._determine_statefulness(func) op = self.topology.graph.addOperator(self.topology.opnamespace + '::FlatMap', func, name=_name, sl=sl, stateful=stateful) op.addInputPort(outputPort=self.oport) streamsx.topology.schema.StreamSchema._fnop_style(self.oport.schema, op, 'pyStyle') oport = op.addOutputPort(name=_name) return Stream(self.topology, oport)._make_placeable()._layout('FlatMap', name=_name, orig_name=name)
def copy_files(source_files, target_directory, source_directory=None): """Copies a list of files to the specified directory. If source_directory is provided, it will be prepended to each source file.""" try: os.makedirs(target_directory) except: # TODO: specific exception? pass for f in source_files: source = os.path.join(source_directory, f) if source_directory else f target = os.path.join(target_directory, f) shutil.copy2(source, target)
def function[copy_files, parameter[source_files, target_directory, source_directory]]: constant[Copies a list of files to the specified directory. If source_directory is provided, it will be prepended to each source file.] <ast.Try object at 0x7da18f58cee0> for taget[name[f]] in starred[name[source_files]] begin[:] variable[source] assign[=] <ast.IfExp object at 0x7da207f98070> variable[target] assign[=] call[name[os].path.join, parameter[name[target_directory], name[f]]] call[name[shutil].copy2, parameter[name[source], name[target]]]
keyword[def] identifier[copy_files] ( identifier[source_files] , identifier[target_directory] , identifier[source_directory] = keyword[None] ): literal[string] keyword[try] : identifier[os] . identifier[makedirs] ( identifier[target_directory] ) keyword[except] : keyword[pass] keyword[for] identifier[f] keyword[in] identifier[source_files] : identifier[source] = identifier[os] . identifier[path] . identifier[join] ( identifier[source_directory] , identifier[f] ) keyword[if] identifier[source_directory] keyword[else] identifier[f] identifier[target] = identifier[os] . identifier[path] . identifier[join] ( identifier[target_directory] , identifier[f] ) identifier[shutil] . identifier[copy2] ( identifier[source] , identifier[target] )
def copy_files(source_files, target_directory, source_directory=None): """Copies a list of files to the specified directory. If source_directory is provided, it will be prepended to each source file.""" try: os.makedirs(target_directory) # depends on [control=['try'], data=[]] except: # TODO: specific exception? pass # depends on [control=['except'], data=[]] for f in source_files: source = os.path.join(source_directory, f) if source_directory else f target = os.path.join(target_directory, f) shutil.copy2(source, target) # depends on [control=['for'], data=['f']]
def render_author(**kwargs): """ Unstrict template block for rendering authors: <div class="author"> <img class="author-avatar" src="{author_avatar}"> <p class="author-name"> <a href="{author_link}">{author_name}</a> </p> <p class="user-handle">{author_handle}</p> </div> """ html = '<div class="user">' author_avatar = kwargs.get('author_avatar', None) if author_avatar: html += '<img class="user-avatar" src="{}">'.format(author_avatar) author_name = kwargs.get('author_name', None) if author_name: html += '<p class="user-name">' author_link = kwargs.get('author_link', None) if author_link: html += '<a href="{author_link}">{author_name}</a>'.format( author_link=author_link, author_name=author_name ) else: html += author_name html += '</p>' author_handle = kwargs.get('author_handle', None) if author_handle: html += '<p class="user-handle">{}</p>'.format(author_handle) html += '</div>'
def function[render_author, parameter[]]: constant[ Unstrict template block for rendering authors: <div class="author"> <img class="author-avatar" src="{author_avatar}"> <p class="author-name"> <a href="{author_link}">{author_name}</a> </p> <p class="user-handle">{author_handle}</p> </div> ] variable[html] assign[=] constant[<div class="user">] variable[author_avatar] assign[=] call[name[kwargs].get, parameter[constant[author_avatar], constant[None]]] if name[author_avatar] begin[:] <ast.AugAssign object at 0x7da18f58e7d0> variable[author_name] assign[=] call[name[kwargs].get, parameter[constant[author_name], constant[None]]] if name[author_name] begin[:] <ast.AugAssign object at 0x7da18f58f970> variable[author_link] assign[=] call[name[kwargs].get, parameter[constant[author_link], constant[None]]] if name[author_link] begin[:] <ast.AugAssign object at 0x7da18f58c6a0> <ast.AugAssign object at 0x7da18f58c340> variable[author_handle] assign[=] call[name[kwargs].get, parameter[constant[author_handle], constant[None]]] if name[author_handle] begin[:] <ast.AugAssign object at 0x7da18f58de70> <ast.AugAssign object at 0x7da18f58f7f0>
keyword[def] identifier[render_author] (** identifier[kwargs] ): literal[string] identifier[html] = literal[string] identifier[author_avatar] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ) keyword[if] identifier[author_avatar] : identifier[html] += literal[string] . identifier[format] ( identifier[author_avatar] ) identifier[author_name] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ) keyword[if] identifier[author_name] : identifier[html] += literal[string] identifier[author_link] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ) keyword[if] identifier[author_link] : identifier[html] += literal[string] . identifier[format] ( identifier[author_link] = identifier[author_link] , identifier[author_name] = identifier[author_name] ) keyword[else] : identifier[html] += identifier[author_name] identifier[html] += literal[string] identifier[author_handle] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ) keyword[if] identifier[author_handle] : identifier[html] += literal[string] . identifier[format] ( identifier[author_handle] ) identifier[html] += literal[string]
def render_author(**kwargs): """ Unstrict template block for rendering authors: <div class="author"> <img class="author-avatar" src="{author_avatar}"> <p class="author-name"> <a href="{author_link}">{author_name}</a> </p> <p class="user-handle">{author_handle}</p> </div> """ html = '<div class="user">' author_avatar = kwargs.get('author_avatar', None) if author_avatar: html += '<img class="user-avatar" src="{}">'.format(author_avatar) # depends on [control=['if'], data=[]] author_name = kwargs.get('author_name', None) if author_name: html += '<p class="user-name">' author_link = kwargs.get('author_link', None) if author_link: html += '<a href="{author_link}">{author_name}</a>'.format(author_link=author_link, author_name=author_name) # depends on [control=['if'], data=[]] else: html += author_name html += '</p>' # depends on [control=['if'], data=[]] author_handle = kwargs.get('author_handle', None) if author_handle: html += '<p class="user-handle">{}</p>'.format(author_handle) # depends on [control=['if'], data=[]] html += '</div>'
def write_fp(self, outfp, blocksize=32768, progress_cb=None, progress_opaque=None): # type: (BinaryIO, int, Optional[Callable[[int, int, Any], None]], Optional[Any]) -> None ''' Write a properly formatted ISO out to the file object passed in. This also goes by the name of 'mastering'. Parameters: outfp - The file object to write the data to. blocksize - The blocksize to use when copying data; set to 32768 by default. progress_cb - If not None, a function to call as the write call does its work. The callback function must have a signature of: def func(done, total, opaque). progress_opaque - User data to be passed to the progress callback. Returns: Nothing. ''' if not self._initialized: raise pycdlibexception.PyCdlibInvalidInput('This object is not yet initialized; call either open() or new() to create an ISO') self._write_fp(outfp, blocksize, progress_cb, progress_opaque)
def function[write_fp, parameter[self, outfp, blocksize, progress_cb, progress_opaque]]: constant[ Write a properly formatted ISO out to the file object passed in. This also goes by the name of 'mastering'. Parameters: outfp - The file object to write the data to. blocksize - The blocksize to use when copying data; set to 32768 by default. progress_cb - If not None, a function to call as the write call does its work. The callback function must have a signature of: def func(done, total, opaque). progress_opaque - User data to be passed to the progress callback. Returns: Nothing. ] if <ast.UnaryOp object at 0x7da20e954520> begin[:] <ast.Raise object at 0x7da20e957e50> call[name[self]._write_fp, parameter[name[outfp], name[blocksize], name[progress_cb], name[progress_opaque]]]
keyword[def] identifier[write_fp] ( identifier[self] , identifier[outfp] , identifier[blocksize] = literal[int] , identifier[progress_cb] = keyword[None] , identifier[progress_opaque] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[_initialized] : keyword[raise] identifier[pycdlibexception] . identifier[PyCdlibInvalidInput] ( literal[string] ) identifier[self] . identifier[_write_fp] ( identifier[outfp] , identifier[blocksize] , identifier[progress_cb] , identifier[progress_opaque] )
def write_fp(self, outfp, blocksize=32768, progress_cb=None, progress_opaque=None): # type: (BinaryIO, int, Optional[Callable[[int, int, Any], None]], Optional[Any]) -> None "\n Write a properly formatted ISO out to the file object passed in. This\n also goes by the name of 'mastering'.\n\n Parameters:\n outfp - The file object to write the data to.\n blocksize - The blocksize to use when copying data; set to 32768 by default.\n progress_cb - If not None, a function to call as the write call does its\n work. The callback function must have a signature of:\n def func(done, total, opaque).\n progress_opaque - User data to be passed to the progress callback.\n Returns:\n Nothing.\n " if not self._initialized: raise pycdlibexception.PyCdlibInvalidInput('This object is not yet initialized; call either open() or new() to create an ISO') # depends on [control=['if'], data=[]] self._write_fp(outfp, blocksize, progress_cb, progress_opaque)
def remove(self, stuff): """Remove variables and constraints. Parameters ---------- stuff : iterable, str, Variable, Constraint Either an iterable containing variables and constraints to be removed from the model or a single variable or contstraint (or their names). Returns ------- None """ if self._pending_modifications.toggle == 'add': self.update() self._pending_modifications.toggle = 'remove' if isinstance(stuff, str): try: variable = self.variables[stuff] self._pending_modifications.rm_var.append(variable) except KeyError: try: constraint = self.constraints[stuff] self._pending_modifications.rm_constr.append(constraint) except KeyError: raise LookupError( "%s is neither a variable nor a constraint in the current solver instance." % stuff) elif isinstance(stuff, Variable): self._pending_modifications.rm_var.append(stuff) elif isinstance(stuff, Constraint): self._pending_modifications.rm_constr.append(stuff) elif isinstance(stuff, collections.Iterable): for elem in stuff: self.remove(elem) elif isinstance(stuff, Objective): raise TypeError( "Cannot remove objective %s. Use model.objective = Objective(...) to change the current objective." % stuff) else: raise TypeError( "Cannot remove %s. It neither a variable or constraint." % stuff)
def function[remove, parameter[self, stuff]]: constant[Remove variables and constraints. Parameters ---------- stuff : iterable, str, Variable, Constraint Either an iterable containing variables and constraints to be removed from the model or a single variable or contstraint (or their names). Returns ------- None ] if compare[name[self]._pending_modifications.toggle equal[==] constant[add]] begin[:] call[name[self].update, parameter[]] name[self]._pending_modifications.toggle assign[=] constant[remove] if call[name[isinstance], parameter[name[stuff], name[str]]] begin[:] <ast.Try object at 0x7da1b0b9ebc0>
keyword[def] identifier[remove] ( identifier[self] , identifier[stuff] ): literal[string] keyword[if] identifier[self] . identifier[_pending_modifications] . identifier[toggle] == literal[string] : identifier[self] . identifier[update] () identifier[self] . identifier[_pending_modifications] . identifier[toggle] = literal[string] keyword[if] identifier[isinstance] ( identifier[stuff] , identifier[str] ): keyword[try] : identifier[variable] = identifier[self] . identifier[variables] [ identifier[stuff] ] identifier[self] . identifier[_pending_modifications] . identifier[rm_var] . identifier[append] ( identifier[variable] ) keyword[except] identifier[KeyError] : keyword[try] : identifier[constraint] = identifier[self] . identifier[constraints] [ identifier[stuff] ] identifier[self] . identifier[_pending_modifications] . identifier[rm_constr] . identifier[append] ( identifier[constraint] ) keyword[except] identifier[KeyError] : keyword[raise] identifier[LookupError] ( literal[string] % identifier[stuff] ) keyword[elif] identifier[isinstance] ( identifier[stuff] , identifier[Variable] ): identifier[self] . identifier[_pending_modifications] . identifier[rm_var] . identifier[append] ( identifier[stuff] ) keyword[elif] identifier[isinstance] ( identifier[stuff] , identifier[Constraint] ): identifier[self] . identifier[_pending_modifications] . identifier[rm_constr] . identifier[append] ( identifier[stuff] ) keyword[elif] identifier[isinstance] ( identifier[stuff] , identifier[collections] . identifier[Iterable] ): keyword[for] identifier[elem] keyword[in] identifier[stuff] : identifier[self] . identifier[remove] ( identifier[elem] ) keyword[elif] identifier[isinstance] ( identifier[stuff] , identifier[Objective] ): keyword[raise] identifier[TypeError] ( literal[string] % identifier[stuff] ) keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] % identifier[stuff] )
def remove(self, stuff): """Remove variables and constraints. Parameters ---------- stuff : iterable, str, Variable, Constraint Either an iterable containing variables and constraints to be removed from the model or a single variable or contstraint (or their names). Returns ------- None """ if self._pending_modifications.toggle == 'add': self.update() self._pending_modifications.toggle = 'remove' # depends on [control=['if'], data=[]] if isinstance(stuff, str): try: variable = self.variables[stuff] self._pending_modifications.rm_var.append(variable) # depends on [control=['try'], data=[]] except KeyError: try: constraint = self.constraints[stuff] self._pending_modifications.rm_constr.append(constraint) # depends on [control=['try'], data=[]] except KeyError: raise LookupError('%s is neither a variable nor a constraint in the current solver instance.' % stuff) # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] elif isinstance(stuff, Variable): self._pending_modifications.rm_var.append(stuff) # depends on [control=['if'], data=[]] elif isinstance(stuff, Constraint): self._pending_modifications.rm_constr.append(stuff) # depends on [control=['if'], data=[]] elif isinstance(stuff, collections.Iterable): for elem in stuff: self.remove(elem) # depends on [control=['for'], data=['elem']] # depends on [control=['if'], data=[]] elif isinstance(stuff, Objective): raise TypeError('Cannot remove objective %s. Use model.objective = Objective(...) to change the current objective.' % stuff) # depends on [control=['if'], data=[]] else: raise TypeError('Cannot remove %s. It neither a variable or constraint.' % stuff)
def observation_placeholder(ob_space, batch_size=None, name='Ob'): ''' Create placeholder to feed observations into of the size appropriate to the observation space Parameters: ---------- ob_space: gym.Space observation space batch_size: int size of the batch to be fed into input. Can be left None in most cases. name: str name of the placeholder Returns: ------- tensorflow placeholder tensor ''' assert isinstance(ob_space, Discrete) or isinstance(ob_space, Box) or isinstance(ob_space, MultiDiscrete), \ 'Can only deal with Discrete and Box observation spaces for now' dtype = ob_space.dtype if dtype == np.int8: dtype = np.uint8 return tf.placeholder(shape=(batch_size,) + ob_space.shape, dtype=dtype, name=name)
def function[observation_placeholder, parameter[ob_space, batch_size, name]]: constant[ Create placeholder to feed observations into of the size appropriate to the observation space Parameters: ---------- ob_space: gym.Space observation space batch_size: int size of the batch to be fed into input. Can be left None in most cases. name: str name of the placeholder Returns: ------- tensorflow placeholder tensor ] assert[<ast.BoolOp object at 0x7da18ede4bb0>] variable[dtype] assign[=] name[ob_space].dtype if compare[name[dtype] equal[==] name[np].int8] begin[:] variable[dtype] assign[=] name[np].uint8 return[call[name[tf].placeholder, parameter[]]]
keyword[def] identifier[observation_placeholder] ( identifier[ob_space] , identifier[batch_size] = keyword[None] , identifier[name] = literal[string] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[ob_space] , identifier[Discrete] ) keyword[or] identifier[isinstance] ( identifier[ob_space] , identifier[Box] ) keyword[or] identifier[isinstance] ( identifier[ob_space] , identifier[MultiDiscrete] ), literal[string] identifier[dtype] = identifier[ob_space] . identifier[dtype] keyword[if] identifier[dtype] == identifier[np] . identifier[int8] : identifier[dtype] = identifier[np] . identifier[uint8] keyword[return] identifier[tf] . identifier[placeholder] ( identifier[shape] =( identifier[batch_size] ,)+ identifier[ob_space] . identifier[shape] , identifier[dtype] = identifier[dtype] , identifier[name] = identifier[name] )
def observation_placeholder(ob_space, batch_size=None, name='Ob'): """ Create placeholder to feed observations into of the size appropriate to the observation space Parameters: ---------- ob_space: gym.Space observation space batch_size: int size of the batch to be fed into input. Can be left None in most cases. name: str name of the placeholder Returns: ------- tensorflow placeholder tensor """ assert isinstance(ob_space, Discrete) or isinstance(ob_space, Box) or isinstance(ob_space, MultiDiscrete), 'Can only deal with Discrete and Box observation spaces for now' dtype = ob_space.dtype if dtype == np.int8: dtype = np.uint8 # depends on [control=['if'], data=['dtype']] return tf.placeholder(shape=(batch_size,) + ob_space.shape, dtype=dtype, name=name)
def mk_auth_token(self, account, admin=False, duration=0): """ Builds an authentification token, using preauth mechanism. See http://wiki.zimbra.com/wiki/Preauth :param duration: in seconds defaults to 0, which means "use account default" :param account: an account object to be used as a selector :returns: the auth string """ domain = account.get_domain() try: preauth_key = self.get_domain(domain)['zimbraPreAuthKey'] except KeyError: raise DomainHasNoPreAuthKey(domain) timestamp = int(time.time())*1000 expires = duration*1000 return utils.build_preauth_str(preauth_key, account.name, timestamp, expires, admin)
def function[mk_auth_token, parameter[self, account, admin, duration]]: constant[ Builds an authentification token, using preauth mechanism. See http://wiki.zimbra.com/wiki/Preauth :param duration: in seconds defaults to 0, which means "use account default" :param account: an account object to be used as a selector :returns: the auth string ] variable[domain] assign[=] call[name[account].get_domain, parameter[]] <ast.Try object at 0x7da18dc05db0> variable[timestamp] assign[=] binary_operation[call[name[int], parameter[call[name[time].time, parameter[]]]] * constant[1000]] variable[expires] assign[=] binary_operation[name[duration] * constant[1000]] return[call[name[utils].build_preauth_str, parameter[name[preauth_key], name[account].name, name[timestamp], name[expires], name[admin]]]]
keyword[def] identifier[mk_auth_token] ( identifier[self] , identifier[account] , identifier[admin] = keyword[False] , identifier[duration] = literal[int] ): literal[string] identifier[domain] = identifier[account] . identifier[get_domain] () keyword[try] : identifier[preauth_key] = identifier[self] . identifier[get_domain] ( identifier[domain] )[ literal[string] ] keyword[except] identifier[KeyError] : keyword[raise] identifier[DomainHasNoPreAuthKey] ( identifier[domain] ) identifier[timestamp] = identifier[int] ( identifier[time] . identifier[time] ())* literal[int] identifier[expires] = identifier[duration] * literal[int] keyword[return] identifier[utils] . identifier[build_preauth_str] ( identifier[preauth_key] , identifier[account] . identifier[name] , identifier[timestamp] , identifier[expires] , identifier[admin] )
def mk_auth_token(self, account, admin=False, duration=0): """ Builds an authentification token, using preauth mechanism. See http://wiki.zimbra.com/wiki/Preauth :param duration: in seconds defaults to 0, which means "use account default" :param account: an account object to be used as a selector :returns: the auth string """ domain = account.get_domain() try: preauth_key = self.get_domain(domain)['zimbraPreAuthKey'] # depends on [control=['try'], data=[]] except KeyError: raise DomainHasNoPreAuthKey(domain) # depends on [control=['except'], data=[]] timestamp = int(time.time()) * 1000 expires = duration * 1000 return utils.build_preauth_str(preauth_key, account.name, timestamp, expires, admin)
def get_part_name(self, undefined=""): """ Args: undefined (optional): Argument, which will be returned if the `part_name` record is not found. Returns: str: Name of the part of the series. or `undefined` if `part_name`\ is not found. """ return _undefined_pattern( "".join(self.get_subfields("245", "n")), lambda x: x.strip() == "", undefined )
def function[get_part_name, parameter[self, undefined]]: constant[ Args: undefined (optional): Argument, which will be returned if the `part_name` record is not found. Returns: str: Name of the part of the series. or `undefined` if `part_name` is not found. ] return[call[name[_undefined_pattern], parameter[call[constant[].join, parameter[call[name[self].get_subfields, parameter[constant[245], constant[n]]]]], <ast.Lambda object at 0x7da1afef8190>, name[undefined]]]]
keyword[def] identifier[get_part_name] ( identifier[self] , identifier[undefined] = literal[string] ): literal[string] keyword[return] identifier[_undefined_pattern] ( literal[string] . identifier[join] ( identifier[self] . identifier[get_subfields] ( literal[string] , literal[string] )), keyword[lambda] identifier[x] : identifier[x] . identifier[strip] ()== literal[string] , identifier[undefined] )
def get_part_name(self, undefined=''): """ Args: undefined (optional): Argument, which will be returned if the `part_name` record is not found. Returns: str: Name of the part of the series. or `undefined` if `part_name` is not found. """ return _undefined_pattern(''.join(self.get_subfields('245', 'n')), lambda x: x.strip() == '', undefined)
def waitforcard(self): """Wait for card insertion and returns a card service.""" AbstractCardRequest.waitforcard(self) cardfound = False # for non infinite timeout, a timer will signal # the end of the time-out by setting the evt event evt = threading.Event() if INFINITE == self.timeout: timertimeout = 1 else: timertimeout = self.timeout timer = threading.Timer( timertimeout, signalEvent, [evt, INFINITE == self.timeout]) # create a dictionary entry for new readers readerstates = {} readernames = self.getReaderNames() for reader in readernames: if not reader in readerstates: readerstates[reader] = (reader, SCARD_STATE_UNAWARE) # remove dictionary entry for readers that disappeared for oldreader in list(readerstates.keys()): if oldreader not in readernames: del readerstates[oldreader] # call SCardGetStatusChange only if we have some readers if {} != readerstates: hresult, newstates = SCardGetStatusChange( self.hcontext, 0, list(readerstates.values())) else: hresult = 0 newstates = [] # we can expect normally time-outs or reader # disappearing just before the call # otherwise, raise execption on error if 0 != hresult and \ SCARD_E_TIMEOUT != hresult and \ SCARD_E_UNKNOWN_READER != hresult: raise CardRequestException( 'Failed to SCardGetStatusChange ' + \ SCardGetErrorMessage(hresult)) # in case of timeout or reader disappearing, # the content of the states is useless # in which case we clear the changed bit if SCARD_E_TIMEOUT == hresult or SCARD_E_UNKNOWN_READER == hresult: for state in newstates: state[1] = state[1] & (0xFFFFFFFF ^ SCARD_STATE_CHANGED) # update readerstate for state in newstates: readername, eventstate, atr = state readerstates[readername] = (readername, eventstate) # if a new card is not requested, just return the first available if not self.newcardonly: for state in newstates: readername, eventstate, atr = state if eventstate & SCARD_STATE_PRESENT: reader = PCSCReader(readername) if self.cardType.matches(atr, reader): if 
self.cardServiceClass.supports('dummy'): cardfound = True return self.cardServiceClass( reader.createConnection()) timerstarted = False while not evt.isSet() and not cardfound: if not timerstarted: timerstarted = True timer.start() time.sleep(self.pollinginterval) # create a dictionary entry for new readers readernames = self.getReaderNames() for reader in readernames: if not reader in readerstates: readerstates[reader] = (reader, SCARD_STATE_UNAWARE) # remove dictionary entry for readers that disappeared for oldreader in list(readerstates.keys()): if oldreader not in readernames: del readerstates[oldreader] # wait for card insertion if {} != readerstates: hresult, newstates = SCardGetStatusChange( self.hcontext, 0, list(readerstates.values())) else: hresult = SCARD_E_TIMEOUT newstates = [] # time-out if SCARD_E_TIMEOUT == hresult: if evt.isSet(): raise CardRequestTimeoutException() # reader vanished before or during the call elif SCARD_E_UNKNOWN_READER == hresult: pass # some error happened elif 0 != hresult: timer.cancel() raise CardRequestException( 'Failed to get status change ' + \ SCardGetErrorMessage(hresult)) # something changed! else: # check if we have to return a match, i.e. # if no new card in inserted and there is a card found # or if a new card is requested, and there is a change+present for state in newstates: readername, eventstate, atr = state r, oldstate = readerstates[readername] # the status can change on a card already inserted, e.g. # unpowered, in use, ... 
# if a new card is requested, clear the state changed bit # if the card was already inserted and is still inserted if self.newcardonly: if oldstate & SCARD_STATE_PRESENT and \ eventstate & \ (SCARD_STATE_CHANGED | SCARD_STATE_PRESENT): eventstate = eventstate & \ (0xFFFFFFFF ^ SCARD_STATE_CHANGED) if (self.newcardonly and \ eventstate & SCARD_STATE_PRESENT and \ eventstate & SCARD_STATE_CHANGED) or \ (not self.newcardonly and \ eventstate & SCARD_STATE_PRESENT): reader = PCSCReader(readername) if self.cardType.matches(atr, reader): if self.cardServiceClass.supports('dummy'): cardfound = True timer.cancel() return self.cardServiceClass( reader.createConnection()) # update state dictionary readerstates[readername] = (readername, eventstate) if evt.isSet(): raise CardRequestTimeoutException()
def function[waitforcard, parameter[self]]: constant[Wait for card insertion and returns a card service.] call[name[AbstractCardRequest].waitforcard, parameter[name[self]]] variable[cardfound] assign[=] constant[False] variable[evt] assign[=] call[name[threading].Event, parameter[]] if compare[name[INFINITE] equal[==] name[self].timeout] begin[:] variable[timertimeout] assign[=] constant[1] variable[timer] assign[=] call[name[threading].Timer, parameter[name[timertimeout], name[signalEvent], list[[<ast.Name object at 0x7da1b23d09a0>, <ast.Compare object at 0x7da1b23d0130>]]]] variable[readerstates] assign[=] dictionary[[], []] variable[readernames] assign[=] call[name[self].getReaderNames, parameter[]] for taget[name[reader]] in starred[name[readernames]] begin[:] if <ast.UnaryOp object at 0x7da1b23d0550> begin[:] call[name[readerstates]][name[reader]] assign[=] tuple[[<ast.Name object at 0x7da1b23d0430>, <ast.Name object at 0x7da1b23d1060>]] for taget[name[oldreader]] in starred[call[name[list], parameter[call[name[readerstates].keys, parameter[]]]]] begin[:] if compare[name[oldreader] <ast.NotIn object at 0x7da2590d7190> name[readernames]] begin[:] <ast.Delete object at 0x7da1b23d1390> if compare[dictionary[[], []] not_equal[!=] name[readerstates]] begin[:] <ast.Tuple object at 0x7da1b23f8220> assign[=] call[name[SCardGetStatusChange], parameter[name[self].hcontext, constant[0], call[name[list], parameter[call[name[readerstates].values, parameter[]]]]]] if <ast.BoolOp object at 0x7da1b244c1c0> begin[:] <ast.Raise object at 0x7da1b244d9f0> if <ast.BoolOp object at 0x7da1b244e500> begin[:] for taget[name[state]] in starred[name[newstates]] begin[:] call[name[state]][constant[1]] assign[=] binary_operation[call[name[state]][constant[1]] <ast.BitAnd object at 0x7da2590d6b60> binary_operation[constant[4294967295] <ast.BitXor object at 0x7da2590d6b00> name[SCARD_STATE_CHANGED]]] for taget[name[state]] in starred[name[newstates]] begin[:] <ast.Tuple object at 
0x7da1b244ffd0> assign[=] name[state] call[name[readerstates]][name[readername]] assign[=] tuple[[<ast.Name object at 0x7da1b244eb60>, <ast.Name object at 0x7da1b244ed40>]] if <ast.UnaryOp object at 0x7da1b244fcd0> begin[:] for taget[name[state]] in starred[name[newstates]] begin[:] <ast.Tuple object at 0x7da1b244de70> assign[=] name[state] if binary_operation[name[eventstate] <ast.BitAnd object at 0x7da2590d6b60> name[SCARD_STATE_PRESENT]] begin[:] variable[reader] assign[=] call[name[PCSCReader], parameter[name[readername]]] if call[name[self].cardType.matches, parameter[name[atr], name[reader]]] begin[:] if call[name[self].cardServiceClass.supports, parameter[constant[dummy]]] begin[:] variable[cardfound] assign[=] constant[True] return[call[name[self].cardServiceClass, parameter[call[name[reader].createConnection, parameter[]]]]] variable[timerstarted] assign[=] constant[False] while <ast.BoolOp object at 0x7da1b244f2b0> begin[:] if <ast.UnaryOp object at 0x7da1b244ff10> begin[:] variable[timerstarted] assign[=] constant[True] call[name[timer].start, parameter[]] call[name[time].sleep, parameter[name[self].pollinginterval]] variable[readernames] assign[=] call[name[self].getReaderNames, parameter[]] for taget[name[reader]] in starred[name[readernames]] begin[:] if <ast.UnaryOp object at 0x7da1b244f520> begin[:] call[name[readerstates]][name[reader]] assign[=] tuple[[<ast.Name object at 0x7da1b244dcc0>, <ast.Name object at 0x7da1b244fe50>]] for taget[name[oldreader]] in starred[call[name[list], parameter[call[name[readerstates].keys, parameter[]]]]] begin[:] if compare[name[oldreader] <ast.NotIn object at 0x7da2590d7190> name[readernames]] begin[:] <ast.Delete object at 0x7da1b244ee00> if compare[dictionary[[], []] not_equal[!=] name[readerstates]] begin[:] <ast.Tuple object at 0x7da1b244f0d0> assign[=] call[name[SCardGetStatusChange], parameter[name[self].hcontext, constant[0], call[name[list], parameter[call[name[readerstates].values, parameter[]]]]]] if 
compare[name[SCARD_E_TIMEOUT] equal[==] name[hresult]] begin[:] if call[name[evt].isSet, parameter[]] begin[:] <ast.Raise object at 0x7da1b244d180> if call[name[evt].isSet, parameter[]] begin[:] <ast.Raise object at 0x7da1b2309e70>
keyword[def] identifier[waitforcard] ( identifier[self] ): literal[string] identifier[AbstractCardRequest] . identifier[waitforcard] ( identifier[self] ) identifier[cardfound] = keyword[False] identifier[evt] = identifier[threading] . identifier[Event] () keyword[if] identifier[INFINITE] == identifier[self] . identifier[timeout] : identifier[timertimeout] = literal[int] keyword[else] : identifier[timertimeout] = identifier[self] . identifier[timeout] identifier[timer] = identifier[threading] . identifier[Timer] ( identifier[timertimeout] , identifier[signalEvent] ,[ identifier[evt] , identifier[INFINITE] == identifier[self] . identifier[timeout] ]) identifier[readerstates] ={} identifier[readernames] = identifier[self] . identifier[getReaderNames] () keyword[for] identifier[reader] keyword[in] identifier[readernames] : keyword[if] keyword[not] identifier[reader] keyword[in] identifier[readerstates] : identifier[readerstates] [ identifier[reader] ]=( identifier[reader] , identifier[SCARD_STATE_UNAWARE] ) keyword[for] identifier[oldreader] keyword[in] identifier[list] ( identifier[readerstates] . identifier[keys] ()): keyword[if] identifier[oldreader] keyword[not] keyword[in] identifier[readernames] : keyword[del] identifier[readerstates] [ identifier[oldreader] ] keyword[if] {}!= identifier[readerstates] : identifier[hresult] , identifier[newstates] = identifier[SCardGetStatusChange] ( identifier[self] . identifier[hcontext] , literal[int] , identifier[list] ( identifier[readerstates] . 
identifier[values] ())) keyword[else] : identifier[hresult] = literal[int] identifier[newstates] =[] keyword[if] literal[int] != identifier[hresult] keyword[and] identifier[SCARD_E_TIMEOUT] != identifier[hresult] keyword[and] identifier[SCARD_E_UNKNOWN_READER] != identifier[hresult] : keyword[raise] identifier[CardRequestException] ( literal[string] + identifier[SCardGetErrorMessage] ( identifier[hresult] )) keyword[if] identifier[SCARD_E_TIMEOUT] == identifier[hresult] keyword[or] identifier[SCARD_E_UNKNOWN_READER] == identifier[hresult] : keyword[for] identifier[state] keyword[in] identifier[newstates] : identifier[state] [ literal[int] ]= identifier[state] [ literal[int] ]&( literal[int] ^ identifier[SCARD_STATE_CHANGED] ) keyword[for] identifier[state] keyword[in] identifier[newstates] : identifier[readername] , identifier[eventstate] , identifier[atr] = identifier[state] identifier[readerstates] [ identifier[readername] ]=( identifier[readername] , identifier[eventstate] ) keyword[if] keyword[not] identifier[self] . identifier[newcardonly] : keyword[for] identifier[state] keyword[in] identifier[newstates] : identifier[readername] , identifier[eventstate] , identifier[atr] = identifier[state] keyword[if] identifier[eventstate] & identifier[SCARD_STATE_PRESENT] : identifier[reader] = identifier[PCSCReader] ( identifier[readername] ) keyword[if] identifier[self] . identifier[cardType] . identifier[matches] ( identifier[atr] , identifier[reader] ): keyword[if] identifier[self] . identifier[cardServiceClass] . identifier[supports] ( literal[string] ): identifier[cardfound] = keyword[True] keyword[return] identifier[self] . identifier[cardServiceClass] ( identifier[reader] . identifier[createConnection] ()) identifier[timerstarted] = keyword[False] keyword[while] keyword[not] identifier[evt] . 
identifier[isSet] () keyword[and] keyword[not] identifier[cardfound] : keyword[if] keyword[not] identifier[timerstarted] : identifier[timerstarted] = keyword[True] identifier[timer] . identifier[start] () identifier[time] . identifier[sleep] ( identifier[self] . identifier[pollinginterval] ) identifier[readernames] = identifier[self] . identifier[getReaderNames] () keyword[for] identifier[reader] keyword[in] identifier[readernames] : keyword[if] keyword[not] identifier[reader] keyword[in] identifier[readerstates] : identifier[readerstates] [ identifier[reader] ]=( identifier[reader] , identifier[SCARD_STATE_UNAWARE] ) keyword[for] identifier[oldreader] keyword[in] identifier[list] ( identifier[readerstates] . identifier[keys] ()): keyword[if] identifier[oldreader] keyword[not] keyword[in] identifier[readernames] : keyword[del] identifier[readerstates] [ identifier[oldreader] ] keyword[if] {}!= identifier[readerstates] : identifier[hresult] , identifier[newstates] = identifier[SCardGetStatusChange] ( identifier[self] . identifier[hcontext] , literal[int] , identifier[list] ( identifier[readerstates] . identifier[values] ())) keyword[else] : identifier[hresult] = identifier[SCARD_E_TIMEOUT] identifier[newstates] =[] keyword[if] identifier[SCARD_E_TIMEOUT] == identifier[hresult] : keyword[if] identifier[evt] . identifier[isSet] (): keyword[raise] identifier[CardRequestTimeoutException] () keyword[elif] identifier[SCARD_E_UNKNOWN_READER] == identifier[hresult] : keyword[pass] keyword[elif] literal[int] != identifier[hresult] : identifier[timer] . 
identifier[cancel] () keyword[raise] identifier[CardRequestException] ( literal[string] + identifier[SCardGetErrorMessage] ( identifier[hresult] )) keyword[else] : keyword[for] identifier[state] keyword[in] identifier[newstates] : identifier[readername] , identifier[eventstate] , identifier[atr] = identifier[state] identifier[r] , identifier[oldstate] = identifier[readerstates] [ identifier[readername] ] keyword[if] identifier[self] . identifier[newcardonly] : keyword[if] identifier[oldstate] & identifier[SCARD_STATE_PRESENT] keyword[and] identifier[eventstate] &( identifier[SCARD_STATE_CHANGED] | identifier[SCARD_STATE_PRESENT] ): identifier[eventstate] = identifier[eventstate] &( literal[int] ^ identifier[SCARD_STATE_CHANGED] ) keyword[if] ( identifier[self] . identifier[newcardonly] keyword[and] identifier[eventstate] & identifier[SCARD_STATE_PRESENT] keyword[and] identifier[eventstate] & identifier[SCARD_STATE_CHANGED] ) keyword[or] ( keyword[not] identifier[self] . identifier[newcardonly] keyword[and] identifier[eventstate] & identifier[SCARD_STATE_PRESENT] ): identifier[reader] = identifier[PCSCReader] ( identifier[readername] ) keyword[if] identifier[self] . identifier[cardType] . identifier[matches] ( identifier[atr] , identifier[reader] ): keyword[if] identifier[self] . identifier[cardServiceClass] . identifier[supports] ( literal[string] ): identifier[cardfound] = keyword[True] identifier[timer] . identifier[cancel] () keyword[return] identifier[self] . identifier[cardServiceClass] ( identifier[reader] . identifier[createConnection] ()) identifier[readerstates] [ identifier[readername] ]=( identifier[readername] , identifier[eventstate] ) keyword[if] identifier[evt] . identifier[isSet] (): keyword[raise] identifier[CardRequestTimeoutException] ()
def waitforcard(self): """Wait for card insertion and returns a card service.""" AbstractCardRequest.waitforcard(self) cardfound = False # for non infinite timeout, a timer will signal # the end of the time-out by setting the evt event evt = threading.Event() if INFINITE == self.timeout: timertimeout = 1 # depends on [control=['if'], data=[]] else: timertimeout = self.timeout timer = threading.Timer(timertimeout, signalEvent, [evt, INFINITE == self.timeout]) # create a dictionary entry for new readers readerstates = {} readernames = self.getReaderNames() for reader in readernames: if not reader in readerstates: readerstates[reader] = (reader, SCARD_STATE_UNAWARE) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['reader']] # remove dictionary entry for readers that disappeared for oldreader in list(readerstates.keys()): if oldreader not in readernames: del readerstates[oldreader] # depends on [control=['if'], data=['oldreader']] # depends on [control=['for'], data=['oldreader']] # call SCardGetStatusChange only if we have some readers if {} != readerstates: (hresult, newstates) = SCardGetStatusChange(self.hcontext, 0, list(readerstates.values())) # depends on [control=['if'], data=['readerstates']] else: hresult = 0 newstates = [] # we can expect normally time-outs or reader # disappearing just before the call # otherwise, raise execption on error if 0 != hresult and SCARD_E_TIMEOUT != hresult and (SCARD_E_UNKNOWN_READER != hresult): raise CardRequestException('Failed to SCardGetStatusChange ' + SCardGetErrorMessage(hresult)) # depends on [control=['if'], data=[]] # in case of timeout or reader disappearing, # the content of the states is useless # in which case we clear the changed bit if SCARD_E_TIMEOUT == hresult or SCARD_E_UNKNOWN_READER == hresult: for state in newstates: state[1] = state[1] & (4294967295 ^ SCARD_STATE_CHANGED) # depends on [control=['for'], data=['state']] # depends on [control=['if'], data=[]] # update readerstate 
for state in newstates: (readername, eventstate, atr) = state readerstates[readername] = (readername, eventstate) # depends on [control=['for'], data=['state']] # if a new card is not requested, just return the first available if not self.newcardonly: for state in newstates: (readername, eventstate, atr) = state if eventstate & SCARD_STATE_PRESENT: reader = PCSCReader(readername) if self.cardType.matches(atr, reader): if self.cardServiceClass.supports('dummy'): cardfound = True return self.cardServiceClass(reader.createConnection()) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['state']] # depends on [control=['if'], data=[]] timerstarted = False while not evt.isSet() and (not cardfound): if not timerstarted: timerstarted = True timer.start() # depends on [control=['if'], data=[]] time.sleep(self.pollinginterval) # create a dictionary entry for new readers readernames = self.getReaderNames() for reader in readernames: if not reader in readerstates: readerstates[reader] = (reader, SCARD_STATE_UNAWARE) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['reader']] # remove dictionary entry for readers that disappeared for oldreader in list(readerstates.keys()): if oldreader not in readernames: del readerstates[oldreader] # depends on [control=['if'], data=['oldreader']] # depends on [control=['for'], data=['oldreader']] # wait for card insertion if {} != readerstates: (hresult, newstates) = SCardGetStatusChange(self.hcontext, 0, list(readerstates.values())) # depends on [control=['if'], data=['readerstates']] else: hresult = SCARD_E_TIMEOUT newstates = [] # time-out if SCARD_E_TIMEOUT == hresult: if evt.isSet(): raise CardRequestTimeoutException() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # reader vanished before or during the call elif SCARD_E_UNKNOWN_READER == hresult: pass # depends on 
[control=['if'], data=[]] # some error happened elif 0 != hresult: timer.cancel() raise CardRequestException('Failed to get status change ' + SCardGetErrorMessage(hresult)) # depends on [control=['if'], data=['hresult']] else: # something changed! # check if we have to return a match, i.e. # if no new card in inserted and there is a card found # or if a new card is requested, and there is a change+present for state in newstates: (readername, eventstate, atr) = state (r, oldstate) = readerstates[readername] # the status can change on a card already inserted, e.g. # unpowered, in use, ... # if a new card is requested, clear the state changed bit # if the card was already inserted and is still inserted if self.newcardonly: if oldstate & SCARD_STATE_PRESENT and eventstate & (SCARD_STATE_CHANGED | SCARD_STATE_PRESENT): eventstate = eventstate & (4294967295 ^ SCARD_STATE_CHANGED) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if self.newcardonly and eventstate & SCARD_STATE_PRESENT and eventstate & SCARD_STATE_CHANGED or (not self.newcardonly and eventstate & SCARD_STATE_PRESENT): reader = PCSCReader(readername) if self.cardType.matches(atr, reader): if self.cardServiceClass.supports('dummy'): cardfound = True timer.cancel() return self.cardServiceClass(reader.createConnection()) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # update state dictionary readerstates[readername] = (readername, eventstate) # depends on [control=['for'], data=['state']] if evt.isSet(): raise CardRequestTimeoutException() # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
def show_lb(kwargs=None, call=None): ''' Show the details of an existing load-balancer. CLI Example: .. code-block:: bash salt-cloud -f show_lb gce name=lb ''' if call != 'function': raise SaltCloudSystemExit( 'The show_lb function must be called with -f or --function.' ) if not kwargs or 'name' not in kwargs: log.error( 'Must specify name of load-balancer.' ) return False lb_conn = get_lb_conn(get_conn()) return _expand_balancer(lb_conn.get_balancer(kwargs['name']))
def function[show_lb, parameter[kwargs, call]]: constant[ Show the details of an existing load-balancer. CLI Example: .. code-block:: bash salt-cloud -f show_lb gce name=lb ] if compare[name[call] not_equal[!=] constant[function]] begin[:] <ast.Raise object at 0x7da20c7c8070> if <ast.BoolOp object at 0x7da20c7ca6e0> begin[:] call[name[log].error, parameter[constant[Must specify name of load-balancer.]]] return[constant[False]] variable[lb_conn] assign[=] call[name[get_lb_conn], parameter[call[name[get_conn], parameter[]]]] return[call[name[_expand_balancer], parameter[call[name[lb_conn].get_balancer, parameter[call[name[kwargs]][constant[name]]]]]]]
keyword[def] identifier[show_lb] ( identifier[kwargs] = keyword[None] , identifier[call] = keyword[None] ): literal[string] keyword[if] identifier[call] != literal[string] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] ) keyword[if] keyword[not] identifier[kwargs] keyword[or] literal[string] keyword[not] keyword[in] identifier[kwargs] : identifier[log] . identifier[error] ( literal[string] ) keyword[return] keyword[False] identifier[lb_conn] = identifier[get_lb_conn] ( identifier[get_conn] ()) keyword[return] identifier[_expand_balancer] ( identifier[lb_conn] . identifier[get_balancer] ( identifier[kwargs] [ literal[string] ]))
def show_lb(kwargs=None, call=None): """ Show the details of an existing load-balancer. CLI Example: .. code-block:: bash salt-cloud -f show_lb gce name=lb """ if call != 'function': raise SaltCloudSystemExit('The show_lb function must be called with -f or --function.') # depends on [control=['if'], data=[]] if not kwargs or 'name' not in kwargs: log.error('Must specify name of load-balancer.') return False # depends on [control=['if'], data=[]] lb_conn = get_lb_conn(get_conn()) return _expand_balancer(lb_conn.get_balancer(kwargs['name']))
def setup_exchange(self, exchange_name): """Setup the exchange on RabbitMQ by invoking the Exchange.Declare RPC command. When it is complete, the on_exchange_declareok method will be invoked by pika. :param str|unicode exchange_name: The name of the exchange to declare """ logger.info('Declaring exchange', name=exchange_name) self._channel.exchange_declare(self.on_exchange_declareok, exchange_name, self._exchange_type)
def function[setup_exchange, parameter[self, exchange_name]]: constant[Setup the exchange on RabbitMQ by invoking the Exchange.Declare RPC command. When it is complete, the on_exchange_declareok method will be invoked by pika. :param str|unicode exchange_name: The name of the exchange to declare ] call[name[logger].info, parameter[constant[Declaring exchange]]] call[name[self]._channel.exchange_declare, parameter[name[self].on_exchange_declareok, name[exchange_name], name[self]._exchange_type]]
keyword[def] identifier[setup_exchange] ( identifier[self] , identifier[exchange_name] ): literal[string] identifier[logger] . identifier[info] ( literal[string] , identifier[name] = identifier[exchange_name] ) identifier[self] . identifier[_channel] . identifier[exchange_declare] ( identifier[self] . identifier[on_exchange_declareok] , identifier[exchange_name] , identifier[self] . identifier[_exchange_type] )
def setup_exchange(self, exchange_name): """Setup the exchange on RabbitMQ by invoking the Exchange.Declare RPC command. When it is complete, the on_exchange_declareok method will be invoked by pika. :param str|unicode exchange_name: The name of the exchange to declare """ logger.info('Declaring exchange', name=exchange_name) self._channel.exchange_declare(self.on_exchange_declareok, exchange_name, self._exchange_type)
def depth(self, local: bool = True) -> int: """Return the circuit depth. Args: local: If True include local one-qubit gates in depth calculation. Else return the multi-qubit gate depth. """ G = self.graph if not local: def remove_local(dagc: DAGCircuit) \ -> Generator[Operation, None, None]: for elem in dagc: if dagc.graph.degree[elem] > 2: yield elem G = DAGCircuit(remove_local(self)).graph return nx.dag_longest_path_length(G) - 1
def function[depth, parameter[self, local]]: constant[Return the circuit depth. Args: local: If True include local one-qubit gates in depth calculation. Else return the multi-qubit gate depth. ] variable[G] assign[=] name[self].graph if <ast.UnaryOp object at 0x7da20c6c62c0> begin[:] def function[remove_local, parameter[dagc]]: for taget[name[elem]] in starred[name[dagc]] begin[:] if compare[call[name[dagc].graph.degree][name[elem]] greater[>] constant[2]] begin[:] <ast.Yield object at 0x7da20c6c6110> variable[G] assign[=] call[name[DAGCircuit], parameter[call[name[remove_local], parameter[name[self]]]]].graph return[binary_operation[call[name[nx].dag_longest_path_length, parameter[name[G]]] - constant[1]]]
keyword[def] identifier[depth] ( identifier[self] , identifier[local] : identifier[bool] = keyword[True] )-> identifier[int] : literal[string] identifier[G] = identifier[self] . identifier[graph] keyword[if] keyword[not] identifier[local] : keyword[def] identifier[remove_local] ( identifier[dagc] : identifier[DAGCircuit] )-> identifier[Generator] [ identifier[Operation] , keyword[None] , keyword[None] ]: keyword[for] identifier[elem] keyword[in] identifier[dagc] : keyword[if] identifier[dagc] . identifier[graph] . identifier[degree] [ identifier[elem] ]> literal[int] : keyword[yield] identifier[elem] identifier[G] = identifier[DAGCircuit] ( identifier[remove_local] ( identifier[self] )). identifier[graph] keyword[return] identifier[nx] . identifier[dag_longest_path_length] ( identifier[G] )- literal[int]
def depth(self, local: bool=True) -> int: """Return the circuit depth. Args: local: If True include local one-qubit gates in depth calculation. Else return the multi-qubit gate depth. """ G = self.graph if not local: def remove_local(dagc: DAGCircuit) -> Generator[Operation, None, None]: for elem in dagc: if dagc.graph.degree[elem] > 2: yield elem # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['elem']] G = DAGCircuit(remove_local(self)).graph # depends on [control=['if'], data=[]] return nx.dag_longest_path_length(G) - 1
def send_velocity_world_setpoint(self, vx, vy, vz, yawrate): """ Send Velocity in the world frame of reference setpoint. vx, vy, vz are in m/s yawrate is in degrees/s """ pk = CRTPPacket() pk.port = CRTPPort.COMMANDER_GENERIC pk.data = struct.pack('<Bffff', TYPE_VELOCITY_WORLD, vx, vy, vz, yawrate) self._cf.send_packet(pk)
def function[send_velocity_world_setpoint, parameter[self, vx, vy, vz, yawrate]]: constant[ Send Velocity in the world frame of reference setpoint. vx, vy, vz are in m/s yawrate is in degrees/s ] variable[pk] assign[=] call[name[CRTPPacket], parameter[]] name[pk].port assign[=] name[CRTPPort].COMMANDER_GENERIC name[pk].data assign[=] call[name[struct].pack, parameter[constant[<Bffff], name[TYPE_VELOCITY_WORLD], name[vx], name[vy], name[vz], name[yawrate]]] call[name[self]._cf.send_packet, parameter[name[pk]]]
keyword[def] identifier[send_velocity_world_setpoint] ( identifier[self] , identifier[vx] , identifier[vy] , identifier[vz] , identifier[yawrate] ): literal[string] identifier[pk] = identifier[CRTPPacket] () identifier[pk] . identifier[port] = identifier[CRTPPort] . identifier[COMMANDER_GENERIC] identifier[pk] . identifier[data] = identifier[struct] . identifier[pack] ( literal[string] , identifier[TYPE_VELOCITY_WORLD] , identifier[vx] , identifier[vy] , identifier[vz] , identifier[yawrate] ) identifier[self] . identifier[_cf] . identifier[send_packet] ( identifier[pk] )
def send_velocity_world_setpoint(self, vx, vy, vz, yawrate): """ Send Velocity in the world frame of reference setpoint. vx, vy, vz are in m/s yawrate is in degrees/s """ pk = CRTPPacket() pk.port = CRTPPort.COMMANDER_GENERIC pk.data = struct.pack('<Bffff', TYPE_VELOCITY_WORLD, vx, vy, vz, yawrate) self._cf.send_packet(pk)
def write_record(self, warc_record): """Adds a warc record to this WARC file. """ warc_record.write_to(self.fileobj) # Each warc record is written as separate member in the gzip file # so that each record can be read independetly. if isinstance(self.fileobj, gzip2.GzipFile): self.fileobj.close_member()
def function[write_record, parameter[self, warc_record]]: constant[Adds a warc record to this WARC file. ] call[name[warc_record].write_to, parameter[name[self].fileobj]] if call[name[isinstance], parameter[name[self].fileobj, name[gzip2].GzipFile]] begin[:] call[name[self].fileobj.close_member, parameter[]]
keyword[def] identifier[write_record] ( identifier[self] , identifier[warc_record] ): literal[string] identifier[warc_record] . identifier[write_to] ( identifier[self] . identifier[fileobj] ) keyword[if] identifier[isinstance] ( identifier[self] . identifier[fileobj] , identifier[gzip2] . identifier[GzipFile] ): identifier[self] . identifier[fileobj] . identifier[close_member] ()
def write_record(self, warc_record): """Adds a warc record to this WARC file. """ warc_record.write_to(self.fileobj) # Each warc record is written as separate member in the gzip file # so that each record can be read independetly. if isinstance(self.fileobj, gzip2.GzipFile): self.fileobj.close_member() # depends on [control=['if'], data=[]]
def _onCompletionListItemSelected(self, index): """Item selected. Insert completion to editor """ model = self._widget.model() selectedWord = model.words[index] textToInsert = selectedWord[len(model.typedText()):] self._qpart.textCursor().insertText(textToInsert) self._closeCompletion()
def function[_onCompletionListItemSelected, parameter[self, index]]: constant[Item selected. Insert completion to editor ] variable[model] assign[=] call[name[self]._widget.model, parameter[]] variable[selectedWord] assign[=] call[name[model].words][name[index]] variable[textToInsert] assign[=] call[name[selectedWord]][<ast.Slice object at 0x7da207f9bdf0>] call[call[name[self]._qpart.textCursor, parameter[]].insertText, parameter[name[textToInsert]]] call[name[self]._closeCompletion, parameter[]]
keyword[def] identifier[_onCompletionListItemSelected] ( identifier[self] , identifier[index] ): literal[string] identifier[model] = identifier[self] . identifier[_widget] . identifier[model] () identifier[selectedWord] = identifier[model] . identifier[words] [ identifier[index] ] identifier[textToInsert] = identifier[selectedWord] [ identifier[len] ( identifier[model] . identifier[typedText] ()):] identifier[self] . identifier[_qpart] . identifier[textCursor] (). identifier[insertText] ( identifier[textToInsert] ) identifier[self] . identifier[_closeCompletion] ()
def _onCompletionListItemSelected(self, index): """Item selected. Insert completion to editor """ model = self._widget.model() selectedWord = model.words[index] textToInsert = selectedWord[len(model.typedText()):] self._qpart.textCursor().insertText(textToInsert) self._closeCompletion()
def _supported_types_for_metadata(metadata): """Returns the types we have metadata for based on the PhoneMetadata object passed in, which must be non-None.""" numtypes = set() for numtype in PhoneNumberType.values(): if numtype in (PhoneNumberType.FIXED_LINE_OR_MOBILE, PhoneNumberType.UNKNOWN): # Never return FIXED_LINE_OR_MOBILE (it is a convenience type, and represents that a # particular number type can't be determined) or UNKNOWN (the non-type). continue if _desc_has_data(_number_desc_by_type(metadata, numtype)): numtypes.add(numtype) return numtypes
def function[_supported_types_for_metadata, parameter[metadata]]: constant[Returns the types we have metadata for based on the PhoneMetadata object passed in, which must be non-None.] variable[numtypes] assign[=] call[name[set], parameter[]] for taget[name[numtype]] in starred[call[name[PhoneNumberType].values, parameter[]]] begin[:] if compare[name[numtype] in tuple[[<ast.Attribute object at 0x7da1b1951090>, <ast.Attribute object at 0x7da1b1950e20>]]] begin[:] continue if call[name[_desc_has_data], parameter[call[name[_number_desc_by_type], parameter[name[metadata], name[numtype]]]]] begin[:] call[name[numtypes].add, parameter[name[numtype]]] return[name[numtypes]]
keyword[def] identifier[_supported_types_for_metadata] ( identifier[metadata] ): literal[string] identifier[numtypes] = identifier[set] () keyword[for] identifier[numtype] keyword[in] identifier[PhoneNumberType] . identifier[values] (): keyword[if] identifier[numtype] keyword[in] ( identifier[PhoneNumberType] . identifier[FIXED_LINE_OR_MOBILE] , identifier[PhoneNumberType] . identifier[UNKNOWN] ): keyword[continue] keyword[if] identifier[_desc_has_data] ( identifier[_number_desc_by_type] ( identifier[metadata] , identifier[numtype] )): identifier[numtypes] . identifier[add] ( identifier[numtype] ) keyword[return] identifier[numtypes]
def _supported_types_for_metadata(metadata): """Returns the types we have metadata for based on the PhoneMetadata object passed in, which must be non-None.""" numtypes = set() for numtype in PhoneNumberType.values(): if numtype in (PhoneNumberType.FIXED_LINE_OR_MOBILE, PhoneNumberType.UNKNOWN): # Never return FIXED_LINE_OR_MOBILE (it is a convenience type, and represents that a # particular number type can't be determined) or UNKNOWN (the non-type). continue # depends on [control=['if'], data=[]] if _desc_has_data(_number_desc_by_type(metadata, numtype)): numtypes.add(numtype) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['numtype']] return numtypes
def _sample_aAt(self,n): """Sampling frequencies, angles, and times part of sampling""" #Sample frequency along largest eigenvalue using ARS dO1s=\ bovy_ars.bovy_ars([0.,0.],[True,False], [self._meandO-numpy.sqrt(self._sortedSigOEig[2]), self._meandO+numpy.sqrt(self._sortedSigOEig[2])], _h_ars,_hp_ars,nsamples=n, hxparams=(self._meandO,self._sortedSigOEig[2]), maxn=100) dO1s= numpy.array(dO1s)*self._sigMeanSign dO2s= numpy.random.normal(size=n)*numpy.sqrt(self._sortedSigOEig[1]) dO3s= numpy.random.normal(size=n)*numpy.sqrt(self._sortedSigOEig[0]) #Rotate into dOs in R,phi,z coordinates dO= numpy.vstack((dO3s,dO2s,dO1s)) dO= numpy.dot(self._sigomatrixEig[1][:,self._sigomatrixEigsortIndx], dO) Om= dO+numpy.tile(self._progenitor_Omega.T,(n,1)).T #Also generate angles da= numpy.random.normal(size=(3,n))*self._sigangle #And a random time dt= self.sample_t(n) #Integrate the orbits relative to the progenitor da+= dO*numpy.tile(dt,(3,1)) angle= da+numpy.tile(self._progenitor_angle.T,(n,1)).T return (Om,angle,dt)
def function[_sample_aAt, parameter[self, n]]: constant[Sampling frequencies, angles, and times part of sampling] variable[dO1s] assign[=] call[name[bovy_ars].bovy_ars, parameter[list[[<ast.Constant object at 0x7da1b0c45450>, <ast.Constant object at 0x7da1b0c44d00>]], list[[<ast.Constant object at 0x7da1b0c46530>, <ast.Constant object at 0x7da1b0c45bd0>]], list[[<ast.BinOp object at 0x7da1b0c46e30>, <ast.BinOp object at 0x7da1b0c47ac0>]], name[_h_ars], name[_hp_ars]]] variable[dO1s] assign[=] binary_operation[call[name[numpy].array, parameter[name[dO1s]]] * name[self]._sigMeanSign] variable[dO2s] assign[=] binary_operation[call[name[numpy].random.normal, parameter[]] * call[name[numpy].sqrt, parameter[call[name[self]._sortedSigOEig][constant[1]]]]] variable[dO3s] assign[=] binary_operation[call[name[numpy].random.normal, parameter[]] * call[name[numpy].sqrt, parameter[call[name[self]._sortedSigOEig][constant[0]]]]] variable[dO] assign[=] call[name[numpy].vstack, parameter[tuple[[<ast.Name object at 0x7da1b0c45cf0>, <ast.Name object at 0x7da1b0c47f10>, <ast.Name object at 0x7da1b0c44730>]]]] variable[dO] assign[=] call[name[numpy].dot, parameter[call[call[name[self]._sigomatrixEig][constant[1]]][tuple[[<ast.Slice object at 0x7da1b0c44070>, <ast.Attribute object at 0x7da1b0c44dc0>]]], name[dO]]] variable[Om] assign[=] binary_operation[name[dO] + call[name[numpy].tile, parameter[name[self]._progenitor_Omega.T, tuple[[<ast.Name object at 0x7da1b0c452d0>, <ast.Constant object at 0x7da1b0c46f20>]]]].T] variable[da] assign[=] binary_operation[call[name[numpy].random.normal, parameter[]] * name[self]._sigangle] variable[dt] assign[=] call[name[self].sample_t, parameter[name[n]]] <ast.AugAssign object at 0x7da1b0c44a00> variable[angle] assign[=] binary_operation[name[da] + call[name[numpy].tile, parameter[name[self]._progenitor_angle.T, tuple[[<ast.Name object at 0x7da1b0c44eb0>, <ast.Constant object at 0x7da1b0c45960>]]]].T] return[tuple[[<ast.Name object at 
0x7da1b0c44fa0>, <ast.Name object at 0x7da1b0c46a40>, <ast.Name object at 0x7da1b0c45f90>]]]
keyword[def] identifier[_sample_aAt] ( identifier[self] , identifier[n] ): literal[string] identifier[dO1s] = identifier[bovy_ars] . identifier[bovy_ars] ([ literal[int] , literal[int] ],[ keyword[True] , keyword[False] ], [ identifier[self] . identifier[_meandO] - identifier[numpy] . identifier[sqrt] ( identifier[self] . identifier[_sortedSigOEig] [ literal[int] ]), identifier[self] . identifier[_meandO] + identifier[numpy] . identifier[sqrt] ( identifier[self] . identifier[_sortedSigOEig] [ literal[int] ])], identifier[_h_ars] , identifier[_hp_ars] , identifier[nsamples] = identifier[n] , identifier[hxparams] =( identifier[self] . identifier[_meandO] , identifier[self] . identifier[_sortedSigOEig] [ literal[int] ]), identifier[maxn] = literal[int] ) identifier[dO1s] = identifier[numpy] . identifier[array] ( identifier[dO1s] )* identifier[self] . identifier[_sigMeanSign] identifier[dO2s] = identifier[numpy] . identifier[random] . identifier[normal] ( identifier[size] = identifier[n] )* identifier[numpy] . identifier[sqrt] ( identifier[self] . identifier[_sortedSigOEig] [ literal[int] ]) identifier[dO3s] = identifier[numpy] . identifier[random] . identifier[normal] ( identifier[size] = identifier[n] )* identifier[numpy] . identifier[sqrt] ( identifier[self] . identifier[_sortedSigOEig] [ literal[int] ]) identifier[dO] = identifier[numpy] . identifier[vstack] (( identifier[dO3s] , identifier[dO2s] , identifier[dO1s] )) identifier[dO] = identifier[numpy] . identifier[dot] ( identifier[self] . identifier[_sigomatrixEig] [ literal[int] ][:, identifier[self] . identifier[_sigomatrixEigsortIndx] ], identifier[dO] ) identifier[Om] = identifier[dO] + identifier[numpy] . identifier[tile] ( identifier[self] . identifier[_progenitor_Omega] . identifier[T] ,( identifier[n] , literal[int] )). identifier[T] identifier[da] = identifier[numpy] . identifier[random] . identifier[normal] ( identifier[size] =( literal[int] , identifier[n] ))* identifier[self] . 
identifier[_sigangle] identifier[dt] = identifier[self] . identifier[sample_t] ( identifier[n] ) identifier[da] += identifier[dO] * identifier[numpy] . identifier[tile] ( identifier[dt] ,( literal[int] , literal[int] )) identifier[angle] = identifier[da] + identifier[numpy] . identifier[tile] ( identifier[self] . identifier[_progenitor_angle] . identifier[T] ,( identifier[n] , literal[int] )). identifier[T] keyword[return] ( identifier[Om] , identifier[angle] , identifier[dt] )
def _sample_aAt(self, n): """Sampling frequencies, angles, and times part of sampling""" #Sample frequency along largest eigenvalue using ARS dO1s = bovy_ars.bovy_ars([0.0, 0.0], [True, False], [self._meandO - numpy.sqrt(self._sortedSigOEig[2]), self._meandO + numpy.sqrt(self._sortedSigOEig[2])], _h_ars, _hp_ars, nsamples=n, hxparams=(self._meandO, self._sortedSigOEig[2]), maxn=100) dO1s = numpy.array(dO1s) * self._sigMeanSign dO2s = numpy.random.normal(size=n) * numpy.sqrt(self._sortedSigOEig[1]) dO3s = numpy.random.normal(size=n) * numpy.sqrt(self._sortedSigOEig[0]) #Rotate into dOs in R,phi,z coordinates dO = numpy.vstack((dO3s, dO2s, dO1s)) dO = numpy.dot(self._sigomatrixEig[1][:, self._sigomatrixEigsortIndx], dO) Om = dO + numpy.tile(self._progenitor_Omega.T, (n, 1)).T #Also generate angles da = numpy.random.normal(size=(3, n)) * self._sigangle #And a random time dt = self.sample_t(n) #Integrate the orbits relative to the progenitor da += dO * numpy.tile(dt, (3, 1)) angle = da + numpy.tile(self._progenitor_angle.T, (n, 1)).T return (Om, angle, dt)
def html(self, url, timeout=None):
    """Fetch *url* and return the response body decoded to text.

    Encoding handling is per-domain: the first successful visit to a
    domain detects and records the encoding; later requests to the same
    domain reuse the recorded encoding. Returns ``None`` when the request
    fails or the body cannot be decoded.
    """
    response = self.get_response(url, timeout=timeout)
    if not response:
        return None
    domain = self.get_domain(url)
    if domain in self.domain_encoding_map:
        # Domain seen before: decode with the remembered encoding.
        try:
            decoded = self.decoder.decode(
                response.content, self.domain_encoding_map[domain])
            return decoded[0]
        except Exception as e:
            print(e)
            return None
    # First visit to this domain: auto-detect, remember, and return.
    try:
        text, detected_encoding = self.decoder.autodecode(response.content)
        self.domain_encoding_map[domain] = detected_encoding
        return text
    except Exception as e:
        print(e)
        return None
def function[html, parameter[self, url, timeout]]: constant[High level method to get http request response in text. smartly handle the encoding problem. ] variable[response] assign[=] call[name[self].get_response, parameter[name[url]]] if name[response] begin[:] variable[domain] assign[=] call[name[self].get_domain, parameter[name[url]]] if compare[name[domain] in name[self].domain_encoding_map] begin[:] <ast.Try object at 0x7da18bc71ff0>
keyword[def] identifier[html] ( identifier[self] , identifier[url] , identifier[timeout] = keyword[None] ): literal[string] identifier[response] = identifier[self] . identifier[get_response] ( identifier[url] , identifier[timeout] = identifier[timeout] ) keyword[if] identifier[response] : identifier[domain] = identifier[self] . identifier[get_domain] ( identifier[url] ) keyword[if] identifier[domain] keyword[in] identifier[self] . identifier[domain_encoding_map] : keyword[try] : identifier[html] = identifier[self] . identifier[decoder] . identifier[decode] ( identifier[response] . identifier[content] , identifier[self] . identifier[domain_encoding_map] [ identifier[domain] ])[ literal[int] ] keyword[return] identifier[html] keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[print] ( identifier[e] ) keyword[return] keyword[None] keyword[else] : keyword[try] : identifier[html] , identifier[encoding] = identifier[self] . identifier[decoder] . identifier[autodecode] ( identifier[response] . identifier[content] ) identifier[self] . identifier[domain_encoding_map] [ identifier[domain] ]= identifier[encoding] keyword[return] identifier[html] keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[print] ( identifier[e] ) keyword[return] keyword[None] keyword[else] : keyword[return] keyword[None]
def html(self, url, timeout=None): """High level method to get http request response in text. smartly handle the encoding problem. """ response = self.get_response(url, timeout=timeout) if response: domain = self.get_domain(url) if domain in self.domain_encoding_map: # domain have been visited try: # apply extreme decoding html = self.decoder.decode(response.content, self.domain_encoding_map[domain])[0] return html # depends on [control=['try'], data=[]] except Exception as e: print(e) return None # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=['domain']] else: # never visit this domain try: (html, encoding) = self.decoder.autodecode(response.content) # save chardet analysis result self.domain_encoding_map[domain] = encoding return html # depends on [control=['try'], data=[]] except Exception as e: print(e) return None # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]] else: return None
def send(self, name, sender=None, **kwargs):
    """Dispatch the signal registered under *name*.

    Forwards *sender* plus any keyword arguments to the registered
    signal; unknown hook names are silently ignored.

    :param str name: The hook name
    :param class sender: Optional sender ``__class__`` to which\
        registered callbacks should match (see :py:func:`.connect` method)
    :return: Signal responses as a sequence of tuples (func, response);
        an empty list when no signal is registered under *name*
    :rtype: list
    """
    if name not in self._registry:
        return []
    return self._registry[name].send(sender=sender, **kwargs)
def function[send, parameter[self, name, sender]]: constant[ Sends the signal. Return every function response that was hooked to hook-name as a list: [(func, response), ] :param str name: The hook name :param class sender: Optional sender __class__ to which registered callback should match (see :py:func:`.connect` method) :return: Signal responses as a sequence of tuples (func, response) :rtype: list ] <ast.Try object at 0x7da1b25d3010> return[call[name[signal].send, parameter[]]]
keyword[def] identifier[send] ( identifier[self] , identifier[name] , identifier[sender] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[try] : identifier[signal] = identifier[self] . identifier[_registry] [ identifier[name] ] keyword[except] identifier[KeyError] : keyword[return] [] keyword[return] identifier[signal] . identifier[send] ( identifier[sender] = identifier[sender] ,** identifier[kwargs] )
def send(self, name, sender=None, **kwargs): """ Sends the signal. Return every function response that was hooked to hook-name as a list: [(func, response), ] :param str name: The hook name :param class sender: Optional sender __class__ to which registered callback should match (see :py:func:`.connect` method) :return: Signal responses as a sequence of tuples (func, response) :rtype: list """ try: signal = self._registry[name] # depends on [control=['try'], data=[]] except KeyError: return [] # depends on [control=['except'], data=[]] return signal.send(sender=sender, **kwargs)
def verify(self, data, signature=None, keyrings=None, homedir=None):
    '''
    Verify ``data``, optionally against a detached ``signature``.

    `data` <string> the data to verify.
    `signature` <string> The signature, if detached from the data.
    `keyrings` <list of string> Additional keyrings to search in.
    `homedir` <string> Override the configured homedir.
    '''
    if isinstance(data, six.text_type):
        data = data.encode('utf-8')
    tmpdir = tempfile.mkdtemp()
    # BUGFIX: the temp dir is now removed even when writing the temp files
    # fails; previously only verify_from_file() was inside the try/finally,
    # so an error in mkstemp/write leaked the directory.
    try:
        data_fd, data_path = tempfile.mkstemp(dir=tmpdir)
        with os.fdopen(data_fd, 'wb') as data_file:
            data_file.write(data)
        if signature:
            sig_fd, sig_path = tempfile.mkstemp(dir=tmpdir)
            with os.fdopen(sig_fd, 'wb') as sig_file:
                sig_file.write(signature)
        else:
            # No detached signature: verify_from_file expects None.
            sig_path = None
        return self.verify_from_file(
            data_path,
            sig_path=sig_path,
            keyrings=keyrings,
            homedir=homedir
        )
    finally:
        shutil.rmtree(tmpdir)
def function[verify, parameter[self, data, signature, keyrings, homedir]]: constant[ `data` <string> the data to verify. `signature` <string> The signature, if detached from the data. `keyrings` <list of string> Additional keyrings to search in. `homedir` <string> Override the configured homedir. ] if call[name[isinstance], parameter[name[data], name[six].text_type]] begin[:] variable[data] assign[=] call[name[data].encode, parameter[constant[utf-8]]] variable[tmpdir] assign[=] call[name[tempfile].mkdtemp, parameter[]] <ast.Tuple object at 0x7da18f09fdf0> assign[=] call[name[tempfile].mkstemp, parameter[]] variable[data_file] assign[=] call[name[os].fdopen, parameter[name[data_file], constant[wb]]] call[name[data_file].write, parameter[name[data]]] call[name[data_file].close, parameter[]] if name[signature] begin[:] <ast.Tuple object at 0x7da18f09e770> assign[=] call[name[tempfile].mkstemp, parameter[]] variable[sig_file] assign[=] call[name[os].fdopen, parameter[name[sig_file], constant[wb]]] call[name[sig_file].write, parameter[name[signature]]] call[name[sig_file].close, parameter[]] <ast.Try object at 0x7da1b0698580>
keyword[def] identifier[verify] ( identifier[self] , identifier[data] , identifier[signature] = keyword[None] , identifier[keyrings] = keyword[None] , identifier[homedir] = keyword[None] ): literal[string] keyword[if] identifier[isinstance] ( identifier[data] , identifier[six] . identifier[text_type] ): identifier[data] = identifier[data] . identifier[encode] ( literal[string] ) identifier[tmpdir] = identifier[tempfile] . identifier[mkdtemp] () identifier[data_file] , identifier[data_path] = identifier[tempfile] . identifier[mkstemp] ( identifier[dir] = identifier[tmpdir] ) identifier[data_file] = identifier[os] . identifier[fdopen] ( identifier[data_file] , literal[string] ) identifier[data_file] . identifier[write] ( identifier[data] ) identifier[data_file] . identifier[close] () keyword[if] identifier[signature] : identifier[sig_file] , identifier[sig_path] = identifier[tempfile] . identifier[mkstemp] ( identifier[dir] = identifier[tmpdir] ) identifier[sig_file] = identifier[os] . identifier[fdopen] ( identifier[sig_file] , literal[string] ) identifier[sig_file] . identifier[write] ( identifier[signature] ) identifier[sig_file] . identifier[close] () keyword[else] : identifier[sig_path] = keyword[None] keyword[try] : keyword[return] identifier[self] . identifier[verify_from_file] ( identifier[data_path] , identifier[sig_path] = identifier[sig_path] , identifier[keyrings] = identifier[keyrings] , identifier[homedir] = identifier[homedir] ) keyword[finally] : identifier[shutil] . identifier[rmtree] ( identifier[tmpdir] )
def verify(self, data, signature=None, keyrings=None, homedir=None): """ `data` <string> the data to verify. `signature` <string> The signature, if detached from the data. `keyrings` <list of string> Additional keyrings to search in. `homedir` <string> Override the configured homedir. """ if isinstance(data, six.text_type): data = data.encode('utf-8') # depends on [control=['if'], data=[]] tmpdir = tempfile.mkdtemp() (data_file, data_path) = tempfile.mkstemp(dir=tmpdir) data_file = os.fdopen(data_file, 'wb') data_file.write(data) data_file.close() if signature: (sig_file, sig_path) = tempfile.mkstemp(dir=tmpdir) sig_file = os.fdopen(sig_file, 'wb') sig_file.write(signature) sig_file.close() # depends on [control=['if'], data=[]] else: sig_path = None try: return self.verify_from_file(data_path, sig_path=sig_path, keyrings=keyrings, homedir=homedir) # depends on [control=['try'], data=[]] finally: shutil.rmtree(tmpdir)
def get(self, uid: int) -> FrozenSet[Flag]:
    """Return the session flags for the mailbox session.

    Args:
        uid: The message UID value.
    """
    # The \Recent pseudo-flag is added only for UIDs tracked as recent.
    if uid in self._recent:
        recent = _recent_set
    else:
        recent = frozenset()
    flags = self._flags.get(uid)
    if flags is None:
        return recent
    return flags | recent
def function[get, parameter[self, uid]]: constant[Return the session flags for the mailbox session. Args: uid: The message UID value. ] variable[recent] assign[=] <ast.IfExp object at 0x7da18f58ee00> variable[flags] assign[=] call[name[self]._flags.get, parameter[name[uid]]] return[<ast.IfExp object at 0x7da18f58d480>]
keyword[def] identifier[get] ( identifier[self] , identifier[uid] : identifier[int] )-> identifier[FrozenSet] [ identifier[Flag] ]: literal[string] identifier[recent] = identifier[_recent_set] keyword[if] identifier[uid] keyword[in] identifier[self] . identifier[_recent] keyword[else] identifier[frozenset] () identifier[flags] = identifier[self] . identifier[_flags] . identifier[get] ( identifier[uid] ) keyword[return] identifier[recent] keyword[if] identifier[flags] keyword[is] keyword[None] keyword[else] ( identifier[flags] | identifier[recent] )
def get(self, uid: int) -> FrozenSet[Flag]: """Return the session flags for the mailbox session. Args: uid: The message UID value. """ recent = _recent_set if uid in self._recent else frozenset() flags = self._flags.get(uid) return recent if flags is None else flags | recent
def update_workspace_config(namespace, workspace, cnamespace, configname, body):
    """Update method configuration in workspace.

    Args:
        namespace (str): project to which workspace belongs
        workspace (str): Workspace name
        cnamespace (str): Configuration namespace
        configname (str): Configuration name
        body (json): new body (definition) of the method config

    Swagger:
        https://api.firecloud.org/#!/Method_Configurations/updateWorkspaceMethodConfig
    """
    endpoint = "workspaces/{0}/{1}/method_configs/{2}/{3}"
    uri = endpoint.format(namespace, workspace, cnamespace, configname)
    return __post(uri, json=body)
def function[update_workspace_config, parameter[namespace, workspace, cnamespace, configname, body]]: constant[Update method configuration in workspace. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name cnamespace (str): Configuration namespace configname (str): Configuration name body (json): new body (definition) of the method config Swagger: https://api.firecloud.org/#!/Method_Configurations/updateWorkspaceMethodConfig ] variable[uri] assign[=] call[constant[workspaces/{0}/{1}/method_configs/{2}/{3}].format, parameter[name[namespace], name[workspace], name[cnamespace], name[configname]]] return[call[name[__post], parameter[name[uri]]]]
keyword[def] identifier[update_workspace_config] ( identifier[namespace] , identifier[workspace] , identifier[cnamespace] , identifier[configname] , identifier[body] ): literal[string] identifier[uri] = literal[string] . identifier[format] ( identifier[namespace] , identifier[workspace] , identifier[cnamespace] , identifier[configname] ) keyword[return] identifier[__post] ( identifier[uri] , identifier[json] = identifier[body] )
def update_workspace_config(namespace, workspace, cnamespace, configname, body): """Update method configuration in workspace. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name cnamespace (str): Configuration namespace configname (str): Configuration name body (json): new body (definition) of the method config Swagger: https://api.firecloud.org/#!/Method_Configurations/updateWorkspaceMethodConfig """ uri = 'workspaces/{0}/{1}/method_configs/{2}/{3}'.format(namespace, workspace, cnamespace, configname) return __post(uri, json=body)
def send_msg(from_addr: str,
             to_addrs: Union[str, List[str]],
             host: str,
             user: str,
             password: str,
             port: int = None,
             use_tls: bool = True,
             msg: email.mime.multipart.MIMEMultipart = None,
             msg_string: str = None) -> None:
    """
    Sends a pre-built e-mail message.

    Args:
        from_addr: e-mail address for 'From:' field
        to_addrs: address or list of addresses to transmit to
        host: mail server host
        user: username on mail server
        password: password for username on mail server
        port: port to use, or ``None`` for protocol default
        use_tls: use TLS, rather than plain SMTP?
        msg: a :class:`email.mime.multipart.MIMEMultipart`
        msg_string: alternative: specify the message as a raw string

    Raises:
        :exc:`RuntimeError`

    See also:

    - https://tools.ietf.org/html/rfc3207
    """
    assert bool(msg) != bool(msg_string), "Specify either msg or msg_string"
    # BUGFIX: previously msg.as_string() was used unconditionally, which
    # crashed with AttributeError when only msg_string was supplied.
    payload = msg.as_string() if msg else msg_string
    # Connect
    try:
        session = smtplib.SMTP(host, port)
    except smtplib.SMTPException as e:
        raise RuntimeError(
            "send_msg: Failed to connect to host {}, port {}: {}".format(
                host, port, e))
    try:
        session.ehlo()
    except smtplib.SMTPException as e:
        raise RuntimeError("send_msg: Failed to issue EHLO: {}".format(e))
    if use_tls:
        try:
            session.starttls()
            # RFC 3207: re-issue EHLO after the TLS handshake.
            session.ehlo()
        except smtplib.SMTPException as e:
            raise RuntimeError(
                "send_msg: Failed to initiate TLS: {}".format(e))
    # Log in
    if user:
        try:
            session.login(user, password)
        except smtplib.SMTPException as e:
            raise RuntimeError(
                "send_msg: Failed to login as user {}: {}".format(user, e))
    else:
        log.debug("Not using SMTP AUTH; no user specified")
        # For systems with... lax... security requirements
    # Send
    try:
        session.sendmail(from_addr, to_addrs, payload)
    except smtplib.SMTPException as e:
        raise RuntimeError("send_msg: Failed to send e-mail: {}".format(e))
    # Log out
    session.quit()
def function[send_msg, parameter[from_addr, to_addrs, host, user, password, port, use_tls, msg, msg_string]]: constant[ Sends a pre-built e-mail message. Args: from_addr: e-mail address for 'From:' field to_addrs: address or list of addresses to transmit to host: mail server host user: username on mail server password: password for username on mail server port: port to use, or ``None`` for protocol default use_tls: use TLS, rather than plain SMTP? msg: a :class:`email.mime.multipart.MIMEMultipart` msg_string: alternative: specify the message as a raw string Raises: :exc:`RuntimeError` See also: - https://tools.ietf.org/html/rfc3207 ] assert[compare[call[name[bool], parameter[name[msg]]] not_equal[!=] call[name[bool], parameter[name[msg_string]]]]] <ast.Try object at 0x7da1b190f610> <ast.Try object at 0x7da1b190f2e0> if name[use_tls] begin[:] <ast.Try object at 0x7da1b190d7e0> if name[user] begin[:] <ast.Try object at 0x7da1b190d0c0> <ast.Try object at 0x7da1b190c8e0> call[name[session].quit, parameter[]]
keyword[def] identifier[send_msg] ( identifier[from_addr] : identifier[str] , identifier[to_addrs] : identifier[Union] [ identifier[str] , identifier[List] [ identifier[str] ]], identifier[host] : identifier[str] , identifier[user] : identifier[str] , identifier[password] : identifier[str] , identifier[port] : identifier[int] = keyword[None] , identifier[use_tls] : identifier[bool] = keyword[True] , identifier[msg] : identifier[email] . identifier[mime] . identifier[multipart] . identifier[MIMEMultipart] = keyword[None] , identifier[msg_string] : identifier[str] = keyword[None] )-> keyword[None] : literal[string] keyword[assert] identifier[bool] ( identifier[msg] )!= identifier[bool] ( identifier[msg_string] ), literal[string] keyword[try] : identifier[session] = identifier[smtplib] . identifier[SMTP] ( identifier[host] , identifier[port] ) keyword[except] identifier[smtplib] . identifier[SMTPException] keyword[as] identifier[e] : keyword[raise] identifier[RuntimeError] ( literal[string] . identifier[format] ( identifier[host] , identifier[port] , identifier[e] )) keyword[try] : identifier[session] . identifier[ehlo] () keyword[except] identifier[smtplib] . identifier[SMTPException] keyword[as] identifier[e] : keyword[raise] identifier[RuntimeError] ( literal[string] . identifier[format] ( identifier[e] )) keyword[if] identifier[use_tls] : keyword[try] : identifier[session] . identifier[starttls] () identifier[session] . identifier[ehlo] () keyword[except] identifier[smtplib] . identifier[SMTPException] keyword[as] identifier[e] : keyword[raise] identifier[RuntimeError] ( literal[string] . identifier[format] ( identifier[e] )) keyword[if] identifier[user] : keyword[try] : identifier[session] . identifier[login] ( identifier[user] , identifier[password] ) keyword[except] identifier[smtplib] . identifier[SMTPException] keyword[as] identifier[e] : keyword[raise] identifier[RuntimeError] ( literal[string] . 
identifier[format] ( identifier[user] , identifier[e] )) keyword[else] : identifier[log] . identifier[debug] ( literal[string] ) keyword[try] : identifier[session] . identifier[sendmail] ( identifier[from_addr] , identifier[to_addrs] , identifier[msg] . identifier[as_string] ()) keyword[except] identifier[smtplib] . identifier[SMTPException] keyword[as] identifier[e] : keyword[raise] identifier[RuntimeError] ( literal[string] . identifier[format] ( identifier[e] )) identifier[session] . identifier[quit] ()
def send_msg(from_addr: str, to_addrs: Union[str, List[str]], host: str, user: str, password: str, port: int=None, use_tls: bool=True, msg: email.mime.multipart.MIMEMultipart=None, msg_string: str=None) -> None: """ Sends a pre-built e-mail message. Args: from_addr: e-mail address for 'From:' field to_addrs: address or list of addresses to transmit to host: mail server host user: username on mail server password: password for username on mail server port: port to use, or ``None`` for protocol default use_tls: use TLS, rather than plain SMTP? msg: a :class:`email.mime.multipart.MIMEMultipart` msg_string: alternative: specify the message as a raw string Raises: :exc:`RuntimeError` See also: - https://tools.ietf.org/html/rfc3207 """ assert bool(msg) != bool(msg_string), 'Specify either msg or msg_string' # Connect try: session = smtplib.SMTP(host, port) # depends on [control=['try'], data=[]] except smtplib.SMTPException as e: raise RuntimeError('send_msg: Failed to connect to host {}, port {}: {}'.format(host, port, e)) # depends on [control=['except'], data=['e']] try: session.ehlo() # depends on [control=['try'], data=[]] except smtplib.SMTPException as e: raise RuntimeError('send_msg: Failed to issue EHLO: {}'.format(e)) # depends on [control=['except'], data=['e']] if use_tls: try: session.starttls() session.ehlo() # depends on [control=['try'], data=[]] except smtplib.SMTPException as e: raise RuntimeError('send_msg: Failed to initiate TLS: {}'.format(e)) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]] # Log in if user: try: session.login(user, password) # depends on [control=['try'], data=[]] except smtplib.SMTPException as e: raise RuntimeError('send_msg: Failed to login as user {}: {}'.format(user, e)) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]] else: log.debug('Not using SMTP AUTH; no user specified') # For systems with... lax... 
security requirements # Send try: session.sendmail(from_addr, to_addrs, msg.as_string()) # depends on [control=['try'], data=[]] except smtplib.SMTPException as e: raise RuntimeError('send_msg: Failed to send e-mail: {}'.format(e)) # depends on [control=['except'], data=['e']] # Log out session.quit()
def printStatus(self):
    """Dumps different debug info about cluster to default logger"""
    for key, value in iteritems(self.getStatus()):
        logging.info('%s: %s' % (str(key), str(value)))
def function[printStatus, parameter[self]]: constant[Dumps different debug info about cluster to default logger] variable[status] assign[=] call[name[self].getStatus, parameter[]] for taget[tuple[[<ast.Name object at 0x7da20c6c5f60>, <ast.Name object at 0x7da20c6c7490>]]] in starred[call[name[iteritems], parameter[name[status]]]] begin[:] call[name[logging].info, parameter[binary_operation[constant[%s: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da20c6c6ec0>, <ast.Call object at 0x7da1b027dcc0>]]]]]
keyword[def] identifier[printStatus] ( identifier[self] ): literal[string] identifier[status] = identifier[self] . identifier[getStatus] () keyword[for] identifier[k] , identifier[v] keyword[in] identifier[iteritems] ( identifier[status] ): identifier[logging] . identifier[info] ( literal[string] %( identifier[str] ( identifier[k] ), identifier[str] ( identifier[v] )))
def printStatus(self): """Dumps different debug info about cluster to default logger""" status = self.getStatus() for (k, v) in iteritems(status): logging.info('%s: %s' % (str(k), str(v))) # depends on [control=['for'], data=[]]
def make_http_credentials(username=None, password=None):
    """Build auth part for api_url.

    Returns ``''`` when no (usable) username is given, otherwise
    ``"user@"`` or ``"user:password@"``.
    """
    # No username, or one containing the reserved ':' separator: no auth part.
    if username is None or ':' in username:
        return ''
    credentials = username
    if credentials and password is not None:
        credentials = "%s:%s" % (credentials, password)
    return "%s@" % credentials
def function[make_http_credentials, parameter[username, password]]: constant[Build auth part for api_url.] variable[credentials] assign[=] constant[] if compare[name[username] is constant[None]] begin[:] return[name[credentials]] if compare[name[username] is_not constant[None]] begin[:] if compare[constant[:] in name[username]] begin[:] return[name[credentials]] <ast.AugAssign object at 0x7da1b07adff0> if <ast.BoolOp object at 0x7da1b07ac970> begin[:] <ast.AugAssign object at 0x7da1b07accd0> return[binary_operation[constant[%s@] <ast.Mod object at 0x7da2590d6920> name[credentials]]]
keyword[def] identifier[make_http_credentials] ( identifier[username] = keyword[None] , identifier[password] = keyword[None] ): literal[string] identifier[credentials] = literal[string] keyword[if] identifier[username] keyword[is] keyword[None] : keyword[return] identifier[credentials] keyword[if] identifier[username] keyword[is] keyword[not] keyword[None] : keyword[if] literal[string] keyword[in] identifier[username] : keyword[return] identifier[credentials] identifier[credentials] += identifier[username] keyword[if] identifier[credentials] keyword[and] identifier[password] keyword[is] keyword[not] keyword[None] : identifier[credentials] += literal[string] % identifier[password] keyword[return] literal[string] % identifier[credentials]
def make_http_credentials(username=None, password=None): """Build auth part for api_url.""" credentials = '' if username is None: return credentials # depends on [control=['if'], data=[]] if username is not None: if ':' in username: return credentials # depends on [control=['if'], data=[]] credentials += username # depends on [control=['if'], data=['username']] if credentials and password is not None: credentials += ':%s' % password # depends on [control=['if'], data=[]] return '%s@' % credentials
def on_error(e):  # pragma: no cover
    """Error handler

    RuntimeError or ValueError exceptions raised by commands will be handled
    by this function.
    """
    # BUGFIX: the ValueError key was misspelled 'Value Error' (with a space),
    # so looking up e.__class__.__name__ ('ValueError') raised KeyError and
    # the handler itself crashed. Fall back to the raw class name for any
    # other exception type, just in case.
    exname = {'RuntimeError': 'Runtime error', 'ValueError': 'Value error'}
    label = exname.get(e.__class__.__name__, e.__class__.__name__)
    sys.stderr.write('{}: {}\n'.format(label, str(e)))
    sys.stderr.write('See file slam_error.log for additional details.\n')
    sys.exit(1)
def function[on_error, parameter[e]]: constant[Error handler RuntimeError or ValueError exceptions raised by commands will be handled by this function. ] variable[exname] assign[=] dictionary[[<ast.Constant object at 0x7da20c993fa0>, <ast.Constant object at 0x7da20c9905e0>], [<ast.Constant object at 0x7da20c990ca0>, <ast.Constant object at 0x7da18f09f040>]] call[name[sys].stderr.write, parameter[call[constant[{}: {} ].format, parameter[call[name[exname]][name[e].__class__.__name__], call[name[str], parameter[name[e]]]]]]] call[name[sys].stderr.write, parameter[constant[See file slam_error.log for additional details. ]]] call[name[sys].exit, parameter[constant[1]]]
keyword[def] identifier[on_error] ( identifier[e] ): literal[string] identifier[exname] ={ literal[string] : literal[string] , literal[string] : literal[string] } identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] . identifier[format] ( identifier[exname] [ identifier[e] . identifier[__class__] . identifier[__name__] ], identifier[str] ( identifier[e] ))) identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] ) identifier[sys] . identifier[exit] ( literal[int] )
def on_error(e): # pragma: no cover 'Error handler\n\n RuntimeError or ValueError exceptions raised by commands will be handled\n by this function.\n ' exname = {'RuntimeError': 'Runtime error', 'Value Error': 'Value error'} sys.stderr.write('{}: {}\n'.format(exname[e.__class__.__name__], str(e))) sys.stderr.write('See file slam_error.log for additional details.\n') sys.exit(1)
def to_type(f, type_map=CONSTRUCT_CODE): """ Format a the proper type. """ name = f.type_id if name.startswith('GPSTime'): name = 'Gps' + name[3:] if type_map.get(name, None): return type_map.get(name, None) elif name == 'array': fill = f.options['fill'].value f_ = copy.copy(f) f_.type_id = fill return "[%s]" % to_type(f_) return name
def function[to_type, parameter[f, type_map]]: constant[ Format a the proper type. ] variable[name] assign[=] name[f].type_id if call[name[name].startswith, parameter[constant[GPSTime]]] begin[:] variable[name] assign[=] binary_operation[constant[Gps] + call[name[name]][<ast.Slice object at 0x7da1b056c1f0>]] if call[name[type_map].get, parameter[name[name], constant[None]]] begin[:] return[call[name[type_map].get, parameter[name[name], constant[None]]]] return[name[name]]
keyword[def] identifier[to_type] ( identifier[f] , identifier[type_map] = identifier[CONSTRUCT_CODE] ): literal[string] identifier[name] = identifier[f] . identifier[type_id] keyword[if] identifier[name] . identifier[startswith] ( literal[string] ): identifier[name] = literal[string] + identifier[name] [ literal[int] :] keyword[if] identifier[type_map] . identifier[get] ( identifier[name] , keyword[None] ): keyword[return] identifier[type_map] . identifier[get] ( identifier[name] , keyword[None] ) keyword[elif] identifier[name] == literal[string] : identifier[fill] = identifier[f] . identifier[options] [ literal[string] ]. identifier[value] identifier[f_] = identifier[copy] . identifier[copy] ( identifier[f] ) identifier[f_] . identifier[type_id] = identifier[fill] keyword[return] literal[string] % identifier[to_type] ( identifier[f_] ) keyword[return] identifier[name]
def to_type(f, type_map=CONSTRUCT_CODE): """ Format a the proper type. """ name = f.type_id if name.startswith('GPSTime'): name = 'Gps' + name[3:] # depends on [control=['if'], data=[]] if type_map.get(name, None): return type_map.get(name, None) # depends on [control=['if'], data=[]] elif name == 'array': fill = f.options['fill'].value f_ = copy.copy(f) f_.type_id = fill return '[%s]' % to_type(f_) # depends on [control=['if'], data=[]] return name
def get_runtime_value(self, ihcid: int): """ Get runtime value with re-authenticate if needed""" if self.client.get_runtime_value(ihcid): return True self.re_authenticate() return self.client.get_runtime_value(ihcid)
def function[get_runtime_value, parameter[self, ihcid]]: constant[ Get runtime value with re-authenticate if needed] if call[name[self].client.get_runtime_value, parameter[name[ihcid]]] begin[:] return[constant[True]] call[name[self].re_authenticate, parameter[]] return[call[name[self].client.get_runtime_value, parameter[name[ihcid]]]]
keyword[def] identifier[get_runtime_value] ( identifier[self] , identifier[ihcid] : identifier[int] ): literal[string] keyword[if] identifier[self] . identifier[client] . identifier[get_runtime_value] ( identifier[ihcid] ): keyword[return] keyword[True] identifier[self] . identifier[re_authenticate] () keyword[return] identifier[self] . identifier[client] . identifier[get_runtime_value] ( identifier[ihcid] )
def get_runtime_value(self, ihcid: int): """ Get runtime value with re-authenticate if needed""" if self.client.get_runtime_value(ihcid): return True # depends on [control=['if'], data=[]] self.re_authenticate() return self.client.get_runtime_value(ihcid)
def add_class(self, c: 'Class') -> None: """ Add a (sub)class to this class. :param c: (Sub)class to add. """ self.classes[c.name] = c c.parent = self
def function[add_class, parameter[self, c]]: constant[ Add a (sub)class to this class. :param c: (Sub)class to add. ] call[name[self].classes][name[c].name] assign[=] name[c] name[c].parent assign[=] name[self]
keyword[def] identifier[add_class] ( identifier[self] , identifier[c] : literal[string] )-> keyword[None] : literal[string] identifier[self] . identifier[classes] [ identifier[c] . identifier[name] ]= identifier[c] identifier[c] . identifier[parent] = identifier[self]
def add_class(self, c: 'Class') -> None: """ Add a (sub)class to this class. :param c: (Sub)class to add. """ self.classes[c.name] = c c.parent = self
def max_dim(elements, coordinates): """ Return the maximum diameter of a molecule. Parameters ---------- elements : numpy.ndarray An array of all elements (type: str) in a molecule. coordinates : numpy.ndarray An array containing molecule's coordinates. Returns ------- """ atom_vdw_vertical = np.matrix( [[atomic_vdw_radius[i.upper()]] for i in elements]) atom_vdw_horizontal = np.matrix( [atomic_vdw_radius[i.upper()] for i in elements]) dist_matrix = euclidean_distances(coordinates, coordinates) vdw_matrix = atom_vdw_vertical + atom_vdw_horizontal re_dist_matrix = dist_matrix + vdw_matrix final_matrix = np.triu(re_dist_matrix) i1, i2 = np.unravel_index(final_matrix.argmax(), final_matrix.shape) maxdim = final_matrix[i1, i2] return i1, i2, maxdim
def function[max_dim, parameter[elements, coordinates]]: constant[ Return the maximum diameter of a molecule. Parameters ---------- elements : numpy.ndarray An array of all elements (type: str) in a molecule. coordinates : numpy.ndarray An array containing molecule's coordinates. Returns ------- ] variable[atom_vdw_vertical] assign[=] call[name[np].matrix, parameter[<ast.ListComp object at 0x7da20e9b3340>]] variable[atom_vdw_horizontal] assign[=] call[name[np].matrix, parameter[<ast.ListComp object at 0x7da20e9b0f40>]] variable[dist_matrix] assign[=] call[name[euclidean_distances], parameter[name[coordinates], name[coordinates]]] variable[vdw_matrix] assign[=] binary_operation[name[atom_vdw_vertical] + name[atom_vdw_horizontal]] variable[re_dist_matrix] assign[=] binary_operation[name[dist_matrix] + name[vdw_matrix]] variable[final_matrix] assign[=] call[name[np].triu, parameter[name[re_dist_matrix]]] <ast.Tuple object at 0x7da2047e8c10> assign[=] call[name[np].unravel_index, parameter[call[name[final_matrix].argmax, parameter[]], name[final_matrix].shape]] variable[maxdim] assign[=] call[name[final_matrix]][tuple[[<ast.Name object at 0x7da2047ea560>, <ast.Name object at 0x7da2047e86d0>]]] return[tuple[[<ast.Name object at 0x7da2047ebe20>, <ast.Name object at 0x7da2047eb4c0>, <ast.Name object at 0x7da2047e9ae0>]]]
keyword[def] identifier[max_dim] ( identifier[elements] , identifier[coordinates] ): literal[string] identifier[atom_vdw_vertical] = identifier[np] . identifier[matrix] ( [[ identifier[atomic_vdw_radius] [ identifier[i] . identifier[upper] ()]] keyword[for] identifier[i] keyword[in] identifier[elements] ]) identifier[atom_vdw_horizontal] = identifier[np] . identifier[matrix] ( [ identifier[atomic_vdw_radius] [ identifier[i] . identifier[upper] ()] keyword[for] identifier[i] keyword[in] identifier[elements] ]) identifier[dist_matrix] = identifier[euclidean_distances] ( identifier[coordinates] , identifier[coordinates] ) identifier[vdw_matrix] = identifier[atom_vdw_vertical] + identifier[atom_vdw_horizontal] identifier[re_dist_matrix] = identifier[dist_matrix] + identifier[vdw_matrix] identifier[final_matrix] = identifier[np] . identifier[triu] ( identifier[re_dist_matrix] ) identifier[i1] , identifier[i2] = identifier[np] . identifier[unravel_index] ( identifier[final_matrix] . identifier[argmax] (), identifier[final_matrix] . identifier[shape] ) identifier[maxdim] = identifier[final_matrix] [ identifier[i1] , identifier[i2] ] keyword[return] identifier[i1] , identifier[i2] , identifier[maxdim]
def max_dim(elements, coordinates): """ Return the maximum diameter of a molecule. Parameters ---------- elements : numpy.ndarray An array of all elements (type: str) in a molecule. coordinates : numpy.ndarray An array containing molecule's coordinates. Returns ------- """ atom_vdw_vertical = np.matrix([[atomic_vdw_radius[i.upper()]] for i in elements]) atom_vdw_horizontal = np.matrix([atomic_vdw_radius[i.upper()] for i in elements]) dist_matrix = euclidean_distances(coordinates, coordinates) vdw_matrix = atom_vdw_vertical + atom_vdw_horizontal re_dist_matrix = dist_matrix + vdw_matrix final_matrix = np.triu(re_dist_matrix) (i1, i2) = np.unravel_index(final_matrix.argmax(), final_matrix.shape) maxdim = final_matrix[i1, i2] return (i1, i2, maxdim)
def _dev_by_id(self, device_type): """! Get a dict, USBID -> device, for a device class @param device_type The type of devices to search. For exmaple, "serial" looks for all serial devices connected to this computer @return A dict: Device USBID -> device file in /dev """ dir = os.path.join("/dev", device_type, "by-id") if os.path.isdir(dir): to_ret = dict( self._hex_ids([os.path.join(dir, f) for f in os.listdir(dir)]) ) return to_ret else: logger.error( "Could not get %s devices by id. " "This could be because your Linux distribution " "does not use udev, or does not create /dev/%s/by-id " "symlinks. Please submit an issue to github.com/" "armmbed/mbed-ls.", device_type, device_type, ) return {}
def function[_dev_by_id, parameter[self, device_type]]: constant[! Get a dict, USBID -> device, for a device class @param device_type The type of devices to search. For exmaple, "serial" looks for all serial devices connected to this computer @return A dict: Device USBID -> device file in /dev ] variable[dir] assign[=] call[name[os].path.join, parameter[constant[/dev], name[device_type], constant[by-id]]] if call[name[os].path.isdir, parameter[name[dir]]] begin[:] variable[to_ret] assign[=] call[name[dict], parameter[call[name[self]._hex_ids, parameter[<ast.ListComp object at 0x7da1b188c6a0>]]]] return[name[to_ret]]
keyword[def] identifier[_dev_by_id] ( identifier[self] , identifier[device_type] ): literal[string] identifier[dir] = identifier[os] . identifier[path] . identifier[join] ( literal[string] , identifier[device_type] , literal[string] ) keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[dir] ): identifier[to_ret] = identifier[dict] ( identifier[self] . identifier[_hex_ids] ([ identifier[os] . identifier[path] . identifier[join] ( identifier[dir] , identifier[f] ) keyword[for] identifier[f] keyword[in] identifier[os] . identifier[listdir] ( identifier[dir] )]) ) keyword[return] identifier[to_ret] keyword[else] : identifier[logger] . identifier[error] ( literal[string] literal[string] literal[string] literal[string] literal[string] , identifier[device_type] , identifier[device_type] , ) keyword[return] {}
def _dev_by_id(self, device_type): """! Get a dict, USBID -> device, for a device class @param device_type The type of devices to search. For exmaple, "serial" looks for all serial devices connected to this computer @return A dict: Device USBID -> device file in /dev """ dir = os.path.join('/dev', device_type, 'by-id') if os.path.isdir(dir): to_ret = dict(self._hex_ids([os.path.join(dir, f) for f in os.listdir(dir)])) return to_ret # depends on [control=['if'], data=[]] else: logger.error('Could not get %s devices by id. This could be because your Linux distribution does not use udev, or does not create /dev/%s/by-id symlinks. Please submit an issue to github.com/armmbed/mbed-ls.', device_type, device_type) return {}
def stsci(hdulist): """For STScI GEIS files, need to do extra steps.""" instrument = hdulist[0].header.get('INSTRUME', '') # Update extension header keywords if instrument in ("WFPC2", "FOC"): rootname = hdulist[0].header.get('ROOTNAME', '') filetype = hdulist[0].header.get('FILETYPE', '') for i in range(1, len(hdulist)): # Add name and extver attributes to match PyFITS data structure hdulist[i].name = filetype hdulist[i]._extver = i # Add extension keywords for this chip to extension hdulist[i].header['EXPNAME'] = (rootname, "9 character exposure identifier") hdulist[i].header['EXTVER']= (i, "extension version number") hdulist[i].header['EXTNAME'] = (filetype, "extension name") hdulist[i].header['INHERIT'] = (True, "inherit the primary header") hdulist[i].header['ROOTNAME'] = (rootname, "rootname of the observation set")
def function[stsci, parameter[hdulist]]: constant[For STScI GEIS files, need to do extra steps.] variable[instrument] assign[=] call[call[name[hdulist]][constant[0]].header.get, parameter[constant[INSTRUME], constant[]]] if compare[name[instrument] in tuple[[<ast.Constant object at 0x7da18f810100>, <ast.Constant object at 0x7da18f812260>]]] begin[:] variable[rootname] assign[=] call[call[name[hdulist]][constant[0]].header.get, parameter[constant[ROOTNAME], constant[]]] variable[filetype] assign[=] call[call[name[hdulist]][constant[0]].header.get, parameter[constant[FILETYPE], constant[]]] for taget[name[i]] in starred[call[name[range], parameter[constant[1], call[name[len], parameter[name[hdulist]]]]]] begin[:] call[name[hdulist]][name[i]].name assign[=] name[filetype] call[name[hdulist]][name[i]]._extver assign[=] name[i] call[call[name[hdulist]][name[i]].header][constant[EXPNAME]] assign[=] tuple[[<ast.Name object at 0x7da18f811f30>, <ast.Constant object at 0x7da18f812ce0>]] call[call[name[hdulist]][name[i]].header][constant[EXTVER]] assign[=] tuple[[<ast.Name object at 0x7da18f811e40>, <ast.Constant object at 0x7da18f8103a0>]] call[call[name[hdulist]][name[i]].header][constant[EXTNAME]] assign[=] tuple[[<ast.Name object at 0x7da18f8124d0>, <ast.Constant object at 0x7da18f812d10>]] call[call[name[hdulist]][name[i]].header][constant[INHERIT]] assign[=] tuple[[<ast.Constant object at 0x7da18f811030>, <ast.Constant object at 0x7da18f812d70>]] call[call[name[hdulist]][name[i]].header][constant[ROOTNAME]] assign[=] tuple[[<ast.Name object at 0x7da18f811f00>, <ast.Constant object at 0x7da18f8110f0>]]
keyword[def] identifier[stsci] ( identifier[hdulist] ): literal[string] identifier[instrument] = identifier[hdulist] [ literal[int] ]. identifier[header] . identifier[get] ( literal[string] , literal[string] ) keyword[if] identifier[instrument] keyword[in] ( literal[string] , literal[string] ): identifier[rootname] = identifier[hdulist] [ literal[int] ]. identifier[header] . identifier[get] ( literal[string] , literal[string] ) identifier[filetype] = identifier[hdulist] [ literal[int] ]. identifier[header] . identifier[get] ( literal[string] , literal[string] ) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[hdulist] )): identifier[hdulist] [ identifier[i] ]. identifier[name] = identifier[filetype] identifier[hdulist] [ identifier[i] ]. identifier[_extver] = identifier[i] identifier[hdulist] [ identifier[i] ]. identifier[header] [ literal[string] ]=( identifier[rootname] , literal[string] ) identifier[hdulist] [ identifier[i] ]. identifier[header] [ literal[string] ]=( identifier[i] , literal[string] ) identifier[hdulist] [ identifier[i] ]. identifier[header] [ literal[string] ]=( identifier[filetype] , literal[string] ) identifier[hdulist] [ identifier[i] ]. identifier[header] [ literal[string] ]=( keyword[True] , literal[string] ) identifier[hdulist] [ identifier[i] ]. identifier[header] [ literal[string] ]=( identifier[rootname] , literal[string] )
def stsci(hdulist): """For STScI GEIS files, need to do extra steps.""" instrument = hdulist[0].header.get('INSTRUME', '') # Update extension header keywords if instrument in ('WFPC2', 'FOC'): rootname = hdulist[0].header.get('ROOTNAME', '') filetype = hdulist[0].header.get('FILETYPE', '') for i in range(1, len(hdulist)): # Add name and extver attributes to match PyFITS data structure hdulist[i].name = filetype hdulist[i]._extver = i # Add extension keywords for this chip to extension hdulist[i].header['EXPNAME'] = (rootname, '9 character exposure identifier') hdulist[i].header['EXTVER'] = (i, 'extension version number') hdulist[i].header['EXTNAME'] = (filetype, 'extension name') hdulist[i].header['INHERIT'] = (True, 'inherit the primary header') hdulist[i].header['ROOTNAME'] = (rootname, 'rootname of the observation set') # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
def channel(self, name): """Create a channel. Returns ``_Sender`` object implementing :class:`~aioredis.abc.AbcChannel`. """ enc_name = _converters[type(name)](name) if (enc_name, False) not in self._refs: ch = _Sender(self, enc_name, is_pattern=False) self._refs[(enc_name, False)] = ch return ch return self._refs[(enc_name, False)]
def function[channel, parameter[self, name]]: constant[Create a channel. Returns ``_Sender`` object implementing :class:`~aioredis.abc.AbcChannel`. ] variable[enc_name] assign[=] call[call[name[_converters]][call[name[type], parameter[name[name]]]], parameter[name[name]]] if compare[tuple[[<ast.Name object at 0x7da2054a4790>, <ast.Constant object at 0x7da2054a7fd0>]] <ast.NotIn object at 0x7da2590d7190> name[self]._refs] begin[:] variable[ch] assign[=] call[name[_Sender], parameter[name[self], name[enc_name]]] call[name[self]._refs][tuple[[<ast.Name object at 0x7da2054a7d90>, <ast.Constant object at 0x7da2054a5d50>]]] assign[=] name[ch] return[name[ch]] return[call[name[self]._refs][tuple[[<ast.Name object at 0x7da2054a4730>, <ast.Constant object at 0x7da2054a6bf0>]]]]
keyword[def] identifier[channel] ( identifier[self] , identifier[name] ): literal[string] identifier[enc_name] = identifier[_converters] [ identifier[type] ( identifier[name] )]( identifier[name] ) keyword[if] ( identifier[enc_name] , keyword[False] ) keyword[not] keyword[in] identifier[self] . identifier[_refs] : identifier[ch] = identifier[_Sender] ( identifier[self] , identifier[enc_name] , identifier[is_pattern] = keyword[False] ) identifier[self] . identifier[_refs] [( identifier[enc_name] , keyword[False] )]= identifier[ch] keyword[return] identifier[ch] keyword[return] identifier[self] . identifier[_refs] [( identifier[enc_name] , keyword[False] )]
def channel(self, name): """Create a channel. Returns ``_Sender`` object implementing :class:`~aioredis.abc.AbcChannel`. """ enc_name = _converters[type(name)](name) if (enc_name, False) not in self._refs: ch = _Sender(self, enc_name, is_pattern=False) self._refs[enc_name, False] = ch return ch # depends on [control=['if'], data=[]] return self._refs[enc_name, False]
def _get_local_folder(self, root=None): """Return local NApp root folder. Search for kytos.json in _./_ folder and _./user/napp_. Args: root (pathlib.Path): Where to begin searching. Return: pathlib.Path: NApp root folder. Raises: FileNotFoundError: If there is no such local NApp. """ if root is None: root = Path() for folders in ['.'], [self.user, self.napp]: kytos_json = root / Path(*folders) / 'kytos.json' if kytos_json.exists(): with kytos_json.open() as file_descriptor: meta = json.load(file_descriptor) # WARNING: This will change in future versions, when # 'author' will be removed. username = meta.get('username', meta.get('author')) if username == self.user and meta.get('name') == self.napp: return kytos_json.parent raise FileNotFoundError('kytos.json not found.')
def function[_get_local_folder, parameter[self, root]]: constant[Return local NApp root folder. Search for kytos.json in _./_ folder and _./user/napp_. Args: root (pathlib.Path): Where to begin searching. Return: pathlib.Path: NApp root folder. Raises: FileNotFoundError: If there is no such local NApp. ] if compare[name[root] is constant[None]] begin[:] variable[root] assign[=] call[name[Path], parameter[]] for taget[name[folders]] in starred[tuple[[<ast.List object at 0x7da20c6aab30>, <ast.List object at 0x7da20c6a9fc0>]]] begin[:] variable[kytos_json] assign[=] binary_operation[binary_operation[name[root] / call[name[Path], parameter[<ast.Starred object at 0x7da20c6a8550>]]] / constant[kytos.json]] if call[name[kytos_json].exists, parameter[]] begin[:] with call[name[kytos_json].open, parameter[]] begin[:] variable[meta] assign[=] call[name[json].load, parameter[name[file_descriptor]]] variable[username] assign[=] call[name[meta].get, parameter[constant[username], call[name[meta].get, parameter[constant[author]]]]] if <ast.BoolOp object at 0x7da1b25edf30> begin[:] return[name[kytos_json].parent] <ast.Raise object at 0x7da1b25ef580>
keyword[def] identifier[_get_local_folder] ( identifier[self] , identifier[root] = keyword[None] ): literal[string] keyword[if] identifier[root] keyword[is] keyword[None] : identifier[root] = identifier[Path] () keyword[for] identifier[folders] keyword[in] [ literal[string] ],[ identifier[self] . identifier[user] , identifier[self] . identifier[napp] ]: identifier[kytos_json] = identifier[root] / identifier[Path] (* identifier[folders] )/ literal[string] keyword[if] identifier[kytos_json] . identifier[exists] (): keyword[with] identifier[kytos_json] . identifier[open] () keyword[as] identifier[file_descriptor] : identifier[meta] = identifier[json] . identifier[load] ( identifier[file_descriptor] ) identifier[username] = identifier[meta] . identifier[get] ( literal[string] , identifier[meta] . identifier[get] ( literal[string] )) keyword[if] identifier[username] == identifier[self] . identifier[user] keyword[and] identifier[meta] . identifier[get] ( literal[string] )== identifier[self] . identifier[napp] : keyword[return] identifier[kytos_json] . identifier[parent] keyword[raise] identifier[FileNotFoundError] ( literal[string] )
def _get_local_folder(self, root=None): """Return local NApp root folder. Search for kytos.json in _./_ folder and _./user/napp_. Args: root (pathlib.Path): Where to begin searching. Return: pathlib.Path: NApp root folder. Raises: FileNotFoundError: If there is no such local NApp. """ if root is None: root = Path() # depends on [control=['if'], data=['root']] for folders in (['.'], [self.user, self.napp]): kytos_json = root / Path(*folders) / 'kytos.json' if kytos_json.exists(): with kytos_json.open() as file_descriptor: meta = json.load(file_descriptor) # WARNING: This will change in future versions, when # 'author' will be removed. username = meta.get('username', meta.get('author')) if username == self.user and meta.get('name') == self.napp: return kytos_json.parent # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['file_descriptor']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['folders']] raise FileNotFoundError('kytos.json not found.')
def px_to_pt(self, px): """Convert a size in pxel to a size in points.""" if px < 200: pt = self.PX_TO_PT[px] else: pt = int(floor((px - 1.21) / 1.332)) return pt
def function[px_to_pt, parameter[self, px]]: constant[Convert a size in pxel to a size in points.] if compare[name[px] less[<] constant[200]] begin[:] variable[pt] assign[=] call[name[self].PX_TO_PT][name[px]] return[name[pt]]
keyword[def] identifier[px_to_pt] ( identifier[self] , identifier[px] ): literal[string] keyword[if] identifier[px] < literal[int] : identifier[pt] = identifier[self] . identifier[PX_TO_PT] [ identifier[px] ] keyword[else] : identifier[pt] = identifier[int] ( identifier[floor] (( identifier[px] - literal[int] )/ literal[int] )) keyword[return] identifier[pt]
def px_to_pt(self, px): """Convert a size in pxel to a size in points.""" if px < 200: pt = self.PX_TO_PT[px] # depends on [control=['if'], data=['px']] else: pt = int(floor((px - 1.21) / 1.332)) return pt
def selectlastrow(self, window_name, object_name): """ Select last row @param window_name: Window name to type in, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @param object_name: Object name to type in, either full name, LDTP's name convention, or a Unix glob. @type object_name: string @return: 1 on success. @rtype: integer """ object_handle = self._get_object_handle(window_name, object_name) if not object_handle.AXEnabled: raise LdtpServerException(u"Object %s state disabled" % object_name) cell = object_handle.AXRows[-1] if not cell.AXSelected: object_handle.activate() cell.AXSelected = True else: # Selected pass return 1
def function[selectlastrow, parameter[self, window_name, object_name]]: constant[ Select last row @param window_name: Window name to type in, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @param object_name: Object name to type in, either full name, LDTP's name convention, or a Unix glob. @type object_name: string @return: 1 on success. @rtype: integer ] variable[object_handle] assign[=] call[name[self]._get_object_handle, parameter[name[window_name], name[object_name]]] if <ast.UnaryOp object at 0x7da18f09c0a0> begin[:] <ast.Raise object at 0x7da18f09f310> variable[cell] assign[=] call[name[object_handle].AXRows][<ast.UnaryOp object at 0x7da20c7c8550>] if <ast.UnaryOp object at 0x7da20c7c8e20> begin[:] call[name[object_handle].activate, parameter[]] name[cell].AXSelected assign[=] constant[True] return[constant[1]]
keyword[def] identifier[selectlastrow] ( identifier[self] , identifier[window_name] , identifier[object_name] ): literal[string] identifier[object_handle] = identifier[self] . identifier[_get_object_handle] ( identifier[window_name] , identifier[object_name] ) keyword[if] keyword[not] identifier[object_handle] . identifier[AXEnabled] : keyword[raise] identifier[LdtpServerException] ( literal[string] % identifier[object_name] ) identifier[cell] = identifier[object_handle] . identifier[AXRows] [- literal[int] ] keyword[if] keyword[not] identifier[cell] . identifier[AXSelected] : identifier[object_handle] . identifier[activate] () identifier[cell] . identifier[AXSelected] = keyword[True] keyword[else] : keyword[pass] keyword[return] literal[int]
def selectlastrow(self, window_name, object_name): """ Select last row @param window_name: Window name to type in, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @param object_name: Object name to type in, either full name, LDTP's name convention, or a Unix glob. @type object_name: string @return: 1 on success. @rtype: integer """ object_handle = self._get_object_handle(window_name, object_name) if not object_handle.AXEnabled: raise LdtpServerException(u'Object %s state disabled' % object_name) # depends on [control=['if'], data=[]] cell = object_handle.AXRows[-1] if not cell.AXSelected: object_handle.activate() cell.AXSelected = True # depends on [control=['if'], data=[]] else: # Selected pass return 1
def _get_style_of_faulting_term(self, C, rup): """ Returns the style-of-faulting term. Fault type (Strike-slip, Normal, Thrust/reverse) is derived from rake angle. Rakes angles within 30 of horizontal are strike-slip, angles from 30 to 150 are reverse, and angles from -30 to -150 are normal. Note that the 'Unspecified' case is not considered in this class as rake is required as an input variable """ SS, NS, RS = 0.0, 0.0, 0.0 if np.abs(rup.rake) <= 30.0 or (180.0 - np.abs(rup.rake)) <= 30.0: # strike-slip SS = 1.0 elif rup.rake > 30.0 and rup.rake < 150.0: # reverse RS = 1.0 else: # normal NS = 1.0 return (C["sofN"] * NS) + (C["sofR"] * RS) + (C["sofS"] * SS)
def function[_get_style_of_faulting_term, parameter[self, C, rup]]: constant[ Returns the style-of-faulting term. Fault type (Strike-slip, Normal, Thrust/reverse) is derived from rake angle. Rakes angles within 30 of horizontal are strike-slip, angles from 30 to 150 are reverse, and angles from -30 to -150 are normal. Note that the 'Unspecified' case is not considered in this class as rake is required as an input variable ] <ast.Tuple object at 0x7da1b26ad600> assign[=] tuple[[<ast.Constant object at 0x7da1b26ae710>, <ast.Constant object at 0x7da1b26add80>, <ast.Constant object at 0x7da1b26ae140>]] if <ast.BoolOp object at 0x7da1b26ad780> begin[:] variable[SS] assign[=] constant[1.0] return[binary_operation[binary_operation[binary_operation[call[name[C]][constant[sofN]] * name[NS]] + binary_operation[call[name[C]][constant[sofR]] * name[RS]]] + binary_operation[call[name[C]][constant[sofS]] * name[SS]]]]
keyword[def] identifier[_get_style_of_faulting_term] ( identifier[self] , identifier[C] , identifier[rup] ): literal[string] identifier[SS] , identifier[NS] , identifier[RS] = literal[int] , literal[int] , literal[int] keyword[if] identifier[np] . identifier[abs] ( identifier[rup] . identifier[rake] )<= literal[int] keyword[or] ( literal[int] - identifier[np] . identifier[abs] ( identifier[rup] . identifier[rake] ))<= literal[int] : identifier[SS] = literal[int] keyword[elif] identifier[rup] . identifier[rake] > literal[int] keyword[and] identifier[rup] . identifier[rake] < literal[int] : identifier[RS] = literal[int] keyword[else] : identifier[NS] = literal[int] keyword[return] ( identifier[C] [ literal[string] ]* identifier[NS] )+( identifier[C] [ literal[string] ]* identifier[RS] )+( identifier[C] [ literal[string] ]* identifier[SS] )
def _get_style_of_faulting_term(self, C, rup): """ Returns the style-of-faulting term. Fault type (Strike-slip, Normal, Thrust/reverse) is derived from rake angle. Rakes angles within 30 of horizontal are strike-slip, angles from 30 to 150 are reverse, and angles from -30 to -150 are normal. Note that the 'Unspecified' case is not considered in this class as rake is required as an input variable """ (SS, NS, RS) = (0.0, 0.0, 0.0) if np.abs(rup.rake) <= 30.0 or 180.0 - np.abs(rup.rake) <= 30.0: # strike-slip SS = 1.0 # depends on [control=['if'], data=[]] elif rup.rake > 30.0 and rup.rake < 150.0: # reverse RS = 1.0 # depends on [control=['if'], data=[]] else: # normal NS = 1.0 return C['sofN'] * NS + C['sofR'] * RS + C['sofS'] * SS
def _get_messages_from_folder_name(self, folder_name): """ Retrieves all messages from a folder, specified by its name. This only works with "Well Known" folders, such as 'Inbox' or 'Drafts'. Args: folder_name (str): The name of the folder to retrieve Returns: List[:class:`Message <pyOutlook.core.message.Message>` ] """ r = requests.get('https://outlook.office.com/api/v2.0/me/MailFolders/' + folder_name + '/messages', headers=self._headers) check_response(r) return Message._json_to_messages(self, r.json())
def function[_get_messages_from_folder_name, parameter[self, folder_name]]: constant[ Retrieves all messages from a folder, specified by its name. This only works with "Well Known" folders, such as 'Inbox' or 'Drafts'. Args: folder_name (str): The name of the folder to retrieve Returns: List[:class:`Message <pyOutlook.core.message.Message>` ] ] variable[r] assign[=] call[name[requests].get, parameter[binary_operation[binary_operation[constant[https://outlook.office.com/api/v2.0/me/MailFolders/] + name[folder_name]] + constant[/messages]]]] call[name[check_response], parameter[name[r]]] return[call[name[Message]._json_to_messages, parameter[name[self], call[name[r].json, parameter[]]]]]
keyword[def] identifier[_get_messages_from_folder_name] ( identifier[self] , identifier[folder_name] ): literal[string] identifier[r] = identifier[requests] . identifier[get] ( literal[string] + identifier[folder_name] + literal[string] , identifier[headers] = identifier[self] . identifier[_headers] ) identifier[check_response] ( identifier[r] ) keyword[return] identifier[Message] . identifier[_json_to_messages] ( identifier[self] , identifier[r] . identifier[json] ())
def _get_messages_from_folder_name(self, folder_name): """ Retrieves all messages from a folder, specified by its name. This only works with "Well Known" folders, such as 'Inbox' or 'Drafts'. Args: folder_name (str): The name of the folder to retrieve Returns: List[:class:`Message <pyOutlook.core.message.Message>` ] """ r = requests.get('https://outlook.office.com/api/v2.0/me/MailFolders/' + folder_name + '/messages', headers=self._headers) check_response(r) return Message._json_to_messages(self, r.json())
def rap(self,analytic=False,pot=None,**kwargs): """ NAME: rap PURPOSE: return the apocenter radius INPUT: analytic - compute this analytically pot - potential to use for analytical calculation OUTPUT: R_ap HISTORY: 2010-09-20 - Written - Bovy (NYU) """ if analytic: self._setupaA(pot=pot,type='adiabatic') (rperi,rap)= self._aA.calcRapRperi(self) return rap if not hasattr(self,'orbit'): raise AttributeError("Integrate the orbit first") if not hasattr(self,'rs'): self.rs= self.orbit[:,0] return nu.amax(self.rs)
def function[rap, parameter[self, analytic, pot]]: constant[ NAME: rap PURPOSE: return the apocenter radius INPUT: analytic - compute this analytically pot - potential to use for analytical calculation OUTPUT: R_ap HISTORY: 2010-09-20 - Written - Bovy (NYU) ] if name[analytic] begin[:] call[name[self]._setupaA, parameter[]] <ast.Tuple object at 0x7da1b0c40ac0> assign[=] call[name[self]._aA.calcRapRperi, parameter[name[self]]] return[name[rap]] if <ast.UnaryOp object at 0x7da1b0c42b60> begin[:] <ast.Raise object at 0x7da1b0c42b90> if <ast.UnaryOp object at 0x7da1b0c429b0> begin[:] name[self].rs assign[=] call[name[self].orbit][tuple[[<ast.Slice object at 0x7da1b0c40f70>, <ast.Constant object at 0x7da1b0c40cd0>]]] return[call[name[nu].amax, parameter[name[self].rs]]]
keyword[def] identifier[rap] ( identifier[self] , identifier[analytic] = keyword[False] , identifier[pot] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[analytic] : identifier[self] . identifier[_setupaA] ( identifier[pot] = identifier[pot] , identifier[type] = literal[string] ) ( identifier[rperi] , identifier[rap] )= identifier[self] . identifier[_aA] . identifier[calcRapRperi] ( identifier[self] ) keyword[return] identifier[rap] keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ): keyword[raise] identifier[AttributeError] ( literal[string] ) keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[rs] = identifier[self] . identifier[orbit] [:, literal[int] ] keyword[return] identifier[nu] . identifier[amax] ( identifier[self] . identifier[rs] )
def rap(self, analytic=False, pot=None, **kwargs): """ NAME: rap PURPOSE: return the apocenter radius INPUT: analytic - compute this analytically pot - potential to use for analytical calculation OUTPUT: R_ap HISTORY: 2010-09-20 - Written - Bovy (NYU) """ if analytic: self._setupaA(pot=pot, type='adiabatic') (rperi, rap) = self._aA.calcRapRperi(self) return rap # depends on [control=['if'], data=[]] if not hasattr(self, 'orbit'): raise AttributeError('Integrate the orbit first') # depends on [control=['if'], data=[]] if not hasattr(self, 'rs'): self.rs = self.orbit[:, 0] # depends on [control=['if'], data=[]] return nu.amax(self.rs)
def _fftconv(a, b, axes=(0, 1)): """Patched version of :func:`sporco.linalg.fftconv`.""" if cp.isrealobj(a) and cp.isrealobj(b): fft = cp.fft.rfftn ifft = cp.fft.irfftn else: fft = cp.fft.fftn ifft = cp.fft.ifftn dims = cp.maximum(cp.asarray([a.shape[i] for i in axes]), cp.asarray([b.shape[i] for i in axes])) dims = [int(d) for d in dims] af = fft(a, dims, axes) bf = fft(b, dims, axes) return ifft(af * bf, dims, axes)
def function[_fftconv, parameter[a, b, axes]]: constant[Patched version of :func:`sporco.linalg.fftconv`.] if <ast.BoolOp object at 0x7da1b07faf80> begin[:] variable[fft] assign[=] name[cp].fft.rfftn variable[ifft] assign[=] name[cp].fft.irfftn variable[dims] assign[=] call[name[cp].maximum, parameter[call[name[cp].asarray, parameter[<ast.ListComp object at 0x7da1b06ea0e0>]], call[name[cp].asarray, parameter[<ast.ListComp object at 0x7da1b06e9d20>]]]] variable[dims] assign[=] <ast.ListComp object at 0x7da1b06eabc0> variable[af] assign[=] call[name[fft], parameter[name[a], name[dims], name[axes]]] variable[bf] assign[=] call[name[fft], parameter[name[b], name[dims], name[axes]]] return[call[name[ifft], parameter[binary_operation[name[af] * name[bf]], name[dims], name[axes]]]]
keyword[def] identifier[_fftconv] ( identifier[a] , identifier[b] , identifier[axes] =( literal[int] , literal[int] )): literal[string] keyword[if] identifier[cp] . identifier[isrealobj] ( identifier[a] ) keyword[and] identifier[cp] . identifier[isrealobj] ( identifier[b] ): identifier[fft] = identifier[cp] . identifier[fft] . identifier[rfftn] identifier[ifft] = identifier[cp] . identifier[fft] . identifier[irfftn] keyword[else] : identifier[fft] = identifier[cp] . identifier[fft] . identifier[fftn] identifier[ifft] = identifier[cp] . identifier[fft] . identifier[ifftn] identifier[dims] = identifier[cp] . identifier[maximum] ( identifier[cp] . identifier[asarray] ([ identifier[a] . identifier[shape] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[axes] ]), identifier[cp] . identifier[asarray] ([ identifier[b] . identifier[shape] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[axes] ])) identifier[dims] =[ identifier[int] ( identifier[d] ) keyword[for] identifier[d] keyword[in] identifier[dims] ] identifier[af] = identifier[fft] ( identifier[a] , identifier[dims] , identifier[axes] ) identifier[bf] = identifier[fft] ( identifier[b] , identifier[dims] , identifier[axes] ) keyword[return] identifier[ifft] ( identifier[af] * identifier[bf] , identifier[dims] , identifier[axes] )
def _fftconv(a, b, axes=(0, 1)): """Patched version of :func:`sporco.linalg.fftconv`.""" if cp.isrealobj(a) and cp.isrealobj(b): fft = cp.fft.rfftn ifft = cp.fft.irfftn # depends on [control=['if'], data=[]] else: fft = cp.fft.fftn ifft = cp.fft.ifftn dims = cp.maximum(cp.asarray([a.shape[i] for i in axes]), cp.asarray([b.shape[i] for i in axes])) dims = [int(d) for d in dims] af = fft(a, dims, axes) bf = fft(b, dims, axes) return ifft(af * bf, dims, axes)
def __calculate_states(self): """! @brief Calculates new state of each neuron. @detail There is no any assignment. @return (list) Returns new states (output). """ output = [ 0.0 for _ in range(self.__num_osc) ] for i in range(self.__num_osc): output[i] = self.__neuron_evolution(i) return output
def function[__calculate_states, parameter[self]]: constant[! @brief Calculates new state of each neuron. @detail There is no any assignment. @return (list) Returns new states (output). ] variable[output] assign[=] <ast.ListComp object at 0x7da1b01e2d10> for taget[name[i]] in starred[call[name[range], parameter[name[self].__num_osc]]] begin[:] call[name[output]][name[i]] assign[=] call[name[self].__neuron_evolution, parameter[name[i]]] return[name[output]]
keyword[def] identifier[__calculate_states] ( identifier[self] ): literal[string] identifier[output] =[ literal[int] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[self] . identifier[__num_osc] )] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[__num_osc] ): identifier[output] [ identifier[i] ]= identifier[self] . identifier[__neuron_evolution] ( identifier[i] ) keyword[return] identifier[output]
def __calculate_states(self): """! @brief Calculates new state of each neuron. @detail There is no any assignment. @return (list) Returns new states (output). """ output = [0.0 for _ in range(self.__num_osc)] for i in range(self.__num_osc): output[i] = self.__neuron_evolution(i) # depends on [control=['for'], data=['i']] return output
def djfrontend_jquery_datatables(version=None): """ Returns the jQuery DataTables plugin file according to version number. TEMPLATE_DEBUG returns full file, otherwise returns minified file. """ if version is None: if not getattr(settings, 'DJFRONTEND_JQUERY_DATATABLES', False): version = getattr(settings, 'DJFRONTEND_JQUERY_DATATABLES_VERSION', DJFRONTEND_JQUERY_DATATABLES_VERSION_DEFAULT) else: version = getattr(settings, 'DJFRONTEND_JQUERY_DATATABLES', DJFRONTEND_JQUERY_DATATABLES_VERSION_DEFAULT) if getattr(settings, 'TEMPLATE_DEBUG', False): template = '<script src="{static}djfrontend/js/jquery/jquery.dataTables/{v}/jquery.dataTables.js"></script>' else: template = ( '<script src="//cdnjs.cloudflare.com/ajax/libs/datatables/{v}/jquery.dataTables.min.js"></script>' '<script>window.jQuery.fn.DataTable || document.write(\'<script src="{static}djfrontend/js/jquery/jquery.dataTables/{v}/jquery.dataTables.min.js"><\/script>\')</script>') return format_html(template, static=_static_url, v=version)
def function[djfrontend_jquery_datatables, parameter[version]]: constant[ Returns the jQuery DataTables plugin file according to version number. TEMPLATE_DEBUG returns full file, otherwise returns minified file. ] if compare[name[version] is constant[None]] begin[:] if <ast.UnaryOp object at 0x7da1b03a9a50> begin[:] variable[version] assign[=] call[name[getattr], parameter[name[settings], constant[DJFRONTEND_JQUERY_DATATABLES_VERSION], name[DJFRONTEND_JQUERY_DATATABLES_VERSION_DEFAULT]]] if call[name[getattr], parameter[name[settings], constant[TEMPLATE_DEBUG], constant[False]]] begin[:] variable[template] assign[=] constant[<script src="{static}djfrontend/js/jquery/jquery.dataTables/{v}/jquery.dataTables.js"></script>] return[call[name[format_html], parameter[name[template]]]]
keyword[def] identifier[djfrontend_jquery_datatables] ( identifier[version] = keyword[None] ): literal[string] keyword[if] identifier[version] keyword[is] keyword[None] : keyword[if] keyword[not] identifier[getattr] ( identifier[settings] , literal[string] , keyword[False] ): identifier[version] = identifier[getattr] ( identifier[settings] , literal[string] , identifier[DJFRONTEND_JQUERY_DATATABLES_VERSION_DEFAULT] ) keyword[else] : identifier[version] = identifier[getattr] ( identifier[settings] , literal[string] , identifier[DJFRONTEND_JQUERY_DATATABLES_VERSION_DEFAULT] ) keyword[if] identifier[getattr] ( identifier[settings] , literal[string] , keyword[False] ): identifier[template] = literal[string] keyword[else] : identifier[template] =( literal[string] literal[string] ) keyword[return] identifier[format_html] ( identifier[template] , identifier[static] = identifier[_static_url] , identifier[v] = identifier[version] )
def djfrontend_jquery_datatables(version=None): """ Returns the jQuery DataTables plugin file according to version number. TEMPLATE_DEBUG returns full file, otherwise returns minified file. """ if version is None: if not getattr(settings, 'DJFRONTEND_JQUERY_DATATABLES', False): version = getattr(settings, 'DJFRONTEND_JQUERY_DATATABLES_VERSION', DJFRONTEND_JQUERY_DATATABLES_VERSION_DEFAULT) # depends on [control=['if'], data=[]] else: version = getattr(settings, 'DJFRONTEND_JQUERY_DATATABLES', DJFRONTEND_JQUERY_DATATABLES_VERSION_DEFAULT) # depends on [control=['if'], data=['version']] if getattr(settings, 'TEMPLATE_DEBUG', False): template = '<script src="{static}djfrontend/js/jquery/jquery.dataTables/{v}/jquery.dataTables.js"></script>' # depends on [control=['if'], data=[]] else: template = '<script src="//cdnjs.cloudflare.com/ajax/libs/datatables/{v}/jquery.dataTables.min.js"></script><script>window.jQuery.fn.DataTable || document.write(\'<script src="{static}djfrontend/js/jquery/jquery.dataTables/{v}/jquery.dataTables.min.js"><\\/script>\')</script>' return format_html(template, static=_static_url, v=version)
def __apf_cmd(cmd): ''' Return the apf location ''' apf_cmd = '{0} {1}'.format(salt.utils.path.which('apf'), cmd) out = __salt__['cmd.run_all'](apf_cmd) if out['retcode'] != 0: if not out['stderr']: msg = out['stdout'] else: msg = out['stderr'] raise CommandExecutionError( 'apf failed: {0}'.format(msg) ) return out['stdout']
def function[__apf_cmd, parameter[cmd]]: constant[ Return the apf location ] variable[apf_cmd] assign[=] call[constant[{0} {1}].format, parameter[call[name[salt].utils.path.which, parameter[constant[apf]]], name[cmd]]] variable[out] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[name[apf_cmd]]] if compare[call[name[out]][constant[retcode]] not_equal[!=] constant[0]] begin[:] if <ast.UnaryOp object at 0x7da1b2160400> begin[:] variable[msg] assign[=] call[name[out]][constant[stdout]] <ast.Raise object at 0x7da1b2161840> return[call[name[out]][constant[stdout]]]
keyword[def] identifier[__apf_cmd] ( identifier[cmd] ): literal[string] identifier[apf_cmd] = literal[string] . identifier[format] ( identifier[salt] . identifier[utils] . identifier[path] . identifier[which] ( literal[string] ), identifier[cmd] ) identifier[out] = identifier[__salt__] [ literal[string] ]( identifier[apf_cmd] ) keyword[if] identifier[out] [ literal[string] ]!= literal[int] : keyword[if] keyword[not] identifier[out] [ literal[string] ]: identifier[msg] = identifier[out] [ literal[string] ] keyword[else] : identifier[msg] = identifier[out] [ literal[string] ] keyword[raise] identifier[CommandExecutionError] ( literal[string] . identifier[format] ( identifier[msg] ) ) keyword[return] identifier[out] [ literal[string] ]
def __apf_cmd(cmd): """ Return the apf location """ apf_cmd = '{0} {1}'.format(salt.utils.path.which('apf'), cmd) out = __salt__['cmd.run_all'](apf_cmd) if out['retcode'] != 0: if not out['stderr']: msg = out['stdout'] # depends on [control=['if'], data=[]] else: msg = out['stderr'] raise CommandExecutionError('apf failed: {0}'.format(msg)) # depends on [control=['if'], data=[]] return out['stdout']