text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def all_inspections(obj):
    """
    Generator yielding (name, result) for each registered Jishaku
    inspection whose callback produces a truthy result for *obj*.
    """
    for inspection_name, inspect in INSPECTIONS:
        outcome = inspect(obj)
        if not outcome:
            continue
        yield inspection_name, outcome
[ "def", "all_inspections", "(", "obj", ")", ":", "for", "name", ",", "callback", "in", "INSPECTIONS", ":", "result", "=", "callback", "(", "obj", ")", "if", "result", ":", "yield", "name", ",", "result" ]
23.555556
12.888889
def contians_attribute(self, attribute):
    """
    Count how many cards in the deck carry *attribute*.

    A library must be stored in the deck instance so card codes can be
    resolved; without one the count is 0.

    Args:
        attribute: The attribute to look for on each card.

    Returns:
        int: Number of cards whose ``has_attribute(attribute)`` is truthy.
    """
    # NOTE(review): "contians" looks like a typo for "contains", but
    # renaming would break existing callers.
    if self.library is None:
        return 0
    resolve = self.library.load_card
    return sum(
        1 for code in self.cards if resolve(code).has_attribute(attribute)
    )
[ "def", "contians_attribute", "(", "self", ",", "attribute", ")", ":", "if", "self", ".", "library", "is", "None", ":", "return", "0", "load", "=", "self", ".", "library", ".", "load_card", "matches", "=", "0", "for", "code", "in", "self", ".", "cards",...
30.764706
15.588235
def duration(self):
    """
    Return the backoff duration for the current attempt and advance
    the attempt counter by one.

    :rtype: float
    """
    value = self.for_attempt(self.cur_attempt)
    self.cur_attempt = self.cur_attempt + 1
    return value
[ "def", "duration", "(", "self", ")", ":", "d", "=", "self", ".", "for_attempt", "(", "self", ".", "cur_attempt", ")", "self", ".", "cur_attempt", "+=", "1", "return", "d" ]
24.9
17.3
def domain(self, expparams):
    """
    Return a ``Domain`` for each input experiment parameter.

    :param numpy.ndarray expparams: Array of experimental parameters, with
        dtype agreeing with the ``expparams_dtype`` property (or ``None``
        when ``n_outcomes_constant`` is ``True``).
    :rtype: list of ``Domain``
    """
    domains = []
    for n_o in self.n_outcomes(expparams):
        # Outcomes are the integers 0 .. n_o - 1.
        domains.append(IntegerDomain(min=0, max=n_o - 1))
    return domains
[ "def", "domain", "(", "self", ",", "expparams", ")", ":", "return", "[", "IntegerDomain", "(", "min", "=", "0", ",", "max", "=", "n_o", "-", "1", ")", "for", "n_o", "in", "self", ".", "n_outcomes", "(", "expparams", ")", "]" ]
42.666667
23.833333
def __experimental_range(start, stop, var, cond, loc={}):
    '''Utility function made to reproduce range() with unit integer step
    but with the added possibility of specifying a condition
    on the looping variable (e.g. ``var % 2 == 0``).

    Args:
        start: First value of the range (inclusive).
        stop: End bound of the range (exclusive).
        var: Name under which the looping value is exposed to ``cond``.
        cond: Expression string evaluated once per value; values for which
            it is truthy are yielded.
        loc: Extra variables made visible to ``cond`` (never mutated).

    Yields:
        Each value of the range for which ``cond`` evaluates truthy.
    '''
    # BUG FIX: the original wrote into locals(), but mutating the dict
    # returned by locals() inside a function is not guaranteed to affect
    # name resolution (CPython treats it as a snapshot), so ``cond`` could
    # not reliably see ``var``.  Use an explicit environment dict instead.
    env = dict(loc)  # copy so the caller's mapping is never mutated
    step = 1 if start < stop else -1
    for value in range(start, stop, step):
        env[var] = value
        # NOTE: eval of caller-supplied code -- never pass untrusted input.
        if eval(cond, globals(), env):
            yield value
[ "def", "__experimental_range", "(", "start", ",", "stop", ",", "var", ",", "cond", ",", "loc", "=", "{", "}", ")", ":", "locals", "(", ")", ".", "update", "(", "loc", ")", "if", "start", "<", "stop", ":", "for", "__", "in", "range", "(", "start",...
36.5
15.875
def get_transactions(cls, address):
    """Return the IDs of all transactions related to *address*.

    Each configured API service is tried in order; the first one that
    answers without raising an ignorable error wins.

    :param address: The address in question.
    :type address: ``str``
    :raises ConnectionError: If all API services fail.
    :rtype: ``list`` of ``str``
    """
    for service in cls.GET_TRANSACTIONS_MAIN:
        try:
            return service(address)
        except cls.IGNORED_ERRORS:
            continue
    raise ConnectionError('All APIs are unreachable.')
[ "def", "get_transactions", "(", "cls", ",", "address", ")", ":", "for", "api_call", "in", "cls", ".", "GET_TRANSACTIONS_MAIN", ":", "try", ":", "return", "api_call", "(", "address", ")", "except", "cls", ".", "IGNORED_ERRORS", ":", "pass", "raise", "Connecti...
31.5
15
def getattr(self, c, attr, default=None, match_only=None):
    """
    Look up *attr* on the Decor in the Legend that matches component *c*.

    Args:
        c (component): The component to look up.
        attr (str): Name of the attribute to fetch from the matching Decor.
        default (str): Value returned when the Decor lacks the attribute.
        match_only (list of str): Component attributes used in the
            comparison; all of them when omitted.

    Returns:
        obj. The requested attribute of the matching Decor, or *default*.
    """
    decor = self.get_decor(c, match_only=match_only)
    try:
        value = getattr(decor, attr)
    except AttributeError:
        value = default
    return value
[ "def", "getattr", "(", "self", ",", "c", ",", "attr", ",", "default", "=", "None", ",", "match_only", "=", "None", ")", ":", "matching_decor", "=", "self", ".", "get_decor", "(", "c", ",", "match_only", "=", "match_only", ")", "try", ":", "return", "...
34.75
19.95
def html_to_text(html, base_url='', bodywidth=CONFIG_DEFAULT):
    """Convert an HTML message to plain text.

    Args:
        html: HTML source to convert.
        base_url: Base URL used by html2text to resolve relative links.
        bodywidth: Wrap width for the output; ``CONFIG_DEFAULT`` selects
            ``config.BODY_WIDTH``.

    Returns:
        The rendered plain text with trailing whitespace stripped.
    """
    # The two closures below monkey-patch the HTML2Text instance built
    # further down (they close over ``h``) so that character and entity
    # references inside <code>/<pre> blocks are HTML-escaped instead of
    # decoded, keeping literal source text intact.
    # NOTE(review): cgi.escape was removed in Python 3.8 -- confirm this
    # module runs on an interpreter that still provides it (html.escape
    # is the modern replacement, with different quoting defaults).
    def _patched_handle_charref(c):
        self = h  # bind the converter created below
        charref = self.charref(c)
        if self.code or self.pre:
            charref = cgi.escape(charref)
        self.o(charref, 1)

    def _patched_handle_entityref(c):
        self = h  # bind the converter created below
        entityref = self.entityref(c)
        if self.code or self.pre:
            # this expression was inversed.
            entityref = cgi.escape(entityref)
        self.o(entityref, 1)

    h = HTML2Text(baseurl=base_url, bodywidth=config.BODY_WIDTH if bodywidth is CONFIG_DEFAULT else bodywidth)
    # Install the patched handlers before running the conversion.
    h.handle_entityref = _patched_handle_entityref
    h.handle_charref = _patched_handle_charref
    return h.handle(html).rstrip()
[ "def", "html_to_text", "(", "html", ",", "base_url", "=", "''", ",", "bodywidth", "=", "CONFIG_DEFAULT", ")", ":", "def", "_patched_handle_charref", "(", "c", ")", ":", "self", "=", "h", "charref", "=", "self", ".", "charref", "(", "c", ")", "if", "sel...
35
14.818182
def strip_tx_flags(self, idx):
    """Unpack the 1-byte tx_flags radiotap field.

    :idx: int -- offset into the radiotap buffer
    :return: int -- offset advanced past the field
    :return: int -- the tx_flags value
    """
    # The field is aligned on a 2-byte boundary before unpacking.
    offset = Radiotap.align(idx, 2)
    (tx_flags,) = struct.unpack_from('<B', self._rtap, offset)
    return offset + 1, tx_flags
[ "def", "strip_tx_flags", "(", "self", ",", "idx", ")", ":", "idx", "=", "Radiotap", ".", "align", "(", "idx", ",", "2", ")", "tx_flags", ",", "=", "struct", ".", "unpack_from", "(", "'<B'", ",", "self", ".", "_rtap", ",", "idx", ")", "return", "idx...
27.5
13.1
def cfg(self):
    """Load the application configuration.

    Configuration is read from a Python module (named by the ``CONFIG``
    default, overridable through the environment), then individual
    upper-case keys are patched from matching environment variables.

    Returns:
        LStruct: the assembled configuration mapping.
    """
    config = LStruct(self.defaults)
    # Resolve the config module name: the environment variable wins over
    # the default, and the chosen name is stored under the CONFIG key.
    module = config['CONFIG'] = os.environ.get(
        CONFIGURATION_ENVIRON_VARIABLE, config['CONFIG'])
    if module:
        try:
            module = import_module(module)
            # Copy every public ALL_CAPS name from the module.
            config.update({
                name: getattr(module, name) for name in dir(module)
                if name == name.upper() and not name.startswith('_')
            })
        except ImportError as exc:
            # Import failure is not fatal: record it and keep the defaults.
            config.CONFIG = None
            self.logger.error("Error importing %s: %s", module, exc)

    # Patch configuration from ENV: only ALL_CAPS keys already present in
    # the config are considered.
    for name in config:
        if name.startswith('_') or name != name.upper() or name not in os.environ:
            continue
        try:
            # Environment values are JSON-decoded when possible...
            config[name] = json.loads(os.environ[name])
        except ValueError:
            # ...otherwise the env value is ignored and the existing
            # entry kept.
            pass

    return config
[ "def", "cfg", "(", "self", ")", ":", "config", "=", "LStruct", "(", "self", ".", "defaults", ")", "module", "=", "config", "[", "'CONFIG'", "]", "=", "os", ".", "environ", ".", "get", "(", "CONFIGURATION_ENVIRON_VARIABLE", ",", "config", "[", "'CONFIG'",...
32.903226
20.354839
def _set_factory_context(factory_class, bundle_context):
    # type: (type, Optional[BundleContext]) -> Optional[FactoryContext]
    """
    Turn the context data stored on a manipulated class into its
    FactoryContext object form and bind it to the bundle context.

    :param factory_class: A manipulated class
    :param bundle_context: The class bundle context
    :return: The factory context, None on error
    """
    try:
        # The factory context is stamped onto the class by the decorators.
        factory_context = getattr(factory_class, constants.IPOPO_FACTORY_CONTEXT)
    except AttributeError:
        # Class was never manipulated, or manipulated too badly.
        return None

    if not factory_context.completed:
        # Manipulation is only partial: treat the context as unusable.
        return None

    # Bind the factory to its bundle context before handing it back.
    factory_context.set_bundle_context(bundle_context)
    return factory_context
[ "def", "_set_factory_context", "(", "factory_class", ",", "bundle_context", ")", ":", "# type: (type, Optional[BundleContext]) -> Optional[FactoryContext]", "try", ":", "# Try to get the factory context (built using decorators)", "context", "=", "getattr", "(", "factory_class", ","...
35.73913
19.565217
def create_subscription(self, subscription):
    """CreateSubscription.

    Create a subscription.

    :param :class:`<Subscription> <azure.devops.v5_0.service_hooks.models.Subscription>` subscription: Subscription to be created.
    :rtype: :class:`<Subscription> <azure.devops.v5_0.service_hooks.models.Subscription>`
    """
    body = self._serialize.body(subscription, 'Subscription')
    raw = self._send(
        http_method='POST',
        location_id='fc50d02a-849f-41fb-8af1-0a5216103269',
        version='5.0',
        content=body,
    )
    return self._deserialize('Subscription', raw)
[ "def", "create_subscription", "(", "self", ",", "subscription", ")", ":", "content", "=", "self", ".", "_serialize", ".", "body", "(", "subscription", ",", "'Subscription'", ")", "response", "=", "self", ".", "_send", "(", "http_method", "=", "'POST'", ",", ...
57.333333
22.25
def instruction_INC_register(self, opcode, register):
    """
    Adds to the register. The carry bit is not affected, thus allowing this
    instruction to be used as a loop counter in multiple-precision
    computations. When operating on unsigned values, only the BEQ and BNE
    branches can be expected to behave consistently. When operating on twos
    complement values, all signed branches are correctly available.

    source code forms: INC Q; INCA; INCB

    CC bits "HNZVC": -aaa-

    Args:
        opcode: The opcode byte that dispatched here (unused by the handler).
        register: Register object whose value is incremented in place.
    """
    value = register.value
    # self.INC computes the incremented value (and updates condition codes).
    result = self.INC(value)
    # BUG FIX (cleanup): the original rebound the result of register.set()
    # to a local that was never used; the store is the side effect we need.
    register.set(result)
[ "def", "instruction_INC_register", "(", "self", ",", "opcode", ",", "register", ")", ":", "a", "=", "register", ".", "value", "r", "=", "self", ".", "INC", "(", "a", ")", "r", "=", "register", ".", "set", "(", "r", ")" ]
40.066667
21.8
def get(value):
    "Query to get the value."
    if not isinstance(value, Token):
        raise TypeError('value must be a token')
    if not hasattr(value, 'identifier'):
        raise TypeError('value must support an identifier')
    if not value.identifier:
        # Work on a copy so the caller's token is left untouched, and give
        # the copy a default identifier for the RETURN clause to reference.
        value = value.__class__(**value.__dict__)
        value.identifier = 'v'
    reference = Identifier(value.identifier)
    return Query([
        Match(value),
        Return(reference)
    ])
[ "def", "get", "(", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "Token", ")", ":", "raise", "TypeError", "(", "'value must be a token'", ")", "if", "not", "hasattr", "(", "value", ",", "'identifier'", ")", ":", "raise", "TypeError", "...
24.444444
19.555556
def receives(self, *args, **kwargs):
    """Pop the next `Request` and assert it matches.

    Returns None if the server is stopped.

    Pass a `Request` or request pattern to specify what client request to
    expect. See the tutorial for examples. Pass ``timeout`` as a keyword
    argument to override this server's ``request_timeout``.
    """
    timeout = kwargs.pop('timeout', self._request_timeout)
    end = time.time() + timeout
    # Remaining args/kwargs describe the expected request pattern.
    matcher = Matcher(*args, **kwargs)
    while not self._stopped:
        try:
            # Short timeout so we notice if the server is stopped.
            request = self._request_q.get(timeout=0.05)
        except Empty:
            # Nothing queued yet; only fail once the deadline passes.
            if time.time() > end:
                raise AssertionError('expected to receive %r, got nothing'
                                     % matcher.prototype)
        else:
            # First queued request decides the outcome either way.
            if matcher.matches(request):
                return request
            else:
                raise AssertionError('expected to receive %r, got %r'
                                     % (matcher.prototype, request))
[ "def", "receives", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "timeout", "=", "kwargs", ".", "pop", "(", "'timeout'", ",", "self", ".", "_request_timeout", ")", "end", "=", "time", ".", "time", "(", ")", "+", "timeout", "matc...
43.961538
18.846154
def make_folium_polyline(edge, edge_color, edge_width, edge_opacity, popup_attribute=None):
    """
    Turn a row from the gdf_edges GeoDataFrame into a folium PolyLine with
    attributes.

    Parameters
    ----------
    edge : GeoSeries
        a row from the gdf_edges GeoDataFrame
    edge_color : string
        color of the edge lines
    edge_width : numeric
        width of the edge lines
    edge_opacity : numeric
        opacity of the edge lines
    popup_attribute : string
        edge attribute to display in a pop-up when an edge is clicked,
        if None, no popup

    Returns
    -------
    pl : folium.PolyLine
    """
    # folium is an optional dependency; fail loudly if it is missing.
    if not folium:
        raise ImportError('The folium package must be installed to use this optional feature.')

    # geopandas hands coords back as (lon, lat) but folium wants (lat, lon),
    # so flip each pair while building the polyline's point list.
    locations = [(lat, lon) for lon, lat in edge['geometry'].coords]

    # Attach a popup only when an attribute was requested; folium doesn't
    # interpret html in the html argument (weird), so the value is
    # JSON-encoded rather than formatted with newlines.
    popup = None
    if popup_attribute is not None:
        popup_text = json.dumps(edge[popup_attribute])
        popup = folium.Popup(html=popup_text)

    # create a folium polyline with attributes
    return folium.PolyLine(locations=locations, popup=popup, color=edge_color,
                           weight=edge_width, opacity=edge_opacity)
[ "def", "make_folium_polyline", "(", "edge", ",", "edge_color", ",", "edge_width", ",", "edge_opacity", ",", "popup_attribute", "=", "None", ")", ":", "# check if we were able to import folium successfully", "if", "not", "folium", ":", "raise", "ImportError", "(", "'Th...
33.234043
23.276596
def callback(self, filename, lines, **kwargs):
    """Send log lines to the configured redis servers.

    Args:
        filename: Path of the log file the lines came from.
        lines: Iterable of raw log lines to ship.
        **kwargs: Extra formatting data; ``timestamp`` (if present) is
            consumed here and not forwarded.

    Raises:
        TransportException: when the redis pipeline cannot be executed.
    """
    self._logger.debug('Redis transport called')

    timestamp = self.get_timestamp(**kwargs)
    if kwargs.get('timestamp', False):
        del kwargs['timestamp']

    # Namespaces may come from per-file config; fall back to the default.
    namespaces = self._beaver_config.get_field('redis_namespace', filename)
    if not namespaces:
        namespaces = self._namespace
    namespaces = namespaces.split(",")
    # BUG FIX: the original did 'Got namespaces: '.join(namespaces), which
    # used the label as the *separator* instead of prefixing the message.
    self._logger.debug('Got namespaces: ' + ','.join(namespaces))

    data_type = self._data_type
    self._logger.debug('Got data type: ' + data_type)

    server = self._get_next_server()
    self._logger.debug('Got redis server: ' + server['url'])

    # One pipeline batches every push into a single round trip.
    pipeline = server['redis'].pipeline(transaction=False)

    callback_map = {
        self.LIST_DATA_TYPE: pipeline.rpush,
        self.CHANNEL_DATA_TYPE: pipeline.publish,
    }
    callback_method = callback_map[data_type]

    for line in lines:
        for namespace in namespaces:
            callback_method(
                namespace.strip(),
                self.format(filename, line, timestamp, **kwargs)
            )

    try:
        pipeline.execute()
    # BUG FIX: 'except X, e' is Python-2-only syntax; 'as' works on 2.6+.
    except redis.exceptions.RedisError as exception:
        self._logger.warn('Cannot push lines to redis server: ' + server['url'])
        raise TransportException(exception)
[ "def", "callback", "(", "self", ",", "filename", ",", "lines", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_logger", ".", "debug", "(", "'Redis transport called'", ")", "timestamp", "=", "self", ".", "get_timestamp", "(", "*", "*", "kwargs", ")", ...
33.52381
19.857143
def getZernike(self, index):
    """Retrieve a map representing the index-th Zernike polynomial.

    Results are memoized in ``self._dictCache`` so each polynomial is
    computed at most once per instance.

    Args:
        index (int): The index of the Zernike map to be generated,
            following Noll 1976 ordering.

    Returns:
        np.array: A map representing the index-th Zernike polynomial.
    """
    # IMPROVEMENT: test membership on the dict directly instead of
    # materializing list(self._dictCache.keys()) on every call.
    if index not in self._dictCache:
        self._dictCache[index] = self._polar(index, self._rhoMap, self._thetaMap)
    return self._dictCache[index]
[ "def", "getZernike", "(", "self", ",", "index", ")", ":", "if", "index", "not", "in", "list", "(", "self", ".", "_dictCache", ".", "keys", "(", ")", ")", ":", "self", ".", "_dictCache", "[", "index", "]", "=", "self", ".", "_polar", "(", "index", ...
32.941176
22.352941
def base(self, du):
    """Return the base CLB for a given DU"""
    key = 'base'
    if key not in self._by:
        # Lazily build the DU -> base-CLB lookup; floor 0 marks the base.
        self._by[key] = {
            clb.du: clb for clb in self.upi.values() if clb.floor == 0
        }
    return self._by[key][du]
[ "def", "base", "(", "self", ",", "du", ")", ":", "parameter", "=", "'base'", "if", "parameter", "not", "in", "self", ".", "_by", ":", "self", ".", "_by", "[", "parameter", "]", "=", "{", "}", "for", "clb", "in", "self", ".", "upi", ".", "values",...
36.888889
7.111111
def gram_schmidt(matrix, return_opt='orthonormal'):
    r"""Gram-Schmidt

    Orthonormalize the row vectors of the input matrix.

    Parameters
    ----------
    matrix : np.ndarray
        Input matrix array
    return_opt : str {orthonormal, orthogonal, both}
        Option to return u, e or both.

    Returns
    -------
    Lists of orthogonal vectors, u, and/or orthonormal vectors, e

    Notes
    -----
    Implementation from:
    https://en.wikipedia.org/wiki/Gram%E2%80%93Schmidt_process
    """
    if return_opt not in ('orthonormal', 'orthogonal', 'both'):
        raise ValueError('Invalid return_opt, options are: "orthonormal", '
                         '"orthogonal" or "both"')

    orthogonal = []
    orthonormal = []

    for row in matrix:
        # Subtract the projection onto every basis vector found so far.
        if not orthogonal:
            current = row
        else:
            current = row - sum([project(basis, row) for basis in orthogonal])
        orthogonal.append(current)
        orthonormal.append(current / np.linalg.norm(current, 2))

    u = np.array(orthogonal)
    e = np.array(orthonormal)

    if return_opt == 'orthogonal':
        return u
    elif return_opt == 'both':
        return u, e
    return e
[ "def", "gram_schmidt", "(", "matrix", ",", "return_opt", "=", "'orthonormal'", ")", ":", "if", "return_opt", "not", "in", "(", "'orthonormal'", ",", "'orthogonal'", ",", "'both'", ")", ":", "raise", "ValueError", "(", "'Invalid return_opt, options are: \"orthonormal...
24.482759
23.206897
def get_code(self, code, card_id=None, check_consume=True):
    """
    Query information for a card code.

    Args:
        code: The card code to look up.
        card_id: Optional card id to scope the lookup.
        check_consume: Whether to check consume status; only sent in the
            payload when falsy (the API defaults to checking).

    Returns:
        The API response from the ``card/code/get`` endpoint.
    """
    payload = {'code': code}
    if card_id:
        payload['card_id'] = card_id
    if not check_consume:
        payload['check_consume'] = check_consume
    return self._post(
        'card/code/get',
        data=payload
    )
[ "def", "get_code", "(", "self", ",", "code", ",", "card_id", "=", "None", ",", "check_consume", "=", "True", ")", ":", "card_data", "=", "{", "'code'", ":", "code", "}", "if", "card_id", ":", "card_data", "[", "'card_id'", "]", "=", "card_id", "if", ...
25.733333
15.066667
def getSet(self, setID):
    '''
    Gets the information of one specific build using its Brickset set ID.

    :param str setID: The ID of the build from Brickset.
    :returns: A single Build object.
    :rtype: :class:`brickfront.build.Build`
    :raises brickfront.errors.InvalidSetID: If no sets exist by that ID.
    '''
    # Credentials plus the requested set ID form the query parameters.
    params = {
        'apiKey': self.apiKey,
        'userHash': self.userHash,
        'setID': setID
    }
    url = Client.ENDPOINT.format('getSet')
    returned = get(url, params=params)
    # Raises for API-level failures before any parsing is attempted.
    self.checkResponse(returned)

    # Put it into a Build class
    root = ET.fromstring(returned.text)
    v = [Build(i, self) for i in root]

    # Return to user: an empty result set means the ID is unknown.
    try:
        return v[0]
    except IndexError:
        raise InvalidSetID('There is no set with the ID of `{}`.'.format(setID))
[ "def", "getSet", "(", "self", ",", "setID", ")", ":", "params", "=", "{", "'apiKey'", ":", "self", ".", "apiKey", ",", "'userHash'", ":", "self", ".", "userHash", ",", "'setID'", ":", "setID", "}", "url", "=", "Client", ".", "ENDPOINT", ".", "format"...
31.571429
19.642857
def _bound_waveform(wave, indep_min, indep_max):
    """Add independent variable vector bounds if they are not in vector.

    Mutates *wave* in place: the independent vector is clipped to
    [indep_min, indep_max], inserting the exact bound values when no
    point is already (approximately) equal to them, and the dependent
    vector is re-interpolated onto the clipped grid.
    """
    indep_min, indep_max = _validate_min_max(wave, indep_min, indep_max)
    indep_vector = copy.copy(wave._indep_vector)
    # If either bound is a float, an integer vector could not hold it:
    # promote the whole vector to float first.
    if (
        isinstance(indep_min, float) or isinstance(indep_max, float)
    ) and indep_vector.dtype.name.startswith("int"):
        indep_vector = indep_vector.astype(float)
    # Insert the lower bound unless an approximately-equal point exists.
    min_pos = np.searchsorted(indep_vector, indep_min)
    if not np.isclose(indep_min, indep_vector[min_pos], FP_RTOL, FP_ATOL):
        indep_vector = np.insert(indep_vector, min_pos, indep_min)
    # Insert the upper bound unless an approximately-equal point exists.
    # NOTE(review): indep_vector[max_pos] would raise IndexError if
    # indep_max exceeded every existing point; presumably
    # _validate_min_max guarantees indep_max <= the last point -- confirm.
    max_pos = np.searchsorted(indep_vector, indep_max)
    if not np.isclose(indep_max, indep_vector[max_pos], FP_RTOL, FP_ATOL):
        indep_vector = np.insert(indep_vector, max_pos, indep_max)
    # Re-interpolate the dependent data, then slice both vectors to the
    # inclusive [min_pos, max_pos] window.
    dep_vector = _interp_dep_vector(wave, indep_vector)
    wave._indep_vector = indep_vector[min_pos : max_pos + 1]
    wave._dep_vector = dep_vector[min_pos : max_pos + 1]
[ "def", "_bound_waveform", "(", "wave", ",", "indep_min", ",", "indep_max", ")", ":", "indep_min", ",", "indep_max", "=", "_validate_min_max", "(", "wave", ",", "indep_min", ",", "indep_max", ")", "indep_vector", "=", "copy", ".", "copy", "(", "wave", ".", ...
57.588235
19.294118
def update_flags(self, idlist, flags):
    """
    Thin backwards-compatibility wrapper around
    ``build_update(flags=...)`` followed by ``update_bugs``.

    :param idlist: Bug id(s) to update.
    :param flags: Flag payload forwarded to ``build_update``.
    :return: Whatever ``update_bugs`` returns.
    """
    update = self.build_update(flags=flags)
    return self.update_bugs(idlist, update)
[ "def", "update_flags", "(", "self", ",", "idlist", ",", "flags", ")", ":", "return", "self", ".", "update_bugs", "(", "idlist", ",", "self", ".", "build_update", "(", "flags", "=", "flags", ")", ")" ]
38.8
11.2
def message_convert_rx(message_rx):
    """Convert a received CANAL message into a python-can Message."""
    # Decode the flag bits once up front.
    flags = message_rx.flags
    return Message(
        timestamp=message_rx.timestamp,
        is_remote_frame=bool(flags & IS_REMOTE_FRAME),
        is_extended_id=bool(flags & IS_ID_TYPE),
        is_error_frame=bool(flags & IS_ERROR_FRAME),
        arbitration_id=message_rx.id,
        dlc=message_rx.sizeData,
        # Only the first sizeData bytes of the buffer are payload.
        data=message_rx.data[:message_rx.sizeData],
    )
[ "def", "message_convert_rx", "(", "message_rx", ")", ":", "is_extended_id", "=", "bool", "(", "message_rx", ".", "flags", "&", "IS_ID_TYPE", ")", "is_remote_frame", "=", "bool", "(", "message_rx", ".", "flags", "&", "IS_REMOTE_FRAME", ")", "is_error_frame", "=",...
48.615385
13.461538
def _connect(self):
    """
    Connects the bot to the server and identifies itself.
    """
    self.conn = self._create_connection()
    # Run the TCP connect in the background so registration can proceed.
    spawn(self.conn.connect)
    # IRC registration: NICK first, then the USER command.
    self.set_nick(self.nick)
    self.cmd(u'USER', u'{0} 3 * {1}'.format(self.nick, self.realname))
[ "def", "_connect", "(", "self", ")", ":", "self", ".", "conn", "=", "self", ".", "_create_connection", "(", ")", "spawn", "(", "self", ".", "conn", ".", "connect", ")", "self", ".", "set_nick", "(", "self", ".", "nick", ")", "self", ".", "cmd", "("...
35.625
12.125
def commits(self, drop_collections=True):
    """
    Returns a table of git log data, with "commits" as rows/observations.

    :param bool drop_collections: Defaults to True. When True, columns
        holding lists/dicts/sets are dropped from the result.
    :return: pandas.DataFrame
    """
    frame = self._data
    if drop_collections is True:
        frame = self._drop_collections(frame)
    return frame
[ "def", "commits", "(", "self", ",", "drop_collections", "=", "True", ")", ":", "base_df", "=", "self", ".", "_data", "if", "drop_collections", "is", "True", ":", "out_df", "=", "self", ".", "_drop_collections", "(", "base_df", ")", "else", ":", "out_df", ...
33.642857
20.785714
def compose(*validators):
    """
    Implement composition of validators, applied right-to-left.

    For instance

    >>> utf8_not_empty = compose(utf8, not_empty)

    runs ``not_empty`` first and ``utf8`` on its result.
    """
    def composed_validator(value):
        result = value
        for check in reversed(validators):
            result = check(result)
        return result
    # Give the composite a descriptive name for debugging.
    composed_validator.__name__ = 'compose(%s)' % ','.join(
        fn.__name__ for fn in validators)
    return composed_validator
[ "def", "compose", "(", "*", "validators", ")", ":", "def", "composed_validator", "(", "value", ")", ":", "out", "=", "value", "for", "validator", "in", "reversed", "(", "validators", ")", ":", "out", "=", "validator", "(", "out", ")", "return", "out", ...
30.071429
12.357143
def get(self):
    """
    Constructs a EngagementContextContext

    :returns: twilio.rest.studio.v1.flow.engagement.engagement_context.EngagementContextContext
    :rtype: twilio.rest.studio.v1.flow.engagement.engagement_context.EngagementContextContext
    """
    solution = self._solution
    return EngagementContextContext(
        self._version,
        flow_sid=solution['flow_sid'],
        engagement_sid=solution['engagement_sid'],
    )
[ "def", "get", "(", "self", ")", ":", "return", "EngagementContextContext", "(", "self", ".", "_version", ",", "flow_sid", "=", "self", ".", "_solution", "[", "'flow_sid'", "]", ",", "engagement_sid", "=", "self", ".", "_solution", "[", "'engagement_sid'", "]...
38.333333
21.666667
def foreground(color):
    """Set the foreground color.

    Unknown color names are silently ignored.  On win32 the new foreground
    is combined with the current background attribute; elsewhere the ANSI
    escape sequence for the color is emitted.
    """
    if color not in foreground_colors:
        return
    if is_win32:
        # NOTE(review): this assigns a *local* last_fg; if module state is
        # meant to remember the last foreground (as the module-level
        # last_bg suggests), a `global last_fg` is probably missing here
        # -- confirm upstream.
        last_fg = foreground_colors[color][1]
        set_color_win32(last_fg | last_bg)
    else:
        set_color_ansi(foreground_colors[color][0])
[ "def", "foreground", "(", "color", ")", ":", "if", "color", "not", "in", "foreground_colors", ":", "return", "if", "is_win32", ":", "last_fg", "=", "foreground_colors", "[", "color", "]", "[", "1", "]", "set_color_win32", "(", "last_fg", "|", "last_bg", ")...
23.6
16.6
def crc32(filename):
    '''
    Calculates the CRC-32 checksum for a file.

    Using CRC32 because security isn't the issue and we don't need perfect
    non-collision -- we just need to know if a file has changed, and crc32
    is much faster than any hashlib algorithm.

    Args:
        filename: Path of the file to checksum.

    Returns:
        int: The CRC-32 of the file contents (0 for an empty file).
    '''
    result = 0
    with open(filename, 'rb') as fin:
        # PERF FIX: the original read 48 bytes per iteration, making the
        # loop syscall-bound; 64 KiB chunks keep memory flat while reading
        # at full speed.  iter() with a b'' sentinel stops at EOF.
        for chunk in iter(lambda: fin.read(65536), b''):
            result = zlib.crc32(chunk, result)
    return result
[ "def", "crc32", "(", "filename", ")", ":", "result", "=", "0", "with", "open", "(", "filename", ",", "'rb'", ")", "as", "fin", ":", "while", "True", ":", "chunk", "=", "fin", ".", "read", "(", "48", ")", "if", "len", "(", "chunk", ")", "==", "0...
31.647059
19.411765
def on_result(self, task, result):
    '''Called every result'''
    if not result:
        return
    # A result can only be persisted when the task carries full identity.
    if all(key in task for key in ('taskid', 'project', 'url')):
        logger.info('result %s:%s %s -> %.30r' % (
            task['project'], task['taskid'], task['url'], result))
        return self.resultdb.save(
            project=task['project'],
            taskid=task['taskid'],
            url=task['url'],
            result=result
        )
    logger.warning('result UNKNOW -> %.30r' % result)
    return
[ "def", "on_result", "(", "self", ",", "task", ",", "result", ")", ":", "if", "not", "result", ":", "return", "if", "'taskid'", "in", "task", "and", "'project'", "in", "task", "and", "'url'", "in", "task", ":", "logger", ".", "info", "(", "'result %s:%s...
36.3125
15.3125
def strip_label(mapper, connection, target):
    """Strip labels at the ORM level so that unique=True means something."""
    label = target.label
    if label is not None:
        target.label = label.strip()
[ "def", "strip_label", "(", "mapper", ",", "connection", ",", "target", ")", ":", "if", "target", ".", "label", "is", "not", "None", ":", "target", ".", "label", "=", "target", ".", "label", ".", "strip", "(", ")" ]
47.5
3.75
def _check_for_life_signs(self):
    """Check Connection for life signs.

    First check if any data has been sent; if not, send a heartbeat to the
    remote server. If we have not received any data whatsoever within two
    intervals, we need to raise an exception so that we can close the
    connection.

    :rtype: bool
    """
    if not self._running.is_set():
        # Connection already shut down; stop the heartbeat cycle.
        return False
    if self._writes_since_check == 0:
        # Nothing was sent since the last check: prove we are alive.
        self.send_heartbeat_impl()
    self._lock.acquire()
    try:
        if self._reads_since_check == 0:
            self._threshold += 1
            if self._threshold >= 2:
                # Two silent intervals in a row: consider the connection
                # dead, stop running and surface the error.
                self._running.clear()
                self._raise_or_append_exception()
                return False
        else:
            # Traffic observed: reset the silence counter.
            self._threshold = 0
    finally:
        # Reset the interval counters before releasing the lock, even on
        # the early-return/error paths above.
        self._reads_since_check = 0
        self._writes_since_check = 0
        self._lock.release()

    return self._start_new_timer()
[ "def", "_check_for_life_signs", "(", "self", ")", ":", "if", "not", "self", ".", "_running", ".", "is_set", "(", ")", ":", "return", "False", "if", "self", ".", "_writes_since_check", "==", "0", ":", "self", ".", "send_heartbeat_impl", "(", ")", "self", ...
32.03125
13.90625
def _insert_vars(self, path: str, data: dict) -> str:
    """Inserts variables into the ESI URL path.

    Args:
        path: raw ESI URL path
        data: data to insert into the URL

    Returns:
        path with variables filled
    """
    data = data.copy()
    # Substitute one placeholder per pass until none remain.
    while True:
        found = re.search(self.VAR_REPLACE_REGEX, path)
        if not found:
            return path
        # NOTE(review): a variable missing from `data` is substituted with
        # the literal string 'None' -- confirm that is intended.
        path = path.replace(found.group(0), str(data.get(found.group(1))))
[ "def", "_insert_vars", "(", "self", ",", "path", ":", "str", ",", "data", ":", "dict", ")", "->", "str", ":", "data", "=", "data", ".", "copy", "(", ")", "while", "True", ":", "match", "=", "re", ".", "search", "(", "self", ".", "VAR_REPLACE_REGEX"...
31.833333
15.277778
def do_dump(self, arg):
    '''
    Output all bytes waiting in output queue.
    '''
    if self.arm.is_connected():
        print(self.arm.dump())
    else:
        print(self.style.error('Error: ', 'Arm is not connected.'))
[ "def", "do_dump", "(", "self", ",", "arg", ")", ":", "if", "not", "self", ".", "arm", ".", "is_connected", "(", ")", ":", "print", "(", "self", ".", "style", ".", "error", "(", "'Error: '", ",", "'Arm is not connected.'", ")", ")", "return", "print", ...
39.666667
16.333333
def flash_progress_callback(action, progress_string, percentage):
    """Callback that can be used with ``JLink.flash()``.

    Renders a console progress bar for each step of the flash.  The
    'compare' action is ignored entirely.

    Args:
        action (str): the current action being invoked
        progress_string (str): the current step in the progress
        percentage (int): the percent to which the current step has been done

    Returns:
        ``None``
    """
    if action.lower() == 'compare':
        return None
    # Percentage is clamped to 100 before rendering.
    return progress_bar(min(100, percentage), 100, prefix=action)
[ "def", "flash_progress_callback", "(", "action", ",", "progress_string", ",", "percentage", ")", ":", "if", "action", ".", "lower", "(", ")", "!=", "'compare'", ":", "return", "progress_bar", "(", "min", "(", "100", ",", "percentage", ")", ",", "100", ",",...
30.571429
24.52381
def init(self, dict_or_str, val=None, warn=True): """initialize one or several options. Arguments --------- `dict_or_str` a dictionary if ``val is None``, otherwise a key. If `val` is provided `dict_or_str` must be a valid key. `val` value for key Details ------- Only known keys are accepted. Known keys are in `CMAOptions.defaults()` """ # dic = dict_or_key if val is None else {dict_or_key:val} self.check(dict_or_str) dic = dict_or_str if val is not None: dic = {dict_or_str:val} for key, val in dic.items(): key = self.corrected_key(key) if key not in CMAOptions.defaults(): # TODO: find a better solution? if warn: print('Warning in cma.CMAOptions.init(): key ' + str(key) + ' ignored') else: self[key] = val return self
[ "def", "init", "(", "self", ",", "dict_or_str", ",", "val", "=", "None", ",", "warn", "=", "True", ")", ":", "# dic = dict_or_key if val is None else {dict_or_key:val}", "self", ".", "check", "(", "dict_or_str", ")", "dic", "=", "dict_or_str", "if", "val", "is...
30.757576
19.363636
def wrap(self, wrapper):
    """
    Allows the underlying socket to be wrapped, as by an SSL connection.

    :param wrapper: A callable taking, as its first argument, a
                    socket.socket object.  The callable must return a
                    valid proxy for the socket.socket object, which will
                    subsequently be used to communicate on the connection.

    Note:
        Be extremely careful with calling this method after the TCP
        connection has been initiated.  The action of this method affects
        both sending and receiving streams simultaneously, and no attempt
        is made to deal with buffered data, other than ensuring that both
        the sending and receiving threads are at stopping points.
    """
    if self._recv_thread and self._send_thread:
        # Have to suspend the send/recv threads: holding both locks
        # blocks each worker at its next safe point.
        self._recv_lock.acquire()
        self._send_lock.acquire()

    # Wrap the socket while no other thread can touch it.
    self._sock = wrapper(self._sock)

    # OK, restart the send/recv threads
    if self._recv_thread and self._send_thread:
        # Release our locks (reverse of acquisition order).
        self._send_lock.release()
        self._recv_lock.release()
[ "def", "wrap", "(", "self", ",", "wrapper", ")", ":", "if", "self", ".", "_recv_thread", "and", "self", ".", "_send_thread", ":", "# Have to suspend the send/recv threads", "self", ".", "_recv_lock", ".", "acquire", "(", ")", "self", ".", "_send_lock", ".", ...
39.0625
18.9375
def parse_elem(element):
    """Parse a OSM node XML element.

    Args:
        element (etree.Element): XML Element to parse

    Returns:
        Node: Object representing parsed element
    """
    node_id = int(element.get('id'))
    lat = element.get('lat')
    lon = element.get('lon')
    return Node(node_id, lat, lon, *_parse_flags(element))
[ "def", "parse_elem", "(", "element", ")", ":", "ident", "=", "int", "(", "element", ".", "get", "(", "'id'", ")", ")", "latitude", "=", "element", ".", "get", "(", "'lat'", ")", "longitude", "=", "element", ".", "get", "(", "'lon'", ")", "flags", "...
26.125
17.5625
def clear_cache_delete_selected(modeladmin, request, queryset): """ A delete action that will invalidate cache after being called. """ result = delete_selected(modeladmin, request, queryset) # A result of None means that the delete happened. if not result and hasattr(modeladmin, 'invalidate_cache'): modeladmin.invalidate_cache(queryset=queryset) return result
[ "def", "clear_cache_delete_selected", "(", "modeladmin", ",", "request", ",", "queryset", ")", ":", "result", "=", "delete_selected", "(", "modeladmin", ",", "request", ",", "queryset", ")", "# A result of None means that the delete happened.", "if", "not", "result", ...
35.363636
20.090909
def service_remove(path, service_name): ''' Remove the definition of a docker-compose service This does not rm the container This wil re-write your yaml file. Comments will be lost. Indentation is set to 2 spaces path Path where the docker-compose file is stored on the server service_name Name of the service to remove CLI Example: .. code-block:: bash salt myminion dockercompose.service_remove /path/where/docker-compose/stored service_name ''' compose_result, err = __load_docker_compose(path) if err: return err services = compose_result['compose_content']['services'] if service_name not in services: return __standardize_result(False, 'Service {0} did not exists'.format(service_name), None, None) del services[service_name] return __dump_compose_file(path, compose_result, 'Service {0} is removed from {1}'.format(service_name, path), already_existed=True)
[ "def", "service_remove", "(", "path", ",", "service_name", ")", ":", "compose_result", ",", "err", "=", "__load_docker_compose", "(", "path", ")", "if", "err", ":", "return", "err", "services", "=", "compose_result", "[", "'compose_content'", "]", "[", "'servi...
37.034483
24.413793
def _generate_limit_items(lower, upper): """Yield key, value pairs for limits dictionary. Yield pairs of key, value where key is ``lower``, ``upper`` or ``fixed``. A key, value pair is emitted if the bounds are not None. """ # Use value + 0 to convert any -0.0 to 0.0 which looks better. if lower is not None and upper is not None and lower == upper: yield 'fixed', upper + 0 else: if lower is not None: yield 'lower', lower + 0 if upper is not None: yield 'upper', upper + 0
[ "def", "_generate_limit_items", "(", "lower", ",", "upper", ")", ":", "# Use value + 0 to convert any -0.0 to 0.0 which looks better.", "if", "lower", "is", "not", "None", "and", "upper", "is", "not", "None", "and", "lower", "==", "upper", ":", "yield", "'fixed'", ...
38.5
15.571429
def schedules(self): ''' Returns details of the posting schedules associated with a social media profile. ''' url = PATHS['GET_SCHEDULES'] % self.id self.__schedules = self.api.get(url=url) return self.__schedules
[ "def", "schedules", "(", "self", ")", ":", "url", "=", "PATHS", "[", "'GET_SCHEDULES'", "]", "%", "self", ".", "id", "self", ".", "__schedules", "=", "self", ".", "api", ".", "get", "(", "url", "=", "url", ")", "return", "self", ".", "__schedules" ]
21.636364
26.181818
def close_spider(self, _spider): """ Write out to file """ self.df['date_download'] = pd.to_datetime( self.df['date_download'], errors='coerce', infer_datetime_format=True ) self.df['date_modify'] = pd.to_datetime( self.df['date_modify'], errors='coerce', infer_datetime_format=True ) self.df['date_publish'] = pd.to_datetime( self.df['date_publish'], errors='coerce', infer_datetime_format=True ) self.df.to_pickle(self.full_path) self.log.info("Wrote to Pandas to %s", self.full_path)
[ "def", "close_spider", "(", "self", ",", "_spider", ")", ":", "self", ".", "df", "[", "'date_download'", "]", "=", "pd", ".", "to_datetime", "(", "self", ".", "df", "[", "'date_download'", "]", ",", "errors", "=", "'coerce'", ",", "infer_datetime_format", ...
39.733333
19.066667
def get_values(self, context_type): """ Get the values valid on this line. :param context_type: "ENV" or "LABEL" :return: values of given type valid on this line """ if context_type.upper() == "ENV": return self.envs elif context_type.upper() == "LABEL": return self.labels
[ "def", "get_values", "(", "self", ",", "context_type", ")", ":", "if", "context_type", ".", "upper", "(", ")", "==", "\"ENV\"", ":", "return", "self", ".", "envs", "elif", "context_type", ".", "upper", "(", ")", "==", "\"LABEL\"", ":", "return", "self", ...
31.272727
8.727273
def _GetStat(self): """Retrieves information about the file entry. Returns: VFSStat: a stat object. """ stat_object = super(FakeFileEntry, self)._GetStat() location = getattr(self.path_spec, 'location', None) if location: file_data = self._file_system.GetDataByPath(location) if file_data is not None: stat_object.size = len(file_data) return stat_object
[ "def", "_GetStat", "(", "self", ")", ":", "stat_object", "=", "super", "(", "FakeFileEntry", ",", "self", ")", ".", "_GetStat", "(", ")", "location", "=", "getattr", "(", "self", ".", "path_spec", ",", "'location'", ",", "None", ")", "if", "location", ...
24.8125
20.125
def __parse_enabled_plugins(self): """ :returns: [(plugin_name, plugin_package, plugin_config), ...] :rtype: list of tuple """ return [ ( plugin_name, plugin['package'], plugin) for plugin_name, plugin in self.raw_config_dict.items() if ( plugin_name not in self.BASE_SCHEMA.keys()) and isinstance( plugin, dict) and plugin.get('enabled')]
[ "def", "__parse_enabled_plugins", "(", "self", ")", ":", "return", "[", "(", "plugin_name", ",", "plugin", "[", "'package'", "]", ",", "plugin", ")", "for", "plugin_name", ",", "plugin", "in", "self", ".", "raw_config_dict", ".", "items", "(", ")", "if", ...
34.714286
13.571429
def cmd_link_ports(self): '''show available ports''' ports = mavutil.auto_detect_serial(preferred_list=['*FTDI*',"*Arduino_Mega_2560*", "*3D_Robotics*", "*USB_to_UART*", '*PX4*', '*FMU*']) for p in ports: print("%s : %s : %s" % (p.device, p.description, p.hwid))
[ "def", "cmd_link_ports", "(", "self", ")", ":", "ports", "=", "mavutil", ".", "auto_detect_serial", "(", "preferred_list", "=", "[", "'*FTDI*'", ",", "\"*Arduino_Mega_2560*\"", ",", "\"*3D_Robotics*\"", ",", "\"*USB_to_UART*\"", ",", "'*PX4*'", ",", "'*FMU*'", "]"...
58.8
34
def sync_time(self): """Sets the time on the pyboard to match the time on the host.""" now = time.localtime(time.time()) self.remote(set_time, (now.tm_year, now.tm_mon, now.tm_mday, now.tm_wday + 1, now.tm_hour, now.tm_min, now.tm_sec, 0)) return now
[ "def", "sync_time", "(", "self", ")", ":", "now", "=", "time", ".", "localtime", "(", "time", ".", "time", "(", ")", ")", "self", ".", "remote", "(", "set_time", ",", "(", "now", ".", "tm_year", ",", "now", ".", "tm_mon", ",", "now", ".", "tm_mda...
51.333333
19.833333
def all_subclasses(cls): """ Recursively generate of all the subclasses of class cls. """ for subclass in cls.__subclasses__(): yield subclass for subc in all_subclasses(subclass): yield subc
[ "def", "all_subclasses", "(", "cls", ")", ":", "for", "subclass", "in", "cls", ".", "__subclasses__", "(", ")", ":", "yield", "subclass", "for", "subc", "in", "all_subclasses", "(", "subclass", ")", ":", "yield", "subc" ]
37
9.666667
def dragMoveEvent( self, event ): """ Handles the drag move event. :param event | <QDragEvent> """ tags = nativestring(event.mimeData().text()) if ( event.source() == self ): event.acceptProposedAction() elif ( tags ): event.acceptProposedAction() else: super(XMultiTagEdit, self).dragMoveEvent(event)
[ "def", "dragMoveEvent", "(", "self", ",", "event", ")", ":", "tags", "=", "nativestring", "(", "event", ".", "mimeData", "(", ")", ".", "text", "(", ")", ")", "if", "(", "event", ".", "source", "(", ")", "==", "self", ")", ":", "event", ".", "acc...
29.857143
10.714286
def remove(self, item): """ Transactional implementation of :func:`List.remove(item) <hazelcast.proxy.list.List.remove>` :param item: (object), the specified item to be removed. :return: (bool), ``true`` if the item is removed successfully, ``false`` otherwise. """ check_not_none(item, "item can't be none") return self._encode_invoke(transactional_list_remove_codec, item=self._to_data(item))
[ "def", "remove", "(", "self", ",", "item", ")", ":", "check_not_none", "(", "item", ",", "\"item can't be none\"", ")", "return", "self", ".", "_encode_invoke", "(", "transactional_list_remove_codec", ",", "item", "=", "self", ".", "_to_data", "(", "item", ")"...
49.222222
28.333333
def run(self, stat_name, criticity, commands, repeat, mustache_dict=None): """Run the commands (in background). - stats_name: plugin_name (+ header) - criticity: criticity of the trigger - commands: a list of command line with optional {{mustache}} - If True, then repeat the action - mustache_dict: Plugin stats (can be use within {{mustache}}) Return True if the commands have been ran. """ if (self.get(stat_name) == criticity and not repeat) or \ not self.start_timer.finished(): # Action already executed => Exit return False logger.debug("{} action {} for {} ({}) with stats {}".format( "Repeat" if repeat else "Run", commands, stat_name, criticity, mustache_dict)) # Run all actions in background for cmd in commands: # Replace {{arg}} by the dict one (Thk to {Mustache}) if pystache_tag: cmd_full = pystache.render(cmd, mustache_dict) else: cmd_full = cmd # Execute the action logger.info("Action triggered for {} ({}): {}".format(stat_name, criticity, cmd_full)) logger.debug("Stats value for the trigger: {}".format( mustache_dict)) try: Popen(cmd_full, shell=True) except OSError as e: logger.error("Can't execute the action ({})".format(e)) self.set(stat_name, criticity) return True
[ "def", "run", "(", "self", ",", "stat_name", ",", "criticity", ",", "commands", ",", "repeat", ",", "mustache_dict", "=", "None", ")", ":", "if", "(", "self", ".", "get", "(", "stat_name", ")", "==", "criticity", "and", "not", "repeat", ")", "or", "n...
40
19.439024
def color_key(tkey): """ Function which returns a colorized TKey name given its type """ name = tkey.GetName() classname = tkey.GetClassName() for class_regex, color in _COLOR_MATCHER: if class_regex.match(classname): return colored(name, color=color) return name
[ "def", "color_key", "(", "tkey", ")", ":", "name", "=", "tkey", ".", "GetName", "(", ")", "classname", "=", "tkey", ".", "GetClassName", "(", ")", "for", "class_regex", ",", "color", "in", "_COLOR_MATCHER", ":", "if", "class_regex", ".", "match", "(", ...
30.2
9.8
def _GetStat(self): """Retrieves information about the file entry. Returns: VFSStat: a stat object. """ stat_object = super(FVDEFileEntry, self)._GetStat() stat_object.size = self._fvde_volume.get_size() return stat_object
[ "def", "_GetStat", "(", "self", ")", ":", "stat_object", "=", "super", "(", "FVDEFileEntry", ",", "self", ")", ".", "_GetStat", "(", ")", "stat_object", ".", "size", "=", "self", ".", "_fvde_volume", ".", "get_size", "(", ")", "return", "stat_object" ]
22.272727
20.363636
def get_status(self, json_status=None): """ Returns status of for json """ if json_status: self.json_status = json_status if self.json_status not in AjaxResponseStatus.choices: raise ValueError( "Invalid status selected: '{}'".format(self.json_status)) return self.json_status
[ "def", "get_status", "(", "self", ",", "json_status", "=", "None", ")", ":", "if", "json_status", ":", "self", ".", "json_status", "=", "json_status", "if", "self", ".", "json_status", "not", "in", "AjaxResponseStatus", ".", "choices", ":", "raise", "ValueEr...
34.1
17.5
def get_pip_requirement_set(self, arguments, use_remote_index, use_wheels=False): """ Get the unpacked requirement(s) specified by the caller by running pip. :param arguments: The command line arguments to ``pip install ...`` (a list of strings). :param use_remote_index: A boolean indicating whether pip is allowed to connect to the main package index (http://pypi.python.org by default). :param use_wheels: Whether pip and pip-accel are allowed to use wheels_ (:data:`False` by default for backwards compatibility with callers that use pip-accel as a Python API). :returns: A :class:`pip.req.RequirementSet` object created by pip. :raises: Any exceptions raised by pip. """ # Compose the pip command line arguments. This is where a lot of the # core logic of pip-accel is hidden and it uses some esoteric features # of pip so this method is heavily commented. command_line = [] # Use `--download' to instruct pip to download requirement(s) into # pip-accel's local source distribution index directory. This has the # following documented side effects (see `pip install --help'): # 1. It disables the installation of requirements (without using the # `--no-install' option which is deprecated and slated for removal # in pip 7.x). # 2. It ignores requirements that are already installed (because # pip-accel doesn't actually need to re-install requirements that # are already installed we will have work around this later, but # that seems fairly simple to do). command_line.append('--download=%s' % self.config.source_index) # Use `--find-links' to point pip at pip-accel's local source # distribution index directory. This ensures that source distribution # archives are never downloaded more than once (regardless of the HTTP # cache that was introduced in pip 6.x). 
command_line.append('--find-links=%s' % create_file_url(self.config.source_index)) # Use `--no-binary=:all:' to ignore wheel distributions by default in # order to preserve backwards compatibility with callers that expect a # requirement set consisting only of source distributions that can be # converted to `dumb binary distributions'. if not use_wheels and self.arguments_allow_wheels(arguments): command_line.append('--no-binary=:all:') # Use `--no-index' to force pip to only consider source distribution # archives contained in pip-accel's local source distribution index # directory. This enables pip-accel to ask pip "Can the local source # distribution index satisfy all requirements in the given requirement # set?" which enables pip-accel to keep pip off the internet unless # absolutely necessary :-). if not use_remote_index: command_line.append('--no-index') # Use `--no-clean' to instruct pip to unpack the source distribution # archives and *not* clean up the unpacked source distributions # afterwards. This enables pip-accel to replace pip's installation # logic with cached binary distribution archives. command_line.append('--no-clean') # Use `--build-directory' to instruct pip to unpack the source # distribution archives to a temporary directory managed by pip-accel. # We will clean up the build directory when we're done using the # unpacked source distributions. command_line.append('--build-directory=%s' % self.build_directory) # Append the user's `pip install ...' arguments to the command line # that we just assembled. command_line.extend(arguments) logger.info("Executing command: pip install %s", ' '.join(command_line)) # Clear the build directory to prevent PreviousBuildDirError exceptions. self.clear_build_directory() # During the pip 6.x upgrade pip-accel switched to using `pip install # --download' which can produce an interactive prompt as described in # issue 51 [1]. 
The documented way [2] to get rid of this interactive # prompt is pip's --exists-action option, but due to what is most # likely a bug in pip this doesn't actually work. The environment # variable $PIP_EXISTS_ACTION does work however, so if the user didn't # set it we will set a reasonable default for them. # [1] https://github.com/paylogic/pip-accel/issues/51 # [2] https://pip.pypa.io/en/latest/reference/pip.html#exists-action-option os.environ.setdefault('PIP_EXISTS_ACTION', 'w') # Initialize and run the `pip install' command. command = InstallCommand() opts, args = command.parse_args(command_line) if not opts.ignore_installed: # If the user didn't supply the -I, --ignore-installed option we # will forcefully disable the option. Refer to the documentation of # the AttributeOverrides class for further details. opts = AttributeOverrides(opts, ignore_installed=False) requirement_set = command.run(opts, args) # Make sure the output of pip and pip-accel are not intermingled. sys.stdout.flush() if requirement_set is None: raise NothingToDoError(""" pip didn't generate a requirement set, most likely you specified an empty requirements file? """) else: return self.transform_pip_requirement_set(requirement_set)
[ "def", "get_pip_requirement_set", "(", "self", ",", "arguments", ",", "use_remote_index", ",", "use_wheels", "=", "False", ")", ":", "# Compose the pip command line arguments. This is where a lot of the", "# core logic of pip-accel is hidden and it uses some esoteric features", "# of...
61.537634
25.666667
def format_timestamp(t): """Cast given object to a Timestamp and return a nicely formatted string""" # Timestamp is only valid for 1678 to 2262 try: datetime_str = str(pd.Timestamp(t)) except OutOfBoundsDatetime: datetime_str = str(t) try: date_str, time_str = datetime_str.split() except ValueError: # catch NaT and others that don't split nicely return datetime_str else: if time_str == '00:00:00': return date_str else: return '{}T{}'.format(date_str, time_str)
[ "def", "format_timestamp", "(", "t", ")", ":", "# Timestamp is only valid for 1678 to 2262", "try", ":", "datetime_str", "=", "str", "(", "pd", ".", "Timestamp", "(", "t", ")", ")", "except", "OutOfBoundsDatetime", ":", "datetime_str", "=", "str", "(", "t", ")...
30.888889
16.277778
def add(self, name, graph): """ Index and add a :ref:`networkx.Graph <networkx:graph>` to the :class:`.GraphCollection`. Parameters ---------- name : hashable Unique name used to identify the `graph`. graph : :ref:`networkx.Graph <networkx:graph>` Raises ------ ValueError If `name` has already been used in this :class:`.GraphCollection`\. """ if name in self: raise ValueError("{0} exists in this GraphCollection".format(name)) elif hasattr(self, unicode(name)): raise ValueError("Name conflicts with an existing attribute") indexed_graph = self.index(name, graph) # Add all edges to the `master_graph`. for s, t, attrs in indexed_graph.edges(data=True): attrs.update({'graph': name}) self.master_graph.add_edge(s, t, **attrs) # Add all node attributes to the `master_graph`. for n, attrs in indexed_graph.nodes(data=True): for k,v in attrs.iteritems(): if k not in self.master_graph.node[n]: self.master_graph.node[n][k] = {} self.master_graph.node[n][k][name] = v dict.__setitem__(self, name, indexed_graph)
[ "def", "add", "(", "self", ",", "name", ",", "graph", ")", ":", "if", "name", "in", "self", ":", "raise", "ValueError", "(", "\"{0} exists in this GraphCollection\"", ".", "format", "(", "name", ")", ")", "elif", "hasattr", "(", "self", ",", "unicode", "...
35.277778
19.666667
def _get_model_param_names(cls): r"""Get parameter names for the model""" # fetch model parameters if hasattr(cls, 'set_model_params'): # introspect the constructor arguments to find the model parameters # to represent args, varargs, kw, default = getargspec_no_self(cls.set_model_params) if varargs is not None: raise RuntimeError("PyEMMA models should always specify their parameters in the signature" " of their set_model_params (no varargs). %s doesn't follow this convention." % (cls,)) return args else: # No parameters known return []
[ "def", "_get_model_param_names", "(", "cls", ")", ":", "# fetch model parameters", "if", "hasattr", "(", "cls", ",", "'set_model_params'", ")", ":", "# introspect the constructor arguments to find the model parameters", "# to represent", "args", ",", "varargs", ",", "kw", ...
48.6
22.066667
def until_synced(self, timeout=None): """Return a tornado Future; resolves when all subordinate clients are synced""" futures = [r.until_synced(timeout) for r in dict.values(self.children)] yield tornado.gen.multi(futures, quiet_exceptions=tornado.gen.TimeoutError)
[ "def", "until_synced", "(", "self", ",", "timeout", "=", "None", ")", ":", "futures", "=", "[", "r", ".", "until_synced", "(", "timeout", ")", "for", "r", "in", "dict", ".", "values", "(", "self", ".", "children", ")", "]", "yield", "tornado", ".", ...
71.5
21.25
def get_batch(self, batch_id): """ Check to see if the requested batch_id is in the current chain. If so, find the batch with the batch_id and return it. This is done by finding the block and searching for the batch. :param batch_id (string): The id of the batch requested. :return: The batch with the batch_id. """ payload = self._get_data_by_id(batch_id, 'commit_store_get_batch') batch = Batch() batch.ParseFromString(payload) return batch
[ "def", "get_batch", "(", "self", ",", "batch_id", ")", ":", "payload", "=", "self", ".", "_get_data_by_id", "(", "batch_id", ",", "'commit_store_get_batch'", ")", "batch", "=", "Batch", "(", ")", "batch", ".", "ParseFromString", "(", "payload", ")", "return"...
30.941176
22.235294
def set_or_clear_breakpoint(self): """Set/clear breakpoint""" if self.data: editor = self.get_current_editor() editor.debugger.toogle_breakpoint()
[ "def", "set_or_clear_breakpoint", "(", "self", ")", ":", "if", "self", ".", "data", ":", "editor", "=", "self", ".", "get_current_editor", "(", ")", "editor", ".", "debugger", ".", "toogle_breakpoint", "(", ")" ]
37.2
7.4
def update_repository(self, new_repository_info, repository_id, project=None): """UpdateRepository. [Preview API] Updates the Git repository with either a new repo name or a new default branch. :param :class:`<GitRepository> <azure.devops.v5_1.git.models.GitRepository>` new_repository_info: Specify a new repo name or a new default branch of the repository :param str repository_id: The name or ID of the repository. :param str project: Project ID or project name :rtype: :class:`<GitRepository> <azure.devops.v5_1.git.models.GitRepository>` """ route_values = {} if project is not None: route_values['project'] = self._serialize.url('project', project, 'str') if repository_id is not None: route_values['repositoryId'] = self._serialize.url('repository_id', repository_id, 'str') content = self._serialize.body(new_repository_info, 'GitRepository') response = self._send(http_method='PATCH', location_id='225f7195-f9c7-4d14-ab28-a83f7ff77e1f', version='5.1-preview.1', route_values=route_values, content=content) return self._deserialize('GitRepository', response)
[ "def", "update_repository", "(", "self", ",", "new_repository_info", ",", "repository_id", ",", "project", "=", "None", ")", ":", "route_values", "=", "{", "}", "if", "project", "is", "not", "None", ":", "route_values", "[", "'project'", "]", "=", "self", ...
64.75
29.5
def crypto_sign(message, sk): """ Signs the message ``message`` using the secret key ``sk`` and returns the signed message. :param message: bytes :param sk: bytes :rtype: bytes """ signed = ffi.new("unsigned char[]", len(message) + crypto_sign_BYTES) signed_len = ffi.new("unsigned long long *") rc = lib.crypto_sign(signed, signed_len, message, len(message), sk) ensure(rc == 0, 'Unexpected library error', raising=exc.RuntimeError) return ffi.buffer(signed, signed_len[0])[:]
[ "def", "crypto_sign", "(", "message", ",", "sk", ")", ":", "signed", "=", "ffi", ".", "new", "(", "\"unsigned char[]\"", ",", "len", "(", "message", ")", "+", "crypto_sign_BYTES", ")", "signed_len", "=", "ffi", ".", "new", "(", "\"unsigned long long *\"", ...
29.611111
19.611111
def h_boiling_Huang_Sheer(rhol, rhog, mul, kl, Hvap, sigma, Cpl, q, Tsat, angle=35.): r'''Calculates the two-phase boiling heat transfer coefficient of a liquid and gas flowing inside a plate and frame heat exchanger, as developed in [1]_ and again in the thesis [2]_. Depends on the properties of the fluid and not the heat exchanger's geometry. .. math:: h = 1.87\times10^{-3}\left(\frac{k_l}{d_o}\right)\left(\frac{q d_o} {k_l T_{sat}}\right)^{0.56} \left(\frac{H_{vap} d_o^2}{\alpha_l^2}\right)^{0.31} Pr_l^{0.33} d_o = 0.0146\theta\left[\frac{2\sigma}{g(\rho_l-\rho_g)}\right]^{0.5}\\ \theta = 35^\circ Note that this model depends on the specific heat flux involved and the saturation temperature of the fluid. Parameters ---------- rhol : float Density of the liquid [kg/m^3] rhog : float Density of the gas [kg/m^3] mul : float Viscosity of the liquid [Pa*s] kl : float Thermal conductivity of liquid [W/m/K] Hvap : float Heat of vaporization of the fluid at the system pressure, [J/kg] sigma : float Surface tension of liquid [N/m] Cpl : float Heat capacity of liquid [J/kg/K] q : float Heat flux, [W/m^2] Tsat : float Actual saturation temperature of the fluid at the system pressure, [K] angle : float, optional Contact angle of the bubbles with the wall, assumed 35 for refrigerants in the development of the correlation [degrees] Returns ------- h : float Boiling heat transfer coefficient [W/m^2/K] Notes ----- Developed with 222 data points for R134a and R507A with only two of them for ammonia and R12. Chevron angles ranged from 28 to 60 degrees, heat fluxes from 1.85 kW/m^2 to 10.75 kW/m^2, mass fluxes 5.6 to 52.25 kg/m^2/s, qualities from 0.21 to 0.95, and saturation temperatures in degrees Celcius of 1.9 to 13.04. The inclusion of the saturation temperature makes this correlation have limited predictive power for other fluids whose saturation tempratures might be much higher or lower than those used in the development of the correlation. 
For this reason it should be regarded with caution. As first published in [1]_ a power of two was missing in the correlation for bubble diameter in the dimensionless group with a power of 0.31. That made the correlation non-dimensional. A second variant of this correlation was also published in [2]_ but with less accuracy because it was designed to mimick the standard pool boiling curve. The correlation is reviewed in [3]_, but without the corrected power. It was also changed there to use hydraulic diameter, not bubble diameter. It still ranked as one of the more accurate correlations reviewed. [4]_ also reviewed it without the corrected power but found it predicted the lowest results of those surveyed. Examples -------- >>> h_boiling_Huang_Sheer(rhol=567., rhog=18.09, kl=0.086, mul=156E-6, ... Hvap=9E5, sigma=0.02, Cpl=2200, q=1E4, Tsat=279.15) 4401.055635078054 References ---------- .. [1] Huang, Jianchang, Thomas J. Sheer, and Michael Bailey-McEwan. "Heat Transfer and Pressure Drop in Plate Heat Exchanger Refrigerant Evaporators." International Journal of Refrigeration 35, no. 2 (March 2012): 325-35. doi:10.1016/j.ijrefrig.2011.11.002. .. [2] Huang, Jianchang. "Performance Analysis of Plate Heat Exchangers Used as Refrigerant Evaporators," 2011. Thesis. http://wiredspace.wits.ac.za/handle/10539/9779 .. [3] Amalfi, Raffaele L., Farzad Vakili-Farahani, and John R. Thome. "Flow Boiling and Frictional Pressure Gradients in Plate Heat Exchangers. Part 1: Review and Experimental Database." International Journal of Refrigeration 61 (January 2016): 166-84. doi:10.1016/j.ijrefrig.2015.07.010. .. [4] Eldeeb, Radia, Vikrant Aute, and Reinhard Radermacher. "A Survey of Correlations for Heat Transfer and Pressure Drop for Evaporation and Condensation in Plate Heat Exchangers." International Journal of Refrigeration 65 (May 2016): 12-26. doi:10.1016/j.ijrefrig.2015.11.013. 
''' do = 0.0146*angle*(2.*sigma/(g*(rhol - rhog)))**0.5 Prl = Prandtl(Cp=Cpl, mu=mul, k=kl) alpha_l = thermal_diffusivity(k=kl, rho=rhol, Cp=Cpl) h = 1.87E-3*(kl/do)*(q*do/(kl*Tsat))**0.56*(Hvap*do**2/alpha_l**2)**0.31*Prl**0.33 return h
[ "def", "h_boiling_Huang_Sheer", "(", "rhol", ",", "rhog", ",", "mul", ",", "kl", ",", "Hvap", ",", "sigma", ",", "Cpl", ",", "q", ",", "Tsat", ",", "angle", "=", "35.", ")", ":", "do", "=", "0.0146", "*", "angle", "*", "(", "2.", "*", "sigma", ...
43.711538
26.673077
def handle_has_members(self, _, __, tokens: ParseResults) -> ParseResults: """Handle list relations like ``p(X) hasMembers list(p(Y), p(Z), ...)``.""" return self._handle_list_helper(tokens, HAS_MEMBER)
[ "def", "handle_has_members", "(", "self", ",", "_", ",", "__", ",", "tokens", ":", "ParseResults", ")", "->", "ParseResults", ":", "return", "self", ".", "_handle_list_helper", "(", "tokens", ",", "HAS_MEMBER", ")" ]
72
17.666667
def urljoin(base, url, allow_fragments=True): """Join a base URL and a possibly relative URL to form an absolute interpretation of the latter.""" if not base: return url if not url: return base bscheme, bnetloc, bpath, bparams, bquery, bfragment = \ urlparse(base, '', allow_fragments) scheme, netloc, path, params, query, fragment = \ urlparse(url, bscheme, allow_fragments) if scheme != bscheme or scheme not in uses_relative: return url if scheme in uses_netloc: if netloc: return urlunparse((scheme, netloc, path, params, query, fragment)) netloc = bnetloc if path[:1] == '/': return urlunparse((scheme, netloc, path, params, query, fragment)) if not path and not params: path = bpath params = bparams if not query: query = bquery return urlunparse((scheme, netloc, path, params, query, fragment)) segments = bpath.split('/')[:-1] + path.split('/') # XXX The stuff below is bogus in various ways... if segments[-1] == '.': segments[-1] = '' while '.' in segments: segments.remove('.') while 1: i = 1 n = len(segments) - 1 while i < n: if (segments[i] == '..' and segments[i-1] not in ('', '..')): del segments[i-1:i+1] break i = i+1 else: break if segments == ['', '..']: segments[-1] = '' elif len(segments) >= 2 and segments[-1] == '..': segments[-2:] = [''] return urlunparse((scheme, netloc, '/'.join(segments), params, query, fragment))
[ "def", "urljoin", "(", "base", ",", "url", ",", "allow_fragments", "=", "True", ")", ":", "if", "not", "base", ":", "return", "url", "if", "not", "url", ":", "return", "base", "bscheme", ",", "bnetloc", ",", "bpath", ",", "bparams", ",", "bquery", ",...
34.333333
14.72549
def is_valid_combination(row): """ This is a filtering function. Filtering functions should return True if combination is valid and False otherwise. Test row that is passed here can be incomplete. To prevent search for unnecessary items filtering function is executed with found subset of data to validate it. """ n = len(row) if n > 1: # Brand Y does not support Windows 98 if "98" == row[1] and "Brand Y" == row[0]: return False # Brand X does not work with XP if "XP" == row[1] and "Brand X" == row[0]: return False if n > 4: # Contractors are billed in 30 min increments if "Contr." == row[3] and row[4] < 30: return False return True
[ "def", "is_valid_combination", "(", "row", ")", ":", "n", "=", "len", "(", "row", ")", "if", "n", ">", "1", ":", "# Brand Y does not support Windows 98", "if", "\"98\"", "==", "row", "[", "1", "]", "and", "\"Brand Y\"", "==", "row", "[", "0", "]", ":",...
27.62963
19.851852
def worker(): """ Initialize the distributed environment. """ import torch import torch.distributed as dist from torch.multiprocessing import Process import numpy as np print("Initializing distributed pytorch") os.environ['MASTER_ADDR'] = str(args.master_addr) os.environ['MASTER_PORT'] = str(args.master_port) # Use TCP backend. Gloo needs nightly, where it currently fails with # dist.init_process_group('gloo', rank=args.rank, # AttributeError: module 'torch.distributed' has no attribute 'init_process_group' dist.init_process_group('tcp', rank=args.rank, world_size=args.size) tensor = torch.ones(args.size_mb*250*1000)*(args.rank+1) time_list = [] outfile = 'out' if args.rank == 0 else '/dev/null' log = util.FileLogger(outfile) for i in range(args.iters): # print('before: rank ', args.rank, ' has data ', tensor[0]) start_time = time.perf_counter() if args.rank == 0: dist.send(tensor=tensor, dst=1) else: dist.recv(tensor=tensor, src=0) elapsed_time_ms = (time.perf_counter() - start_time)*1000 time_list.append(elapsed_time_ms) # print('after: rank ', args.rank, ' has data ', tensor[0]) rate = args.size_mb/(elapsed_time_ms/1000) log('%03d/%d added %d MBs in %.1f ms: %.2f MB/second' % (i, args.iters, args.size_mb, elapsed_time_ms, rate)) min = np.min(time_list) median = np.median(time_list) log(f"min: {min:8.2f}, median: {median:8.2f}, mean: {np.mean(time_list):8.2f}")
[ "def", "worker", "(", ")", ":", "import", "torch", "import", "torch", ".", "distributed", "as", "dist", "from", "torch", ".", "multiprocessing", "import", "Process", "import", "numpy", "as", "np", "print", "(", "\"Initializing distributed pytorch\"", ")", "os", ...
36.975
20.85
def _call_retry(self, force_retry): """Call request and retry up to max_attempts times (or none if self.max_attempts=1)""" last_exception = None for i in range(self.max_attempts): try: log.info("Calling %s %s" % (self.method, self.url)) response = self.requests_method( self.url, data=self.data, params=self.params, headers=self.headers, timeout=(self.connect_timeout, self.read_timeout), verify=self.verify_ssl, ) if response is None: log.warn("Got response None") if self._method_is_safe_to_retry(): delay = 0.5 + i * 0.5 log.info("Waiting %s sec and Retrying since call is a %s" % (delay, self.method)) time.sleep(delay) continue else: raise PyMacaronCoreException("Call %s %s returned empty response" % (self.method, self.url)) return response except Exception as e: last_exception = e retry = force_retry if isinstance(e, ReadTimeout): # Log enough to help debugging... log.warn("Got a ReadTimeout calling %s %s" % (self.method, self.url)) log.warn("Exception was: %s" % str(e)) resp = e.response if not resp: log.info("Requests error has no response.") # TODO: retry=True? Is it really safe? else: b = resp.content log.info("Requests has a response with content: " + pprint.pformat(b)) if self._method_is_safe_to_retry(): # It is safe to retry log.info("Retrying since call is a %s" % self.method) retry = True elif isinstance(e, ConnectTimeout): log.warn("Got a ConnectTimeout calling %s %s" % (self.method, self.url)) log.warn("Exception was: %s" % str(e)) # ConnectTimeouts are safe to retry whatever the call... retry = True if retry: continue else: raise e # max_attempts has been reached: propagate the last received Exception if not last_exception: last_exception = Exception("Reached max-attempts (%s). Giving up calling %s %s" % (self.max_attempts, self.method, self.url)) raise last_exception
[ "def", "_call_retry", "(", "self", ",", "force_retry", ")", ":", "last_exception", "=", "None", "for", "i", "in", "range", "(", "self", ".", "max_attempts", ")", ":", "try", ":", "log", ".", "info", "(", "\"Calling %s %s\"", "%", "(", "self", ".", "met...
42.84375
21.125
def get_group_details(self, group_url='', group_id=0): ''' a method to retrieve details about a meetup group :param group_url: string with meetup urlname of group :param group_id: int with meetup id for group :return: dictionary with group details inside [json] key group_details = self._reconstruct_group(**{}) ''' # https://www.meetup.com/meetup_api/docs/:urlname/#get title = '%s.get_group_details' % self.__class__.__name__ # validate inputs input_fields = { 'group_url': group_url, 'group_id': group_id } for key, value in input_fields.items(): if value: object_title = '%s(%s=%s)' % (title, key, str(value)) self.fields.validate(value, '.%s' % key, object_title) if not group_url and not group_id: raise IndexError('%s requires either a group_url or group_id argument.' % title) # construct request fields if group_id: url = '%s/2/groups?fields=last_event,next_event,join_info&group_id=%s' % (self.endpoint, group_id) else: url = '%s/%s?fields=last_event,next_event,join_info' % (self.endpoint, group_url) # send request group_details = self._get_request(url) # cosntruct method output if group_id: if 'results' in group_details['json'].keys(): if group_details['json']['results']: group_details['json'] = self._reconstruct_group(group_details['json']['results'][0]) else: group_details['json'] = self._reconstruct_group(group_details['json']) return group_details
[ "def", "get_group_details", "(", "self", ",", "group_url", "=", "''", ",", "group_id", "=", "0", ")", ":", "# https://www.meetup.com/meetup_api/docs/:urlname/#get\r", "title", "=", "'%s.get_group_details'", "%", "self", ".", "__class__", ".", "__name__", "# validate i...
37.777778
26.755556
def match(self, situation): """Accept a situation (input) and return a MatchSet containing the classifier rules whose conditions match the situation. If appropriate per the algorithm managing this classifier set, create new rules to ensure sufficient coverage of the possible actions. Usage: match_set = model.match(situation) Arguments: situation: The situation for which a match set is desired. Return: A MatchSet instance for the given situation, drawn from the classifier rules in this classifier set. """ # Find the conditions that match against the current situation, and # group them according to which action(s) they recommend. by_action = {} for condition, actions in self._population.items(): if not condition(situation): continue for action, rule in actions.items(): if action in by_action: by_action[action][condition] = rule else: by_action[action] = {condition: rule} # Construct the match set. match_set = MatchSet(self, situation, by_action) # If an insufficient number of actions are recommended, create some # new rules (condition/action pairs) until there are enough actions # being recommended. if self._algorithm.covering_is_required(match_set): # Ask the algorithm to provide a new classifier rule to add to # the population. rule = self._algorithm.cover(match_set) # Ensure that the condition provided by the algorithm does # indeed match the situation. If not, there is a bug in the # algorithm. assert rule.condition(situation) # Add the new classifier, getting back a list of the rule(s) # which had to be removed to make room for it. replaced = self.add(rule) # Remove the rules that were removed the population from the # action set, as well. Note that they may not appear in the # action set, in which case nothing is done. 
for replaced_rule in replaced: action = replaced_rule.action condition = replaced_rule.condition if action in by_action and condition in by_action[action]: del by_action[action][condition] if not by_action[action]: del by_action[action] # Add the new classifier to the action set. This is done after # the replaced rules are removed, just in case the algorithm # provided us with a rule that was already present and was # displaced. if rule.action not in by_action: by_action[rule.action] = {} by_action[rule.action][rule.condition] = rule # Reconstruct the match set with the modifications we just # made. match_set = MatchSet(self, situation, by_action) # Return the newly created match set. return match_set
[ "def", "match", "(", "self", ",", "situation", ")", ":", "# Find the conditions that match against the current situation, and", "# group them according to which action(s) they recommend.", "by_action", "=", "{", "}", "for", "condition", ",", "actions", "in", "self", ".", "_...
42.243243
21.662162
def get_requested_form(self, request): """Returns an instance of a form requested.""" flow_name = self.get_flow_name() flow_key = '%s_flow' % self.flow_type flow_enabled = self.enabled form_data = None if (flow_enabled and request.method == 'POST' and request.POST.get(flow_key, False) and request.POST[flow_key] == flow_name): form_data = request.POST form = self.init_form( form_data, widget_attrs=self.flow_args.get('widget_attrs', None), template=self.get_template_name(self.flow_args.get('template', None)) ) # Attach flow identifying field to differentiate among several possible forms. form.fields[flow_key] = forms.CharField(required=True, initial=flow_name, widget=forms.HiddenInput) form.flow_enabled = flow_enabled form.flow_disabled_text = self.disabled_text return form
[ "def", "get_requested_form", "(", "self", ",", "request", ")", ":", "flow_name", "=", "self", ".", "get_flow_name", "(", ")", "flow_key", "=", "'%s_flow'", "%", "self", ".", "flow_type", "flow_enabled", "=", "self", ".", "enabled", "form_data", "=", "None", ...
38
19.76
def img(self): '''return a cv image for the icon''' SlipThumbnail.img(self) if self.rotation: # rotate the image mat = cv2.getRotationMatrix2D((self.height//2, self.width//2), -self.rotation, 1.0) self._rotated = cv2.warpAffine(self._img, mat, (self.height, self.width)) else: self._rotated = self._img return self._rotated
[ "def", "img", "(", "self", ")", ":", "SlipThumbnail", ".", "img", "(", "self", ")", "if", "self", ".", "rotation", ":", "# rotate the image", "mat", "=", "cv2", ".", "getRotationMatrix2D", "(", "(", "self", ".", "height", "//", "2", ",", "self", ".", ...
36.545455
22.363636
def clear(self, *args): """ Clears the LED matrix with a single colour, default is black / off e.g. ap.clear() or ap.clear(r, g, b) or colour = (r, g, b) ap.clear(colour) """ black = (0, 0, 0) # default if len(args) == 0: colour = black elif len(args) == 1: colour = args[0] elif len(args) == 3: colour = args else: raise ValueError('Pixel arguments must be given as (r, g, b) or r, g, b') self.set_pixels([colour] * 64)
[ "def", "clear", "(", "self", ",", "*", "args", ")", ":", "black", "=", "(", "0", ",", "0", ",", "0", ")", "# default", "if", "len", "(", "args", ")", "==", "0", ":", "colour", "=", "black", "elif", "len", "(", "args", ")", "==", "1", ":", "...
23.75
20.416667
def annotate_event(ev, key, ts=None, namespace=None, **kwargs): """Add an annotation to an event.""" ann = {} if ts is None: ts = time.time() ann["ts"] = ts ann["key"] = key if namespace is None and "HUMILIS_ENVIRONMENT" in os.environ: namespace = "{}:{}:{}".format( os.environ.get("HUMILIS_ENVIRONMENT"), os.environ.get("HUMILIS_LAYER"), os.environ.get("HUMILIS_STAGE")) if namespace is not None: ann["namespace"] = namespace ann.update(kwargs) _humilis = ev.get("_humilis", {}) if not _humilis: ev["_humilis"] = {"annotation": [ann]} else: ev["_humilis"]["annotation"] = _humilis.get("annotation", []) # Clean up previous annotations with the same key delete_annotations(ev, key) ev["_humilis"]["annotation"].append(ann) return ev
[ "def", "annotate_event", "(", "ev", ",", "key", ",", "ts", "=", "None", ",", "namespace", "=", "None", ",", "*", "*", "kwargs", ")", ":", "ann", "=", "{", "}", "if", "ts", "is", "None", ":", "ts", "=", "time", ".", "time", "(", ")", "ann", "[...
32.961538
16.730769
def _initialize_distance_grid(self): """Initialize the distance grid by calls to _grid_dist.""" p = [self._grid_distance(i) for i in range(self.num_neurons)] return np.array(p)
[ "def", "_initialize_distance_grid", "(", "self", ")", ":", "p", "=", "[", "self", ".", "_grid_distance", "(", "i", ")", "for", "i", "in", "range", "(", "self", ".", "num_neurons", ")", "]", "return", "np", ".", "array", "(", "p", ")" ]
49.25
11.75
def _count_counters(self, counter): """Return all elements count from Counter """ if getattr(self, 'as_set', False): return len(set(counter)) else: return sum(counter.values())
[ "def", "_count_counters", "(", "self", ",", "counter", ")", ":", "if", "getattr", "(", "self", ",", "'as_set'", ",", "False", ")", ":", "return", "len", "(", "set", "(", "counter", ")", ")", "else", ":", "return", "sum", "(", "counter", ".", "values"...
32.285714
5.428571
def snow_dual(im, voxel_size=1, boundary_faces=['top', 'bottom', 'left', 'right', 'front', 'back'], marching_cubes_area=False): r""" Analyzes an image that has been partitioned into void and solid regions and extracts the void and solid phase geometry as well as network connectivity. Parameters ---------- im : ND-array Binary image in the Boolean form with True’s as void phase and False’s as solid phase. It can process the inverted configuration of the boolean image as well, but output labelling of phases will be inverted and solid phase properties will be assigned to void phase properties labels which will cause confusion while performing the simulation. voxel_size : scalar The resolution of the image, expressed as the length of one side of a voxel, so the volume of a voxel would be **voxel_size**-cubed. The default is 1, which is useful when overlaying the PNM on the original image since the scale of the image is alway 1 unit lenth per voxel. boundary_faces : list of strings Boundary faces labels are provided to assign hypothetical boundary nodes having zero resistance to transport process. For cubical geometry, the user can choose ‘left’, ‘right’, ‘top’, ‘bottom’, ‘front’ and ‘back’ face labels to assign boundary nodes. If no label is assigned then all six faces will be selected as boundary nodes automatically which can be trimmed later on based on user requirements. marching_cubes_area : bool If ``True`` then the surface area and interfacial area between regions will be using the marching cube algorithm. This is a more accurate representation of area in extracted network, but is quite slow, so it is ``False`` by default. The default method simply counts voxels so does not correctly account for the voxelated nature of the images. Returns ------- A dictionary containing all the void and solid phase size data, as well as the network topological information. The dictionary names use the OpenPNM convention (i.e. 
'pore.coords', 'throat.conns') so it may be converted directly to an OpenPNM network object using the ``update`` command. * ``net``: A dictionary containing all the void and solid phase size data, as well as the network topological information. The dictionary names use the OpenPNM convention (i.e. 'pore.coords', 'throat.conns') so it may be converted directly to an OpenPNM network object using the ``update`` command. * ``im``: The binary image of the void space * ``dt``: The combined distance transform of the image * ``regions``: The void and solid space partitioned into pores and solids phases using a marker based watershed with the peaks found by the SNOW Algorithm. References ---------- [1] Gostick, J. "A versatile and efficient network extraction algorithm using marker-based watershed segmenation". Phys. Rev. E 96, 023307 (2017) [2] Khan, ZA et al. "Dual network extraction algorithm to investigate multiple transport processes in porous materials: Image-based modeling of pore and grain-scale processes. Computers and Chemical Engineering. 123(6), 64-77 (2019) """ # ------------------------------------------------------------------------- # SNOW void phase pore_regions = snow_partitioning(im, return_all=True) # SNOW solid phase solid_regions = snow_partitioning(~im, return_all=True) # ------------------------------------------------------------------------- # Combined Distance transform of two phases. 
pore_dt = pore_regions.dt solid_dt = solid_regions.dt dt = pore_dt + solid_dt # Calculates combined void and solid regions for dual network extraction pore_regions = pore_regions.regions solid_regions = solid_regions.regions pore_region = pore_regions*im solid_region = solid_regions*~im solid_num = sp.amax(pore_regions) solid_region = solid_region + solid_num solid_region = solid_region * ~im regions = pore_region + solid_region b_num = sp.amax(regions) # ------------------------------------------------------------------------- # Boundary Conditions regions = add_boundary_regions(regions=regions, faces=boundary_faces) # ------------------------------------------------------------------------- # Padding distance transform to extract geometrical properties dt = pad_faces(im=dt, faces=boundary_faces) # ------------------------------------------------------------------------- # Extract void,solid and throat information from image net = regions_to_network(im=regions, dt=dt, voxel_size=voxel_size) # ------------------------------------------------------------------------- # Extract marching cube surface area and interfacial area of regions if marching_cubes_area: areas = region_surface_areas(regions=regions) interface_area = region_interface_areas(regions=regions, areas=areas, voxel_size=voxel_size) net['pore.surface_area'] = areas * voxel_size**2 net['throat.area'] = interface_area.area # ------------------------------------------------------------------------- # Find void to void, void to solid and solid to solid throat conns loc1 = net['throat.conns'][:, 0] < solid_num loc2 = net['throat.conns'][:, 1] >= solid_num loc3 = net['throat.conns'][:, 1] < b_num pore_solid_labels = loc1 * loc2 * loc3 loc4 = net['throat.conns'][:, 0] >= solid_num loc5 = net['throat.conns'][:, 0] < b_num solid_solid_labels = loc4 * loc2 * loc5 * loc3 loc6 = net['throat.conns'][:, 1] < solid_num pore_pore_labels = loc1 * loc6 loc7 = net['throat.conns'][:, 1] >= b_num boundary_throat_labels = 
loc5 * loc7 solid_labels = ((net['pore.label'] > solid_num) * ~ (net['pore.label'] > b_num)) boundary_labels = net['pore.label'] > b_num b_sa = sp.zeros(len(boundary_labels[boundary_labels == 1.0])) # ------------------------------------------------------------------------- # Calculates void interfacial area that connects with solid and vice versa p_conns = net['throat.conns'][:, 0][pore_solid_labels] ps = net['throat.area'][pore_solid_labels] p_sa = sp.bincount(p_conns, ps) s_conns = net['throat.conns'][:, 1][pore_solid_labels] s_pa = sp.bincount(s_conns, ps) s_pa = sp.trim_zeros(s_pa) # remove pore surface area labels p_solid_surf = sp.concatenate((p_sa, s_pa, b_sa)) # ------------------------------------------------------------------------- # Calculates interfacial area using marching cube method if marching_cubes_area: ps_c = net['throat.area'][pore_solid_labels] p_sa_c = sp.bincount(p_conns, ps_c) s_pa_c = sp.bincount(s_conns, ps_c) s_pa_c = sp.trim_zeros(s_pa_c) # remove pore surface area labels p_solid_surf = sp.concatenate((p_sa_c, s_pa_c, b_sa)) # ------------------------------------------------------------------------- # Adding additional information of dual network net['pore.solid_void_area'] = (p_solid_surf * voxel_size**2) net['throat.void'] = pore_pore_labels net['throat.interconnect'] = pore_solid_labels net['throat.solid'] = solid_solid_labels net['throat.boundary'] = boundary_throat_labels net['pore.void'] = net['pore.label'] <= solid_num net['pore.solid'] = solid_labels net['pore.boundary'] = boundary_labels # ------------------------------------------------------------------------- # label boundary cells net = label_boundary_cells(network=net, boundary_faces=boundary_faces) # ------------------------------------------------------------------------- # assign out values to dummy dict temp = _net_dict(net) temp.im = im.copy() temp.dt = dt temp.regions = regions return temp
[ "def", "snow_dual", "(", "im", ",", "voxel_size", "=", "1", ",", "boundary_faces", "=", "[", "'top'", ",", "'bottom'", ",", "'left'", ",", "'right'", ",", "'front'", ",", "'back'", "]", ",", "marching_cubes_area", "=", "False", ")", ":", "# ---------------...
49.521739
22.770186
def connect_to_ec2(region='us-east-1', access_key=None, secret_key=None): """ Connect to AWS ec2 :type region: str :param region: AWS region to connect to :type access_key: str :param access_key: AWS access key id :type secret_key: str :param secret_key: AWS secret access key :returns: boto.ec2.connection.EC2Connection -- EC2 connection """ if access_key: # Connect using supplied credentials logger.info('Connecting to AWS EC2 in {}'.format(region)) connection = ec2.connect_to_region( region, aws_access_key_id=access_key, aws_secret_access_key=secret_key) else: # Fetch instance metadata metadata = get_instance_metadata(timeout=1, num_retries=1) if metadata: try: region = metadata['placement']['availability-zone'][:-1] except KeyError: pass # Connect using env vars or boto credentials logger.info('Connecting to AWS EC2 in {}'.format(region)) connection = ec2.connect_to_region(region) if not connection: logger.error('An error occurred when connecting to EC2') sys.exit(1) return connection
[ "def", "connect_to_ec2", "(", "region", "=", "'us-east-1'", ",", "access_key", "=", "None", ",", "secret_key", "=", "None", ")", ":", "if", "access_key", ":", "# Connect using supplied credentials", "logger", ".", "info", "(", "'Connecting to AWS EC2 in {}'", ".", ...
32.459459
18.810811
def import_string(dotted_path): """ Import a dotted module path and return the attribute/class designated by the last name in the path. Raise ImportError if the import failed. Args: dotted_path: The path to attempt importing Returns: Imported class/attribute """ try: module_path, class_name = dotted_path.rsplit('.', 1) except ValueError as err: raise ImportError("%s doesn't look like a module path" % dotted_path) from err module = import_module(module_path) try: return getattr(module, class_name) except AttributeError as err: raise ImportError('Module "%s" does not define a "%s" attribute/class' % ( module_path, class_name)) from err
[ "def", "import_string", "(", "dotted_path", ")", ":", "try", ":", "module_path", ",", "class_name", "=", "dotted_path", ".", "rsplit", "(", "'.'", ",", "1", ")", "except", "ValueError", "as", "err", ":", "raise", "ImportError", "(", "\"%s doesn't look like a m...
32.73913
21.782609
def get_user_roles(user): """Get a list of a users's roles.""" if user: groups = user.groups.all() # Important! all() query may be cached on User with prefetch_related. roles = (RolesManager.retrieve_role(group.name) for group in groups if group.name in RolesManager.get_roles_names()) return sorted(roles, key=lambda r: r.get_name() ) else: return []
[ "def", "get_user_roles", "(", "user", ")", ":", "if", "user", ":", "groups", "=", "user", ".", "groups", ".", "all", "(", ")", "# Important! all() query may be cached on User with prefetch_related.", "roles", "=", "(", "RolesManager", ".", "retrieve_role", "(", "g...
48.75
33
def find_one_and_update(self, filter, update, **kwargs): """ See http://api.mongodb.com/python/current/api/pymongo/collection.html#pymongo.collection.Collection.find_one_and_update """ self._arctic_lib.check_quota() return self._collection.find_one_and_update(filter, update, **kwargs)
[ "def", "find_one_and_update", "(", "self", ",", "filter", ",", "update", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_arctic_lib", ".", "check_quota", "(", ")", "return", "self", ".", "_collection", ".", "find_one_and_update", "(", "filter", ",", "upd...
53.333333
23.666667
def load(s, **kwargs): """Load yaml file""" try: return loads(s, **kwargs) except TypeError: return loads(s.read(), **kwargs)
[ "def", "load", "(", "s", ",", "*", "*", "kwargs", ")", ":", "try", ":", "return", "loads", "(", "s", ",", "*", "*", "kwargs", ")", "except", "TypeError", ":", "return", "loads", "(", "s", ".", "read", "(", ")", ",", "*", "*", "kwargs", ")" ]
24.666667
12.666667
def print_spelling_errors(filename, encoding='utf8'): """ Print misspelled words returned by sphinxcontrib-spelling """ filesize = os.stat(filename).st_size if filesize: sys.stdout.write('Misspelled Words:\n') with io.open(filename, encoding=encoding) as wordlist: for line in wordlist: sys.stdout.write(' ' + line) return 1 if filesize else 0
[ "def", "print_spelling_errors", "(", "filename", ",", "encoding", "=", "'utf8'", ")", ":", "filesize", "=", "os", ".", "stat", "(", "filename", ")", ".", "st_size", "if", "filesize", ":", "sys", ".", "stdout", ".", "write", "(", "'Misspelled Words:\\n'", "...
31.076923
14.615385
def is_gzipped_fastq(file_name): """ Determine whether indicated file appears to be a gzipped FASTQ. :param str file_name: Name/path of file to check as gzipped FASTQ. :return bool: Whether indicated file appears to be in gzipped FASTQ format. """ _, ext = os.path.splitext(file_name) return file_name.endswith(".fastq.gz") or file_name.endswith(".fq.gz")
[ "def", "is_gzipped_fastq", "(", "file_name", ")", ":", "_", ",", "ext", "=", "os", ".", "path", ".", "splitext", "(", "file_name", ")", "return", "file_name", ".", "endswith", "(", "\".fastq.gz\"", ")", "or", "file_name", ".", "endswith", "(", "\".fq.gz\""...
41.777778
19.777778
def extern_equals(self, context_handle, val1, val2): """Return true if the given Handles are __eq__.""" return self._ffi.from_handle(val1[0]) == self._ffi.from_handle(val2[0])
[ "def", "extern_equals", "(", "self", ",", "context_handle", ",", "val1", ",", "val2", ")", ":", "return", "self", ".", "_ffi", ".", "from_handle", "(", "val1", "[", "0", "]", ")", "==", "self", ".", "_ffi", ".", "from_handle", "(", "val2", "[", "0", ...
60.333333
15.666667
def getSamplingWorkflowEnabled(self): """Returns True if the sample of this Analysis Request has to be collected by the laboratory personnel """ template = self.getTemplate() if template: return template.getSamplingRequired() return self.bika_setup.getSamplingWorkflowEnabled()
[ "def", "getSamplingWorkflowEnabled", "(", "self", ")", ":", "template", "=", "self", ".", "getTemplate", "(", ")", "if", "template", ":", "return", "template", ".", "getSamplingRequired", "(", ")", "return", "self", ".", "bika_setup", ".", "getSamplingWorkflowEn...
41.25
7.375
def create_bool(help_string=NO_HELP, default=NO_DEFAULT): # type: (str, Union[bool, NO_DEFAULT_TYPE]) -> bool """ Create a bool parameter :param help_string: :param default: :return: """ # noinspection PyTypeChecker return ParamFunctions( help_string=help_string, default=default, type_name="bool", function_s2t=convert_string_to_bool, function_t2s=convert_bool_to_string, )
[ "def", "create_bool", "(", "help_string", "=", "NO_HELP", ",", "default", "=", "NO_DEFAULT", ")", ":", "# type: (str, Union[bool, NO_DEFAULT_TYPE]) -> bool", "# noinspection PyTypeChecker", "return", "ParamFunctions", "(", "help_string", "=", "help_string", ",", "default", ...
31.1875
11.6875
def build_select_fields(self): """ Generates the sql for the SELECT portion of the query :return: the SELECT portion of the query :rtype: str """ field_sql = [] # get the field sql for each table for table in self.tables: field_sql += table.get_field_sql() # get the field sql for each join table for join_item in self.joins: field_sql += join_item.right_table.get_field_sql() # combine all field sql separated by a comma sql = 'SELECT {0}{1} '.format(self.get_distinct_sql(), ', '.join(field_sql)) return sql
[ "def", "build_select_fields", "(", "self", ")", ":", "field_sql", "=", "[", "]", "# get the field sql for each table", "for", "table", "in", "self", ".", "tables", ":", "field_sql", "+=", "table", ".", "get_field_sql", "(", ")", "# get the field sql for each join ta...
31.1
18.2
def clean_cell_meta(self, meta): """Remove cell metadata that matches the default cell metadata.""" for k, v in DEFAULT_CELL_METADATA.items(): if meta.get(k, None) == v: meta.pop(k, None) return meta
[ "def", "clean_cell_meta", "(", "self", ",", "meta", ")", ":", "for", "k", ",", "v", "in", "DEFAULT_CELL_METADATA", ".", "items", "(", ")", ":", "if", "meta", ".", "get", "(", "k", ",", "None", ")", "==", "v", ":", "meta", ".", "pop", "(", "k", ...
41
8
def asdatetime(self, naive=True): """Return this datetime_tz as a datetime object. Args: naive: Return *without* any tz info. Returns: This datetime_tz as a datetime object. """ args = list(self.timetuple()[0:6])+[self.microsecond] if not naive: args.append(self.tzinfo) return datetime.datetime(*args)
[ "def", "asdatetime", "(", "self", ",", "naive", "=", "True", ")", ":", "args", "=", "list", "(", "self", ".", "timetuple", "(", ")", "[", "0", ":", "6", "]", ")", "+", "[", "self", ".", "microsecond", "]", "if", "not", "naive", ":", "args", "."...
26
15.923077
def normalizeBoolean(value): """ Normalizes a boolean. * **value** must be an ``int`` with value of 0 or 1, or a ``bool``. * Returned value will be a boolean. """ if isinstance(value, int) and value in (0, 1): value = bool(value) if not isinstance(value, bool): raise ValueError("Boolean values must be True or False, not '%s'." % value) return value
[ "def", "normalizeBoolean", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "int", ")", "and", "value", "in", "(", "0", ",", "1", ")", ":", "value", "=", "bool", "(", "value", ")", "if", "not", "isinstance", "(", "value", ",", "bool",...
31.692308
14.769231
def _verify_field_spec(self, spec, path): """Verifies a given field specification is valid, recursing into nested schemas if required.""" # Required should be a boolean if 'required' in spec and not isinstance(spec['required'], bool): raise SchemaFormatException("{} required declaration should be True or False", path) # Required should be a boolean if 'nullable' in spec and not isinstance(spec['nullable'], bool): raise SchemaFormatException("{} nullable declaration should be True or False", path) # Must have a type specified if 'type' not in spec: raise SchemaFormatException("{} has no type declared.", path) self._verify_type(spec, path) # Validations should be either a single function or array of functions if 'validates' in spec: self._verify_validates(spec, path) # Defaults must be of the correct type or a function if 'default' in spec: self._verify_default(spec, path) # Only expected spec keys are supported if not set(spec.keys()).issubset(set(['type', 'required', 'validates', 'default', 'nullable'])): raise SchemaFormatException("Unsupported field spec item at {}. Items: "+repr(spec.keys()), path)
[ "def", "_verify_field_spec", "(", "self", ",", "spec", ",", "path", ")", ":", "# Required should be a boolean", "if", "'required'", "in", "spec", "and", "not", "isinstance", "(", "spec", "[", "'required'", "]", ",", "bool", ")", ":", "raise", "SchemaFormatExce...
45.785714
26.464286
def add_composition(self, composition): """Add a composition to the suite. Raise an UnexpectedObjectError when the supplied argument is not a Composition object. """ if not hasattr(composition, 'tracks'): raise UnexpectedObjectError("Object '%s' not expected. Expecting " "a mingus.containers.Composition object." % composition) self.compositions.append(composition) return self
[ "def", "add_composition", "(", "self", ",", "composition", ")", ":", "if", "not", "hasattr", "(", "composition", ",", "'tracks'", ")", ":", "raise", "UnexpectedObjectError", "(", "\"Object '%s' not expected. Expecting \"", "\"a mingus.containers.Composition object.\"", "%...
41.545455
17.636364
def nworker(data, smpchunk, tests): """ The workhorse function. Not numba. """ ## tell engines to limit threads #numba.config.NUMBA_DEFAULT_NUM_THREADS = 1 ## open the seqarray view, the modified array is in bootsarr with h5py.File(data.database.input, 'r') as io5: seqview = io5["bootsarr"][:] maparr = io5["bootsmap"][:] ## create an N-mask array of all seq cols (this isn't really too slow) nall_mask = seqview[:] == 78 ## tried numba compiling everythign below here, but was not faster ## than making nmask w/ axis arg in numpy ## get the input arrays ready rquartets = np.zeros((smpchunk.shape[0], 4), dtype=np.uint16) rweights = None #rweights = np.ones(smpchunk.shape[0], dtype=np.float64) rdstats = np.zeros((smpchunk.shape[0], 4), dtype=np.uint32) #times = [] ## fill arrays with results using numba funcs for idx in xrange(smpchunk.shape[0]): ## get seqchunk for 4 samples (4, ncols) sidx = smpchunk[idx] seqchunk = seqview[sidx] ## get N-containing columns in 4-array, and invariant sites. nmask = np.any(nall_mask[sidx], axis=0) nmask += np.all(seqchunk == seqchunk[0], axis=0) ## <- do we need this? ## get matrices if there are any shared SNPs ## returns best-tree index, qscores, and qstats #bidx, qscores, qstats = calculate(seqchunk, maparr[:, 0], nmask, tests) bidx, qstats = calculate(seqchunk, maparr[:, 0], nmask, tests) ## get weights from the three scores sorted. ## Only save to file if the quartet has information rdstats[idx] = qstats rquartets[idx] = smpchunk[idx][bidx] return rquartets, rweights, rdstats
[ "def", "nworker", "(", "data", ",", "smpchunk", ",", "tests", ")", ":", "## tell engines to limit threads", "#numba.config.NUMBA_DEFAULT_NUM_THREADS = 1", "## open the seqarray view, the modified array is in bootsarr", "with", "h5py", ".", "File", "(", "data", ".", "database"...
38.2
20.022222
def raise_for_status(self):
    ''' Raise BadStatus if one occurred. '''
    code = self.status_code
    # Build the message for the matching error class; 4xx = client,
    # 5xx = server, anything else is not an error.
    if 400 <= code < 500:
        message = '{} Client Error: {} for url: {}'.format(code, self.reason_phrase, self.url)
    elif 500 <= code < 600:
        message = '{} Server Error: {} for url: {}'.format(code, self.reason_phrase, self.url)
    else:
        return
    raise BadStatus(message, code)
[ "def", "raise_for_status", "(", "self", ")", ":", "if", "400", "<=", "self", ".", "status_code", "<", "500", ":", "raise", "BadStatus", "(", "'{} Client Error: {} for url: {}'", ".", "format", "(", "self", ".", "status_code", ",", "self", ".", "reason_phrase",...
55.375
33.125
def _update_access_key_pair(self, access_key_id, key, val):
    """Update a single (key, value) entry on an access key.

    Fetches the key's current server-side definition, overrides one
    entry, and pushes the whole payload back via the full-update call.
    """
    # Read the current state over HTTPS.
    existing = self.get_access_key(access_key_id)
    # Copy into a payload dict and change only the requested entry.
    payload = KeenApi._build_access_key_dict(existing)
    payload[key] = val
    # Persist everything through the full-update endpoint.
    return self.update_access_key_full(access_key_id, **payload)
[ "def", "_update_access_key_pair", "(", "self", ",", "access_key_id", ",", "key", ",", "val", ")", ":", "# Get current state via HTTPS.", "current_access_key", "=", "self", ".", "get_access_key", "(", "access_key_id", ")", "# Copy and only change the single parameter.", "p...
39.615385
18.076923