text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def prepare_env(gdef='', gvars=None, extra_vars=None, host='localhost'):
    '''Clear the current sos_dict, execute global_def (definitions and
    imports), and inject global variables.

    :param gdef: global definition statements to exec into the SoS dict.
    :param gvars: global variables to inject (default: none).
    :param extra_vars: extra variables injected on top of ``gvars``.
    :param host: host name to prepare the environment for.
    '''
    # BUGFIX: the original signature used mutable default arguments
    # (gvars={}, extra_vars={}), which are shared across calls and can leak
    # state between invocations. Use None sentinels instead.
    gvars = {} if gvars is None else gvars
    extra_vars = {} if extra_vars is None else extra_vars
    env.sos_dict.clear()
    if not gdef and not gvars:
        # SoS Notebook calls prepare_env without global statement from a
        # particular cell, so derive an empty global statement here.
        gdef, gvars = analyze_global_statements('')
    if gdef:
        exec(compile(gdef, filename="<ast>", mode="exec"), env.sos_dict._dict)
    env.sos_dict.quick_update(gvars)
    env.sos_dict.quick_update(extra_vars)
    if 'CONFIG' not in env.sos_dict:
        # if this is in sos notebook
        load_config_files()
        if 'hosts' not in env.sos_dict[
                'CONFIG'] and 'localhost' not in env.sos_dict['CONFIG']:
            env.sos_dict['CONFIG']['localhost'] = 'localhost'
            env.sos_dict['CONFIG']['hosts'] = {
                'localhost': {
                    'paths': {},
                    'address': 'localhost'
                }
            }
    # expose `paths` of localhost
    if host == 'localhost':
        if 'localhost' in env.sos_dict['CONFIG']:
            if 'hosts' not in env.sos_dict['CONFIG'] or env.sos_dict['CONFIG'][
                    'localhost'] not in env.sos_dict['CONFIG']['hosts']:
                env.logger.warning(
                    f"Localhost {env.sos_dict['CONFIG']['localhost']} is not defined in CONFIG['hosts']"
                )
                env.sos_dict['CONFIG']['hosts'][env.sos_dict['CONFIG']
                                                ['localhost']] = {
                                                    'paths': {},
                                                    'address': 'localhost'
                                                }
            env.sos_dict.set('__host__', env.sos_dict['CONFIG']['localhost'])
        else:
            if 'hosts' in env.sos_dict['CONFIG']:
                if 'localhost' not in env.sos_dict['CONFIG']['hosts']:
                    # BUGFIX: corrected "locahost" typo in the warning text.
                    env.logger.warning('localhost is not defined in "hosts".')
                    env.sos_dict['CONFIG']['hosts']['localhost'] = {
                        'paths': {},
                        'address': 'localhost'
                    }
                elif 'paths' not in env.sos_dict['CONFIG']['hosts']['localhost']:
                    env.sos_dict['CONFIG']['hosts']['localhost']['paths'] = {}
            env.sos_dict.set('__host__', 'localhost')
    else:
        if 'hosts' not in env.sos_dict['CONFIG'] or host not in env.sos_dict[
                'CONFIG']['hosts']:
            raise RuntimeError(
                f"Remote host {host} is not defined in CONFIG['hosts']. Available ones are {env.sos_dict['CONFIG']['hosts'].keys()}"
            )
        env.sos_dict.set('__host__', host)
[ "def", "prepare_env", "(", "gdef", "=", "''", ",", "gvars", "=", "{", "}", ",", "extra_vars", "=", "{", "}", ",", "host", "=", "'localhost'", ")", ":", "env", ".", "sos_dict", ".", "clear", "(", ")", "if", "not", "gdef", "and", "not", "gvars", ":...
45.711864
22.186441
def otsu(data, min_threshold=None, max_threshold=None,bins=256):
    """Compute a threshold using Otsu's method

    data - an array of intensity values between zero and one
    min_threshold - only consider thresholds above this minimum value
    max_threshold - only consider thresholds below this maximum value
    bins - we bin the data into this many equally-spaced bins, then pick
           the bin index that optimizes the metric
    """
    assert min_threshold is None or max_threshold is None or min_threshold < max_threshold
    def constrain(threshold):
        # Clamp the candidate threshold into [min_threshold, max_threshold]
        # (each bound only applies when it was supplied).
        if not min_threshold is None and threshold < min_threshold:
            threshold = min_threshold
        if not max_threshold is None and threshold > max_threshold:
            threshold = max_threshold
        return threshold
    data = np.atleast_1d(data)
    # Drop NaNs so they cannot poison the variance computation.
    data = data[~ np.isnan(data)]
    if len(data) == 0:
        # No usable data: fall back to one of the supplied bounds, or 0.
        return (min_threshold if not min_threshold is None
                else max_threshold if not max_threshold is None
                else 0)
    elif len(data) == 1:
        return constrain(data[0])
    if bins > len(data):
        # Can't have more bins than samples; this also keeps the stride
        # len(data)//bins >= 1 below.
        bins = len(data)
    data.sort()
    # Running (cumulative) variance from the left, and from the right
    # (via flipud), evaluated at bin-stride sample positions.
    # NOTE(review): `running_variance` is defined elsewhere in this file;
    # presumably it returns per-prefix variances — confirm against its source.
    var = running_variance(data)
    rvar = np.flipud(running_variance(np.flipud(data)))
    thresholds = data[1:len(data):len(data)//bins]
    # Score = (left count * left variance) + (right count * right variance);
    # the minimizing split is the Otsu threshold.
    score_low = (var[0:len(data)-1:len(data)//bins] *
                 np.arange(0,len(data)-1,len(data)//bins))
    score_high = (rvar[1:len(data):len(data)//bins] *
                  (len(data) - np.arange(1,len(data),len(data)//bins)))
    scores = score_low + score_high
    if len(scores) == 0:
        return constrain(thresholds[0])
    index = np.argwhere(scores == scores.min()).flatten()
    if len(index)==0:
        return constrain(thresholds[0])
    #
    # Take the average of the thresholds to either side of
    # the chosen value to get an intermediate in cases where there is
    # a steep step between the background and foreground
    index = index[0]
    if index == 0:
        index_low = 0
    else:
        index_low = index-1
    if index == len(thresholds)-1:
        index_high = len(thresholds)-1
    else:
        index_high = index+1
    return constrain((thresholds[index_low]+thresholds[index_high]) / 2)
[ "def", "otsu", "(", "data", ",", "min_threshold", "=", "None", ",", "max_threshold", "=", "None", ",", "bins", "=", "256", ")", ":", "assert", "min_threshold", "is", "None", "or", "max_threshold", "is", "None", "or", "min_threshold", "<", "max_threshold", ...
40.563636
18.4
def configure(obj, token):
    """Use this command to configure API tokens """
    cfg = obj.get('config')
    if not cfg:
        cfg = FileConfig(obj['profile'])
    cfg.auth_token = token
    cfg.save()
[ "def", "configure", "(", "obj", ",", "token", ")", ":", "config", "=", "obj", ".", "get", "(", "'config'", ")", "or", "FileConfig", "(", "obj", "[", "'profile'", "]", ")", "config", ".", "auth_token", "=", "token", "config", ".", "save", "(", ")" ]
31
11.333333
def cartesian_cs(self):
    """The :class:`CartesianCS` which describes the coordinate axes."""
    cs_node = self.element.find(GML_NS + 'cartesianCS')
    return get(cs_node.attrib[XLINK_NS + 'href'])
[ "def", "cartesian_cs", "(", "self", ")", ":", "cs", "=", "self", ".", "element", ".", "find", "(", "GML_NS", "+", "'cartesianCS'", ")", "href", "=", "cs", ".", "attrib", "[", "XLINK_NS", "+", "'href'", "]", "return", "get", "(", "href", ")" ]
43.8
10
def save_file(self, title="Save As", initialDir="~", fileTypes="*|All Files", rememberAs=None, **kwargs):
    """
    Show a Save As dialog

    Usage: C{dialog.save_file(title="Save As", initialDir="~", fileTypes="*|All Files", rememberAs=None, **kwargs)}

    @param title: window title for the dialog
    @param initialDir: starting directory for the file dialog
    @param fileTypes: file type filter expression
    @param rememberAs: gives an ID to this file dialog, allowing it to open at the last used path next time
    @return: a tuple containing the exit code and file path
    @rtype: C{DialogData(int, str)}
    """
    # Build the kdialog argument list; the ":id" suffix enables path memory.
    dialog_args = ["--getsavefilename", initialDir, fileTypes]
    if rememberAs is not None:
        dialog_args.append(":" + rememberAs)
    return self._run_kdialog(title, dialog_args, kwargs)
[ "def", "save_file", "(", "self", ",", "title", "=", "\"Save As\"", ",", "initialDir", "=", "\"~\"", ",", "fileTypes", "=", "\"*|All Files\"", ",", "rememberAs", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "rememberAs", "is", "not", "None", ":",...
54.705882
30.941176
def copy_location(new_node, old_node):
    """
    Copy the source location hint (`lineno` and `col_offset`) from the old
    to the new node if possible and return the new one.
    """
    for name in ('lineno', 'col_offset'):
        supported = name in old_node._attributes and name in new_node._attributes
        if supported and hasattr(old_node, name):
            setattr(new_node, name, getattr(old_node, name))
    return new_node
[ "def", "copy_location", "(", "new_node", ",", "old_node", ")", ":", "for", "attr", "in", "'lineno'", ",", "'col_offset'", ":", "if", "attr", "in", "old_node", ".", "_attributes", "and", "attr", "in", "new_node", ".", "_attributes", "and", "hasattr", "(", "...
41.2
12.8
def loop(self, timeout = 1):
    """Main loop."""
    # Always watch the socket for reads; only watch for writes when there
    # is queued outgoing data.
    readers = [self.sock]
    writers = [self.sock] if len(self.out_packet) > 0 else []
    ready_r, ready_w, _ = select.select(readers, writers, [], timeout)
    if ready_r:
        status, _ = self.loop_read()
        if status != NC.ERR_SUCCESS:
            return status
    if ready_w:
        status, _ = self.loop_write()
        if status != NC.ERR_SUCCESS:
            return status
    self.loop_misc()
    return NC.ERR_SUCCESS
[ "def", "loop", "(", "self", ",", "timeout", "=", "1", ")", ":", "rlist", "=", "[", "self", ".", "sock", "]", "wlist", "=", "[", "]", "if", "len", "(", "self", ".", "out_packet", ")", ">", "0", ":", "wlist", ".", "append", "(", "self", ".", "s...
26.636364
15.454545
def create_eager_metrics_for_problem(problem, model_hparams):
    """See create_eager_metrics."""
    metric_fns = problem.eval_metric_fns(model_hparams)
    target_modality = problem.get_hparams(model_hparams).modality["targets"]
    # Prefer a problem-specific weights_fn for "targets", falling back to
    # the modality default.
    weights_fn = model_hparams.weights_fn.get(
        "targets", modalities.get_weights_fn(target_modality))
    return create_eager_metrics_internal(metric_fns, weights_fn=weights_fn)
[ "def", "create_eager_metrics_for_problem", "(", "problem", ",", "model_hparams", ")", ":", "metric_fns", "=", "problem", ".", "eval_metric_fns", "(", "model_hparams", ")", "problem_hparams", "=", "problem", ".", "get_hparams", "(", "model_hparams", ")", "target_modali...
48.666667
14.777778
def _draw_breakpoint_icon(self, top, painter, icon_name):
    """Draw the given breakpoint pixmap.

    Args:
        top (int): top of the line to draw the breakpoint icon.
        painter (QPainter)
        icon_name (srt): key of icon to draw (see: self.icons)
    """
    hint = self.sizeHint()
    area = QRect(0, top, hint.width(), hint.height())
    try:
        pixmap = self.icons[icon_name]
    except KeyError as err:
        debug_print("Breakpoint icon doen't exist, {}".format(err))
    else:
        pixmap.paint(painter, area)
[ "def", "_draw_breakpoint_icon", "(", "self", ",", "top", ",", "painter", ",", "icon_name", ")", ":", "rect", "=", "QRect", "(", "0", ",", "top", ",", "self", ".", "sizeHint", "(", ")", ".", "width", "(", ")", ",", "self", ".", "sizeHint", "(", ")",...
36.6875
16.5
def jsonex_request(url, data, headers=None):
    """ Make a request with JsonEx
    :param url: URL
    :type url: str
    :param data: Data to POST
    :type data: dict
    :return: Response
    :rtype: dict
    :raises exc.ConnectionError: Connection error
    :raises exc.ServerError: Remote server error (unknown)
    :raises exc.ProviderError: any errors reported by the remote
    """
    # Authentication?
    url, headers = _parse_authentication(url)
    headers['Content-Type'] = 'application/json'

    # Request
    try:
        request = Request(url, headers=headers)
        reply = urlopen(request, jsonex_dumps(data))
        payload = jsonex_loads(reply.read())
    except HTTPError as err:
        # A JSON error body is a structured provider error; anything else
        # is an opaque server failure.
        if 'Content-Type' in err.headers and err.headers['Content-Type'] == 'application/json':
            payload = jsonex_loads(err.read())
        else:
            raise exc.ServerError('Server at "{}" failed: {}'.format(url, err))
    except URLError as err:
        raise exc.ConnectionError('Connection to "{}" failed: {}'.format(url, err))

    # Errors?
    if 'error' in payload:  # Exception object
        raise payload['error']  # Error raised by the remote side
    return payload
[ "def", "jsonex_request", "(", "url", ",", "data", ",", "headers", "=", "None", ")", ":", "# Authentication?", "url", ",", "headers", "=", "_parse_authentication", "(", "url", ")", "headers", "[", "'Content-Type'", "]", "=", "'application/json'", "# Request", "...
33.114286
19.142857
def from_table(fileobj=None, url='http://hgdownload.cse.ucsc.edu/goldenpath/hg19/database/knownGene.txt.gz', parser=UCSCTable.KNOWN_GENE, mode='tx', decompress=None):
    '''
    Index the rows of a UCSC genome-database table
    (https://genome.ucsc.edu/cgi-bin/hgTables, e.g. knownGene, refGene,
    ensGene, wgEncodeGencodeBasicV19) into a ``GenomeIntervalTree``.

    The table is given either as a ``fileobj`` (read line by line) or via an
    ``url`` (a ``txt`` or ``txt.gz`` file, online or local).

    ``parser`` is a function that takes one line of the file (no line ending)
    and returns a dict mapping field names to values; this dict becomes the
    ``data`` field of each interval. It must provide the field ``chrom``, plus
    ``txStart``/``txEnd`` when ``mode == 'tx'``, ``cdsStart``/``cdsEnd`` when
    ``mode == 'cds'``, and ``exonCount``/``exonStarts``/``exonEnds`` when
    ``mode == 'exons'``.

    ``mode`` selects how genes become intervals: ``'tx'`` (transcribed
    region), ``'cds'`` (coding region), ``'exons'`` (one interval per exon),
    or a callable "interval-maker" taking the parsed dict and returning an
    iterable of Interval objects.

    ``decompress`` controls gzip decompression of downloaded data (ignored
    when ``fileobj`` is given): if None, data is decompressed when the url
    ends with ``.gz``; True forces decompression.

    >> knownGene = GenomeIntervalTree.from_table()
    >> len(knownGene)
    82960
    '''
    if fileobj is None:
        data = urlopen(url).read()
        if (decompress is None and url.endswith('.gz')) or decompress:
            # 16 + MAX_WBITS tells zlib to expect a gzip header/trailer.
            data = zlib.decompress(data, 16+zlib.MAX_WBITS)
        fileobj = BytesIO(data)

    interval_lists = defaultdict(list)

    if mode == 'tx':
        interval_maker = IntervalMakers.TX
    elif mode == 'cds':
        interval_maker = IntervalMakers.CDS
    elif mode == 'exons':
        interval_maker = IntervalMakers.EXONS
    elif not callable(mode):
        # BUGFIX: the original wrote `getattr(mode, __call__, None)` with a
        # bare, undefined name ``__call__``, which raised NameError for any
        # callable mode instead of accepting it. Use callable() instead.
        raise Exception("Parameter `mode` may only be 'tx', 'cds', 'exons' or a callable")
    else:
        interval_maker = mode

    for ln in fileobj:
        # Normalize to stripped bytes before parsing.
        if not isinstance(ln, bytes):
            ln = ln.encode()
        ln = ln.strip()
        d = parser(ln)
        for interval in interval_maker(d):
            interval_lists[d['chrom']].append(_fix(interval))

    # Now convert interval lists into trees
    gtree = GenomeIntervalTree()
    # getattr keeps Python 2 (`iteritems`) / Python 3 (`items`) compatibility.
    for chrom, lst in getattr(interval_lists, 'iteritems', interval_lists.items)():
        gtree[chrom] = IntervalTree(lst)
    return gtree
[ "def", "from_table", "(", "fileobj", "=", "None", ",", "url", "=", "'http://hgdownload.cse.ucsc.edu/goldenpath/hg19/database/knownGene.txt.gz'", ",", "parser", "=", "UCSCTable", ".", "KNOWN_GENE", ",", "mode", "=", "'tx'", ",", "decompress", "=", "None", ")", ":", ...
55.917808
37.178082
def _args_for_remote(self):
    """
    Generate arguments for 'terraform remote config'. Return None if
    not present in configuration.

    :return: list of args for 'terraform remote config' or None
    :rtype: :std:term:`list`
    """
    remote_conf = self.config.get('terraform_remote_state')
    if remote_conf is None:
        return None
    result = ['-backend=%s' % remote_conf['backend']]
    # Emit backend-config pairs in a deterministic (sorted) order.
    result += ['-backend-config="%s=%s"' % pair
               for pair in sorted(remote_conf['config'].items())]
    return result
[ "def", "_args_for_remote", "(", "self", ")", ":", "conf", "=", "self", ".", "config", ".", "get", "(", "'terraform_remote_state'", ")", "if", "conf", "is", "None", ":", "return", "None", "args", "=", "[", "'-backend=%s'", "%", "conf", "[", "'backend'", "...
35.8
15.4
def run(self, quil_program, classical_addresses: List[int] = None, trials=1):
    """
    Run a Quil program multiple times, accumulating the values deposited in
    a list of classical addresses.

    :param Program quil_program: A Quil program.
    :param classical_addresses: The classical memory to retrieve. Specified as a list of
                                integers that index into a readout register named ``ro``. This
                                function--and particularly this argument--are included for
                                backwards compatibility and will be removed in the future.
    :param int trials: Number of shots to collect.
    :return: A list of dictionaries of bits. Each dictionary corresponds to the values in
             `classical_addresses`.
    :rtype: list
    """
    # When no addresses are given, derive them from the program itself.
    if classical_addresses is None:
        address_map = get_classical_addresses_from_program(quil_program)
    else:
        address_map = {'ro': classical_addresses}

    buffers = self._connection._qvm_run(quil_program, address_map, trials,
                                        self.measurement_noise, self.gate_noise,
                                        self.random_seed)

    if not buffers:
        return []
    if 'ro' in buffers:
        return buffers['ro'].tolist()
    raise ValueError("You are using QVMConnection.run with multiple readout registers not "
                     "named `ro`. Please use the new `QuantumComputer` abstraction.")
[ "def", "run", "(", "self", ",", "quil_program", ",", "classical_addresses", ":", "List", "[", "int", "]", "=", "None", ",", "trials", "=", "1", ")", ":", "if", "classical_addresses", "is", "None", ":", "caddresses", "=", "get_classical_addresses_from_program",...
45.121212
26.272727
def go_to_column(self, column):
    """
    Moves the text cursor to given column.

    :param column: Column to go to.
    :type column: int
    :return: Method success.
    :rtype: bool
    """
    text_cursor = self.textCursor()
    # Position is relative to the start of the cursor's current block.
    text_cursor.setPosition(text_cursor.block().position() + column)
    self.setTextCursor(text_cursor)
    return True
[ "def", "go_to_column", "(", "self", ",", "column", ")", ":", "cursor", "=", "self", ".", "textCursor", "(", ")", "cursor", ".", "setPosition", "(", "cursor", ".", "block", "(", ")", ".", "position", "(", ")", "+", "column", ")", "self", ".", "setText...
26
13.857143
def __validate(self, oid):
    """Validate and use the given id for this ObjectId.

    Raises TypeError if id is not an instance of
    (:class:`basestring` (:class:`str` or :class:`bytes`
    in python 3), ObjectId) and InvalidId if it is not a
    valid ObjectId.

    :Parameters:
      - `oid`: a valid ObjectId
    """
    if isinstance(oid, ObjectId):
        # bytes or unicode in python 2, str in python 3
        self.__id = oid.binary
    elif isinstance(oid, string_type):
        # A textual id must be exactly 24 hex characters.
        if len(oid) != 24:
            _raise_invalid_id(oid)
        else:
            try:
                self.__id = bytes_from_hex(oid)
            except (TypeError, ValueError):
                _raise_invalid_id(oid)
    else:
        raise TypeError("id must be an instance of (bytes, %s, ObjectId), "
                        "not %s" % (text_type.__name__, type(oid)))
[ "def", "__validate", "(", "self", ",", "oid", ")", ":", "if", "isinstance", "(", "oid", ",", "ObjectId", ")", ":", "self", ".", "__id", "=", "oid", ".", "binary", "# bytes or unicode in python 2, str in python 3", "elif", "isinstance", "(", "oid", ",", "stri...
36.88
15.44
def initial_global_state(self) -> GlobalState:
    """Initialize the execution environment."""
    # Fall back to the callee account's code when no explicit code was given.
    contract_code = self.code or self.callee_account.code
    environment = Environment(
        self.callee_account,
        self.caller,
        self.call_data,
        self.gas_price,
        self.call_value,
        self.origin,
        code=contract_code,
    )
    return super().initial_global_state_from_environment(
        environment, active_function="fallback"
    )
[ "def", "initial_global_state", "(", "self", ")", "->", "GlobalState", ":", "environment", "=", "Environment", "(", "self", ".", "callee_account", ",", "self", ".", "caller", ",", "self", ".", "call_data", ",", "self", ".", "gas_price", ",", "self", ".", "c...
34.142857
14.214286
def get_content_metadata(id, version, cursor):
    """Return metadata related to the content from the database."""
    # Do the module lookup
    # FIXME We are doing two queries here that can hopefully be
    #       condensed into one.
    cursor.execute(SQL['get-module-metadata'], dict(id=id, version=version))
    try:
        metadata = cursor.fetchone()[0]
    except (TypeError, IndexError,):  # None returned
        raise httpexceptions.HTTPNotFound()
    # version is what we want to return, but in the sql we're using
    # current_version because otherwise there's a "column reference is
    # ambiguous" error
    metadata['version'] = metadata.pop('current_version')
    # FIXME We currently have legacy 'portal_type' names in the database.
    #       Future upgrades should replace the portal type with a mimetype
    #       of 'application/vnd.org.cnx.(module|collection|folder|<etc>)'.
    #       Until then we will do the replacement here.
    metadata['mediaType'] = portaltype_to_mimetype(metadata['mediaType'])
    return metadata
[ "def", "get_content_metadata", "(", "id", ",", "version", ",", "cursor", ")", ":", "# Do the module lookup", "args", "=", "dict", "(", "id", "=", "id", ",", "version", "=", "version", ")", "# FIXME We are doing two queries here that can hopefully be", "# condens...
46.913043
20.695652
def compute(self):
    """
    Compute a MaxSAT solution. First, the method checks whether or
    not the set of hard clauses is satisfiable. If not, the method
    returns ``False``. Otherwise, add soft clauses to the oracle and
    call the MaxSAT algorithm (see :func:`_compute`).

    Note that the soft clauses are added to the oracles after being
    augmented with additional *selector* literals. The selectors
    literals are then used as *assumptions* when calling the SAT oracle
    and are needed for extracting unsatisfiable cores.
    """
    if not self.oracle.solve():
        return False
    # hard part is satisfiable
    # create selectors and a mapping from selectors to clause ids
    self.sels, self.vmap = [], {}
    self.scpy = [True for cl in self.soft]
    # adding soft clauses to oracle
    for clause_id, clause in enumerate(self.soft):
        self.topv += 1
        clause.append(-self.topv)
        self.sels.append(self.topv)
        self.oracle.add_clause(clause)
        self.vmap[self.topv] = clause_id
    self._compute()
    return True
[ "def", "compute", "(", "self", ")", ":", "if", "self", ".", "oracle", ".", "solve", "(", ")", ":", "# hard part is satisfiable", "# create selectors and a mapping from selectors to clause ids", "self", ".", "sels", ",", "self", ".", "vmap", "=", "[", "]", ",", ...
37.30303
20.636364
def macro_attachment_create(self, macro_id, data, **kwargs):
    "https://developer.zendesk.com/rest_api/docs/core/macros#create-macro-attachment"
    api_path = "/api/v2/macros/{macro_id}/attachments.json".format(macro_id=macro_id)
    return self.call(api_path, method="POST", data=data, **kwargs)
[ "def", "macro_attachment_create", "(", "self", ",", "macro_id", ",", "data", ",", "*", "*", "kwargs", ")", ":", "api_path", "=", "\"/api/v2/macros/{macro_id}/attachments.json\"", "api_path", "=", "api_path", ".", "format", "(", "macro_id", "=", "macro_id", ")", ...
67
27
def add_to_fields(self):
    '''Add this :class:`Field` to the fields of :attr:`model`.'''
    model_meta = self.model._meta
    model_meta.scalarfields.append(self)
    # Indexed fields are additionally tracked in the indices list.
    if self.index:
        model_meta.indices.append(self)
[ "def", "add_to_fields", "(", "self", ")", ":", "meta", "=", "self", ".", "model", ".", "_meta", "meta", ".", "scalarfields", ".", "append", "(", "self", ")", "if", "self", ".", "index", ":", "meta", ".", "indices", ".", "append", "(", "self", ")" ]
36.833333
12.833333
def create_project(self, project_name, project_des):
    """ Create a project
    Unsuccessful opertaion will cause an LogException.

    :type project_name: string
    :param project_name: the Project name

    :type project_des: string
    :param project_des: the description of a project

    :return: CreateProjectResponse

    :raise: LogException
    """
    request_body = six.b(json.dumps({
        "projectName": project_name,
        "description": project_des,
    }))
    request_headers = {
        'Content-Type': 'application/json',
        'x-log-bodyrawsize': str(len(request_body)),
    }
    (resp, header) = self._send("POST", project_name, request_body, "/",
                                {}, request_headers)
    return CreateProjectResponse(header, resp)
[ "def", "create_project", "(", "self", ",", "project_name", ",", "project_des", ")", ":", "params", "=", "{", "}", "body", "=", "{", "\"projectName\"", ":", "project_name", ",", "\"description\"", ":", "project_des", "}", "body", "=", "six", ".", "b", "(", ...
32.916667
22.291667
def open(self):
    """
    Opens a WinDivert handle for the given filter.
    Unless otherwise specified by flags, any packet that matches the filter will be diverted to the handle.
    Diverted packets can be read by the application with receive().

    The remapped function is WinDivertOpen::

        HANDLE WinDivertOpen(
            __in const char *filter,
            __in WINDIVERT_LAYER layer,
            __in INT16 priority,
            __in UINT64 flags
        );

    For more info on the C call visit: http://reqrypt.org/windivert-doc.html#divert_open
    """
    if self.is_open:
        raise RuntimeError("WinDivert handle is already open.")
    self._handle = windivert_dll.WinDivertOpen(
        self._filter, self._layer, self._priority, self._flags)
[ "def", "open", "(", "self", ")", ":", "if", "self", ".", "is_open", ":", "raise", "RuntimeError", "(", "\"WinDivert handle is already open.\"", ")", "self", ".", "_handle", "=", "windivert_dll", ".", "WinDivertOpen", "(", "self", ".", "_filter", ",", "self", ...
40.904762
23.190476
def declination_cooper69(dayofyear):
    """
    Solar declination from Duffie & Beckman [1] and attributed to
    Cooper (1969)

    .. warning::
        Return units are radians, not degrees.

    Declination can be expressed using either sine or cosine:

    .. math::

       \\delta = 23.45 \\sin \\left( \\frac{2 \\pi}{365}
       \\left(n_{day} + 284 \\right) \\right)
       = -23.45 \\cos \\left( \\frac{2 \\pi}{365}
       \\left(n_{day} + 10 \\right) \\right)

    Parameters
    ----------
    dayofyear : numeric

    Returns
    -------
    declination (radians) : numeric
        Angular position of the sun at solar noon relative to the plane of
        the equator, approximately between +/-23.45 (degrees).

    References
    ----------
    [1] J. A. Duffie and W. A. Beckman, "Solar Engineering of Thermal
    Processes, 3rd Edition" pp. 13-14, J. Wiley and Sons, New York (2006)

    [2] J. H. Seinfeld and S. N. Pandis, "Atmospheric Chemistry and Physics"
    p. 129, J. Wiley (1998)

    [3] Daryl R. Myers, "Solar Radiation: Practical Modeling for Renewable
    Energy Applications", p. 4 CRC Press (2013)

    See Also
    --------
    declination_spencer71
    """
    # NOTE(review): 285.0 combined with the simple day angle is presumably
    # equivalent to the (n + 284) form in the docstring — confirm against
    # _calculate_simple_day_angle.
    day_angle = _calculate_simple_day_angle(dayofyear)
    phase = day_angle + (2.0 * np.pi / 365.0) * 285.0
    return np.deg2rad(23.45 * np.sin(phase))
[ "def", "declination_cooper69", "(", "dayofyear", ")", ":", "day_angle", "=", "_calculate_simple_day_angle", "(", "dayofyear", ")", "dec", "=", "np", ".", "deg2rad", "(", "23.45", "*", "np", ".", "sin", "(", "day_angle", "+", "(", "2.0", "*", "np", ".", "...
29.953488
26.325581
def shorten_string(string, max_width):
    '''
    make limited length string in form:
    "the string is very lo...(and 15 more)"
    '''
    total_len = len(string)
    if total_len <= max_width:
        return string
    # Reserve room for the "...(and XXXXX more)" suffix; log10 scales the
    # reservation with the number of digits in the length.
    visible = max_width - 16 - int(log10(total_len))
    if isinstance(string, unistr):
        head = string[:visible]
    else:
        head = unistr(string[:visible], errors='ignore')
    return u''.join((head, u'...(and ', unistr(total_len - visible), u' more)'))
[ "def", "shorten_string", "(", "string", ",", "max_width", ")", ":", "string_len", "=", "len", "(", "string", ")", "if", "string_len", "<=", "max_width", ":", "return", "string", "visible", "=", "max_width", "-", "16", "-", "int", "(", "log10", "(", "stri...
37.533333
12.466667
def _connect(self):
    """Open a connection to the XAIR database."""
    try:
        # Connect through Oracle Instant Client using the ORA_FULL TNS entry.
        self.conn = cx_Oracle.connect(self._ORA_FULL)
        self.cursor = self.conn.cursor()
        print('XAIR: Connexion établie')
    except cx_Oracle.Error as err:
        print("Erreur: %s" % (err))
        raise cx_Oracle.Error('Echec de connexion')
[ "def", "_connect", "(", "self", ")", ":", "try", ":", "# On passe par Oracle Instant Client avec le TNS ORA_FULL", "self", ".", "conn", "=", "cx_Oracle", ".", "connect", "(", "self", ".", "_ORA_FULL", ")", "self", ".", "cursor", "=", "self", ".", "conn", ".", ...
32.846154
13.307692
def cross_goal(state):
    """ The goal function for cross solving search. """
    centres, edges = state
    for edge in edges:
        # The edge must carry a D sticker that matches the D centre.
        if "D" not in edge.facings or edge["D"] != centres["D"]["D"]:
            return False
        # The remaining (non-D) face must match its own centre.
        side = "".join(edge.facings.keys()).replace("D", "")
        if edge[side] != centres[side][side]:
            return False
    return True
[ "def", "cross_goal", "(", "state", ")", ":", "centres", ",", "edges", "=", "state", "for", "edge", "in", "edges", ":", "if", "\"D\"", "not", "in", "edge", ".", "facings", ":", "return", "False", "if", "edge", "[", "\"D\"", "]", "!=", "centres", "[", ...
31.428571
9.857143
def query_row(stmt, args=(), factory=None):
    """
    Execute a query. Returns the first row of the result set, or `None`.
    """
    # Pull at most one row from the result iterator.
    return next(iter(query(stmt, args, factory)), None)
[ "def", "query_row", "(", "stmt", ",", "args", "=", "(", ")", ",", "factory", "=", "None", ")", ":", "for", "row", "in", "query", "(", "stmt", ",", "args", ",", "factory", ")", ":", "return", "row", "return", "None" ]
29.142857
12
def is_reachable_host(entity_name):
    '''
    Returns a bool telling if the entity name is a reachable host (IPv4/IPv6/FQDN/etc).

    :param entity_name: host name or address to resolve.
    :return: True when the name resolves, False otherwise.
    '''
    # BUGFIX: the original relied on `assert type(...) == list`, which is
    # stripped under `python -O` and tests nothing useful anyway —
    # getaddrinfo either returns a list or raises. Resolve directly and
    # translate resolution failure to False.
    try:
        socket.getaddrinfo(entity_name, 0, 0, 0, 0)
    except socket.gaierror:
        return False
    return True
[ "def", "is_reachable_host", "(", "entity_name", ")", ":", "try", ":", "assert", "type", "(", "socket", ".", "getaddrinfo", "(", "entity_name", ",", "0", ",", "0", ",", "0", ",", "0", ")", ")", "==", "list", "ret", "=", "True", "except", "socket", "."...
25.076923
27.076923
def prepare(self, rule):
    """
    Parse and/or compile given rule into rule tree.

    :param rule: Filtering grammar rule.
    :return: Parsed and/or compiled rule.
    """
    parsed = self.parser.parse(rule) if self.parser else rule
    return self.compiler.compile(parsed) if self.compiler else parsed
[ "def", "prepare", "(", "self", ",", "rule", ")", ":", "if", "self", ".", "parser", ":", "rule", "=", "self", ".", "parser", ".", "parse", "(", "rule", ")", "if", "self", ".", "compiler", ":", "rule", "=", "self", ".", "compiler", ".", "compile", ...
28.75
11.75
def load_plugins(self, args=None):
    """Load all plugins in the 'plugins' folder."""
    base_name = self.header + "plugin.py"
    for entry in os.listdir(plugins_path):
        # A plugin file shares the header prefix, ends in .py, and is not
        # the base plugin module itself.
        if (entry.startswith(self.header)
                and entry.endswith(".py")
                and entry != base_name):
            # Load the plugin
            self._load_plugin(os.path.basename(entry),
                              args=args,
                              config=self.config)

    # Log plugins list
    logger.debug("Active plugins list: {}".format(self.getPluginsList()))
[ "def", "load_plugins", "(", "self", ",", "args", "=", "None", ")", ":", "for", "item", "in", "os", ".", "listdir", "(", "plugins_path", ")", ":", "if", "(", "item", ".", "startswith", "(", "self", ".", "header", ")", "and", "item", ".", "endswith", ...
45
14.916667
def delete_lambda(awsclient, function_name, events=None, delete_logs=False):
    """Delete a lambda function.

    :param awsclient:
    :param function_name:
    :param events: list of events
    :param delete_logs:
    :return: exit_code
    """
    # TODO remove event source first and maybe also needed for permissions
    if events is not None:
        unwire(awsclient, events, function_name, alias_name=ALIAS_NAME)
    lambda_client = awsclient.get_client('lambda')
    response = lambda_client.delete_function(FunctionName=function_name)
    if delete_logs:
        delete_log_group(awsclient, '/aws/lambda/%s' % function_name)
    log.info(json2table(response))
    return 0
[ "def", "delete_lambda", "(", "awsclient", ",", "function_name", ",", "events", "=", "None", ",", "delete_logs", "=", "False", ")", ":", "if", "events", "is", "not", "None", ":", "unwire", "(", "awsclient", ",", "events", ",", "function_name", ",", "alias_n...
35.25
19.8
def element_to_objects(
        element: etree.ElementTree, sender: str,
        sender_key_fetcher: Callable[[str], str] = None,
        user: UserType = None,
) -> List:
    """Transform an Element to a list of entities recursively.

    Possible child entities are added to each entity ``_children`` list.

    :param element: Element to transform
    :param sender: Payload sender id
    :param sender_key_fetcher: Function to fetch sender public key. If not given, key will always be fetched
        over network. The function should take sender handle as the only parameter.
    :param user: Optional receiving user object. If given, should have an ``id``.
    :returns: list of entities
    """
    entities = []
    # Map the Diaspora XML tag to an entity class; unknown tags are ignored.
    cls = MAPPINGS.get(element.tag)
    if not cls:
        return []

    attrs = xml_children_as_dict(element)
    transformed = transform_attributes(attrs, cls)
    if hasattr(cls, "fill_extra_attributes"):
        transformed = cls.fill_extra_attributes(transformed)
    entity = cls(**transformed)
    # Add protocol name
    entity._source_protocol = "diaspora"
    # Save element object to entity for possible later use
    entity._source_object = etree.tostring(element)
    # Save receiving id to object
    if user:
        entity._receiving_actor_id = user.id

    if issubclass(cls, DiasporaRelayableMixin):
        # If relayable, fetch sender key for validation
        entity._xml_tags = get_element_child_info(element, "tag")
        if sender_key_fetcher:
            entity._sender_key = sender_key_fetcher(entity.actor_id)
        else:
            # No fetcher given: fall back to a (possibly slow) network lookup.
            profile = retrieve_and_parse_profile(entity.handle)
            if profile:
                entity._sender_key = profile.public_key
    else:
        # If not relayable, ensure handles match
        if not check_sender_and_entity_handle_match(sender, entity.handle):
            return []

    try:
        entity.validate()
    except ValueError as ex:
        # Invalid entities are dropped entirely, not partially returned.
        logger.error("Failed to validate entity %s: %s", entity, ex, extra={
            "attrs": attrs,
            "transformed": transformed,
        })
        return []

    # Extract mentions
    entity._mentions = entity.extract_mentions()

    # Do child elements
    for child in element:
        entity._children.extend(element_to_objects(child, sender, user=user))

    # Add to entities list
    entities.append(entity)

    return entities
[ "def", "element_to_objects", "(", "element", ":", "etree", ".", "ElementTree", ",", "sender", ":", "str", ",", "sender_key_fetcher", ":", "Callable", "[", "[", "str", "]", ",", "str", "]", "=", "None", ",", "user", ":", "UserType", "=", "None", ",", ")...
38.216667
20.466667
def predict_unseen(self, times, config):
    """Predict the loss of an unseen configuration.

    Parameters:
    -----------
    times: numpy array
        times where to predict the loss
    config: numpy array
        the numerical representation of the config

    Returns:
    --------
    mean and variance prediction at input times for the given config
    """
    assert np.all(times > 0) and np.all(times <= self.max_num_epochs)

    # Build one feature row per requested time: [config..., time fraction].
    fractions = times / self.max_num_epochs
    features = np.array(config)[None, :]
    features = np.repeat(features, fractions.shape[0], axis=0)
    features = np.concatenate((features, fractions[:, None]), axis=1)

    mean, var = self.model.predict(features)
    return 1 - mean, var
[ "def", "predict_unseen", "(", "self", ",", "times", ",", "config", ")", ":", "assert", "np", ".", "all", "(", "times", ">", "0", ")", "and", "np", ".", "all", "(", "times", "<=", "self", ".", "max_num_epochs", ")", "x", "=", "np", ".", "array", "...
26.758621
20.206897
def change_id(self, new_id_for_id):
    """Change the id of the specified motors (each id must be unique on the bus).

    :param dict new_id_for_id: mapping of current motor id -> new motor id
    :raises ValueError: if the new ids are not unique, or if a new id is
        already in use on the bus
    """
    if len(set(new_id_for_id.values())) < len(new_id_for_id):
        raise ValueError('each id must be unique.')

    # Fix: dict.itervalues()/iteritems() are Python 2 only and raise
    # AttributeError on Python 3; use values()/items() instead.
    for new_id in new_id_for_id.values():
        if self.ping(new_id):
            raise ValueError('id {} is already used.'.format(new_id))

    self._change_id(new_id_for_id)

    # Re-key the cached model/mode entries so lookups keep working.
    for motor_id, new_id in new_id_for_id.items():
        if motor_id in self._known_models:
            self._known_models[new_id] = self._known_models[motor_id]
            del self._known_models[motor_id]
        if motor_id in self._known_mode:
            self._known_mode[new_id] = self._known_mode[motor_id]
            del self._known_mode[motor_id]
[ "def", "change_id", "(", "self", ",", "new_id_for_id", ")", ":", "if", "len", "(", "set", "(", "new_id_for_id", ".", "values", "(", ")", ")", ")", "<", "len", "(", "new_id_for_id", ")", ":", "raise", "ValueError", "(", "'each id must be unique.'", ")", "...
45.722222
17.777778
def commitVCS(self, tag=None):
    """Commit the current working directory state.

    Does nothing when the working directory is not version controlled.
    """
    if self.vcs:
        self.vcs.commit(message='version %s' % tag, tag=tag)
[ "def", "commitVCS", "(", "self", ",", "tag", "=", "None", ")", ":", "if", "not", "self", ".", "vcs", ":", "return", "self", ".", "vcs", ".", "commit", "(", "message", "=", "'version %s'", "%", "tag", ",", "tag", "=", "tag", ")" ]
39.285714
21.285714
def get_char_type(ch):
    """Classify a single character.

    The original docstring listed codes 0-3 that did not match the
    implementation; the actual return values are:

    1. English letter (matches ``en_p``)
    2. Digit
    3. Chinese character (matches ``re_han``)
    4. Anything else
    """
    if re.match(en_p, ch):
        return 1
    elif re.match(r"\d+", ch):  # raw string: avoids invalid-escape warning
        return 2
    elif re.match(re_han, ch):
        return 3
    else:
        return 4
[ "def", "get_char_type", "(", "ch", ")", ":", "if", "re", ".", "match", "(", "en_p", ",", "ch", ")", ":", "return", "1", "elif", "re", ".", "match", "(", "\"\\d+\"", ",", "ch", ")", ":", "return", "2", "elif", "re", ".", "match", "(", "re_han", ...
15.466667
20.133333
def get_decoded_jwt(request):
    """Grab the jwt from the jwt cookie in *request* if possible.

    :return: decoded jwt dict, or ``None`` when the cookie is absent.
    """
    raw_cookie = request.COOKIES.get(jwt_cookie_name(), None)
    return jwt_decode_handler(raw_cookie) if raw_cookie else None
[ "def", "get_decoded_jwt", "(", "request", ")", ":", "jwt_cookie", "=", "request", ".", "COOKIES", ".", "get", "(", "jwt_cookie_name", "(", ")", ",", "None", ")", "if", "not", "jwt_cookie", ":", "return", "None", "return", "jwt_decode_handler", "(", "jwt_cook...
27.416667
14.583333
def quoted_insert(self, e):  # (C-q or C-v)
    u'''Insert the next character typed into the line verbatim.

    This is how to insert key sequences like C-q, for example.'''
    keypress = self.console.getkeypress()
    self.insert_text(keypress.char)
[ "def", "quoted_insert", "(", "self", ",", "e", ")", ":", "# (C-q or C-v)\r", "e", "=", "self", ".", "console", ".", "getkeypress", "(", ")", "self", ".", "insert_text", "(", "e", ".", "char", ")" ]
49.4
13
def export_agg_losses(ekey, dstore):
    """
    Export aggregate losses as one CSV file per tag.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    dskey = ekey[0]
    oq = dstore['oqparam']
    dt = oq.loss_dt()
    name, value, tags = _get_data(dstore, dskey, oq.hazard_stats().items())
    writer = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    expvalue = dstore['exposed_value'].value  # shape (T1, T2, ..., L)
    tagcol = dstore['assetcol/tagcol']
    tagnames = tuple(dstore['oqparam'].aggregate_by)
    header = ('loss_type',) + tagnames + (
        'loss_value', 'exposed_value', 'loss_ratio')
    # One output file per tag (e.g. per realization/statistic).
    for r, tag in enumerate(tags):
        rows = []
        for multi_idx, loss in numpy.ndenumerate(value[:, r]):
            # First index is the loss type, the rest are tag indices.
            l, *tagidxs = multi_idx
            evalue = expvalue[tuple(tagidxs) + (l,)]
            row = tagcol.get_tagvalues(tagnames, tagidxs) + (
                loss, evalue, loss / evalue)
            rows.append((dt.names[l],) + row)
        dest = dstore.build_fname(name, tag, 'csv')
        writer.save(rows, dest, header)
    return writer.getsaved()
[ "def", "export_agg_losses", "(", "ekey", ",", "dstore", ")", ":", "dskey", "=", "ekey", "[", "0", "]", "oq", "=", "dstore", "[", "'oqparam'", "]", "dt", "=", "oq", ".", "loss_dt", "(", ")", "name", ",", "value", ",", "tags", "=", "_get_data", "(", ...
40.884615
12.038462
def loads(cls, json_text, schema=None):
    """Parse JSON text into a dictionary, optionally validating it.

    :param str json_text: json text to be parse
    :param voluptuous.Schema schema: JSON schema.
    :return: Dictionary storing the parse results of JSON
    :rtype: dictionary
    :raises ImportError:
    :raises RuntimeError:
    :raises ValueError:
    """
    # Accept bytes as well as str.
    try:
        json_text = json_text.decode("ascii")
    except AttributeError:
        pass

    try:
        dict_json = json.loads(json_text)
    except ValueError:
        _, err, _ = sys.exc_info()  # for python 2.5 compatibility
        message = os.linesep.join([
            str(err),
            "decode error: check JSON format with http://jsonlint.com/",
        ])
        raise ValueError(message)

    cls.__validate_json(schema, dict_json)

    return dict_json
[ "def", "loads", "(", "cls", ",", "json_text", ",", "schema", "=", "None", ")", ":", "try", ":", "json_text", "=", "json_text", ".", "decode", "(", "\"ascii\"", ")", "except", "AttributeError", ":", "pass", "try", ":", "dict_json", "=", "json", ".", "lo...
29.428571
18.142857
def head_and_tail_print(self, n=5):
    """Display the first and last n elements of a DataFrame."""
    from IPython import display
    html = display.HTML(self._head_and_tail_table(n))
    display.display(html)
[ "def", "head_and_tail_print", "(", "self", ",", "n", "=", "5", ")", ":", "from", "IPython", "import", "display", "display", ".", "display", "(", "display", ".", "HTML", "(", "self", ".", "_head_and_tail_table", "(", "n", ")", ")", ")" ]
51
9.25
def execution_duration(self):
    """Return total BMDS execution time in seconds, or ``None`` when either
    the start or end timestamp is missing."""
    if not (self.execution_start and self.execution_end):
        return None
    return (self.execution_end - self.execution_start).total_seconds()
[ "def", "execution_duration", "(", "self", ")", ":", "duration", "=", "None", "if", "self", ".", "execution_start", "and", "self", ".", "execution_end", ":", "delta", "=", "self", ".", "execution_end", "-", "self", ".", "execution_start", "duration", "=", "de...
34.555556
11
def ask_overwrite(dest):
    """Check if file *dest* exists.

    If it does, ask the user whether to overwrite it: on confirmation the
    file is removed (for later overwrite), otherwise the program exits.
    """
    if not os.path.exists(dest):
        return
    prompt = "File '{}' already exists. Overwrite file?".format(dest)
    if yes_no_query(prompt):
        os.remove(dest)
    else:
        sys.exit("Cancelling operation...")
[ "def", "ask_overwrite", "(", "dest", ")", ":", "msg", "=", "\"File '{}' already exists. Overwrite file?\"", ".", "format", "(", "dest", ")", "if", "os", ".", "path", ".", "exists", "(", "dest", ")", ":", "if", "yes_no_query", "(", "msg", ")", ":", "os", ...
33.909091
15.909091
def is_subfeature_of(parent_property, f):
    """Return true iff f is an ordinary subfeature of the parent_property's
    feature, or if f is a subfeature of the parent_property's feature
    specific to the parent_property's value.
    """
    if __debug__:
        from .property import Property
        assert isinstance(parent_property, Property)
        assert isinstance(f, Feature)

    # Must be a subfeature with a known parent.
    if not f.subfeature:
        return False
    parent = f.parent
    if not parent:
        return False

    parent_feature, parent_value = parent[0], parent[1]
    matches_feature = parent_feature == parent_property.feature
    matches_value = (not parent_value) or parent_value == parent_property.value
    return matches_feature and matches_value
[ "def", "is_subfeature_of", "(", "parent_property", ",", "f", ")", ":", "if", "__debug__", ":", "from", ".", "property", "import", "Property", "assert", "isinstance", "(", "parent_property", ",", "Property", ")", "assert", "isinstance", "(", "f", ",", "Feature"...
25.814815
20.666667
def style(self, value):
    """
    Setter for **self.__style** attribute.

    :param value: Attribute value.
    :type value: Style
    """
    if value is not None:
        assert type(value) is Style, "'{0}' attribute: '{1}' type is not 'Style'!".format("style", value)
        # Merge the default style with the given one (given wins).
        merged = Style()
        merged.update(self.__default_style)
        merged.update(value)
        value = merged
    self.__style = value
[ "def", "style", "(", "self", ",", "value", ")", ":", "if", "value", "is", "not", "None", ":", "assert", "type", "(", "value", ")", "is", "Style", ",", "\"'{0}' attribute: '{1}' type is not 'Style'!\"", ".", "format", "(", "\"style\"", ",", "value", ")", "s...
30.733333
17.266667
def pubmed(self, pubmedid=None, hgnc_symbol=None, hgnc_identifier=None, limit=None, as_df=False):
    """Method to query :class:`.models.PubMed` objects in database

    :param pubmedid: alias symbol(s)
    :type pubmedid: str or tuple(str) or None

    :param hgnc_symbol: HGNC symbol(s)
    :type hgnc_symbol: str or tuple(str) or None

    :param hgnc_identifier: identifiers(s) in :class:`.models.HGNC`
    :type hgnc_identifier: int or tuple(int) or None

    :param limit:
        - if `isinstance(limit,int)==True` -> limit
        - if `isinstance(limit,tuple)==True` -> format:= tuple(page_number, results_per_page)
        - if limit == None -> all results
    :type limit: int or tuple(int) or None

    :param bool as_df: if `True` results are returned as :class:`pandas.DataFrame`

    :return:
        - if `as_df == False` -> list(:class:`.models.PubMed`)
        - if `as_df == True`  -> :class:`pandas.DataFrame`
    :rtype: list(:class:`.models.PubMed`) or :class:`pandas.DataFrame`
    """
    query = self.session.query(models.PubMed)

    # Direct column filters on the PubMed model.
    query = self.get_model_queries(query, (
        (pubmedid, models.PubMed.pubmedid),
    ))

    # Filters that traverse the PubMed <-> HGNC association.
    query = self.get_many_to_many_queries(query, (
        (hgnc_symbol, models.PubMed.hgncs, models.HGNC.symbol),
        (hgnc_identifier, models.PubMed.hgncs, models.HGNC.identifier),
    ))

    return self._limit_and_df(query, limit, as_df)
[ "def", "pubmed", "(", "self", ",", "pubmedid", "=", "None", ",", "hgnc_symbol", "=", "None", ",", "hgnc_identifier", "=", "None", ",", "limit", "=", "None", ",", "as_df", "=", "False", ")", ":", "q", "=", "self", ".", "session", ".", "query", "(", ...
39.025
24.7
def _isdst(dt): """Check if date is in dst. """ if type(dt) == datetime.date: dt = datetime.datetime.combine(dt, datetime.datetime.min.time()) dtc = dt.replace(year=datetime.datetime.now().year) if time.localtime(dtc.timestamp()).tm_isdst == 1: return True return False
[ "def", "_isdst", "(", "dt", ")", ":", "if", "type", "(", "dt", ")", "==", "datetime", ".", "date", ":", "dt", "=", "datetime", ".", "datetime", ".", "combine", "(", "dt", ",", "datetime", ".", "datetime", ".", "min", ".", "time", "(", ")", ")", ...
33.444444
15.222222
def process_row(cls, data, column_map):
    """Process the row data from Rekall.

    :param data: dict of raw column values from a Rekall row
    :param column_map: mapping of Rekall column name -> output column name
    :return: dict keyed by the mapped column names with normalized values
    """
    row = {}
    # Fix: dict.iteritems() is Python 2 only; items() works on Python 3.
    for key, value in data.items():
        if not value:
            value = '-'
        elif isinstance(value, list):
            value = value[1]
        elif isinstance(value, dict):
            if 'type_name' in value:
                if 'UnixTimeStamp' in value['type_name']:
                    value = datetime.datetime.utcfromtimestamp(value['epoch'])
                    # Epoch zero is Rekall's "no timestamp" marker.
                    if value == datetime.datetime(1970, 1, 1, 0, 0):
                        value = '-'
        # Assume the value is somehow well formed when we get here
        row[column_map[key]] = value
    return row
[ "def", "process_row", "(", "cls", ",", "data", ",", "column_map", ")", ":", "row", "=", "{", "}", "for", "key", ",", "value", "in", "data", ".", "iteritems", "(", ")", ":", "if", "not", "value", ":", "value", "=", "'-'", "elif", "isinstance", "(", ...
41.5
13.5
async def recv(self):
    """
    Receive the next :class:`~av.audio.frame.AudioFrame`.

    The base implementation just reads silence, subclass
    :class:`AudioStreamTrack` to provide a useful implementation.
    """
    if self.readyState != 'live':
        raise MediaStreamError

    sample_rate = 8000
    samples = int(AUDIO_PTIME * sample_rate)

    if hasattr(self, '_timestamp'):
        # Subsequent frames: advance the clock and pace delivery in real
        # time by sleeping until this frame is due.
        self._timestamp += samples
        wait = self._start + (self._timestamp / sample_rate) - time.time()
        await asyncio.sleep(wait)
    else:
        # First frame: record the wall-clock origin.
        self._start = time.time()
        self._timestamp = 0

    frame = AudioFrame(format='s16', layout='mono', samples=samples)
    for p in frame.planes:
        # Zero-filled buffer == silence.
        p.update(bytes(p.buffer_size))
    frame.pts = self._timestamp
    frame.sample_rate = sample_rate
    frame.time_base = fractions.Fraction(1, sample_rate)
    return frame
[ "async", "def", "recv", "(", "self", ")", ":", "if", "self", ".", "readyState", "!=", "'live'", ":", "raise", "MediaStreamError", "sample_rate", "=", "8000", "samples", "=", "int", "(", "AUDIO_PTIME", "*", "sample_rate", ")", "if", "hasattr", "(", "self", ...
33.892857
16.178571
def refresh_styles(self):
    """Reload the colour cycle for every available matplotlib style."""
    import matplotlib.pyplot as plt

    self.colours = {}
    for style in plt.style.available:
        # Styles without a prop_cycle (or colour entries) are skipped.
        try:
            prop_cycle = plt.style.library[style]['axes.prop_cycle']
            self.colours[style] = [c['color'] for c in list(prop_cycle)]
        except KeyError:
            continue

    self.colours['km3pipe'] = [
        "#ff7869", "#4babe1", "#96ad3e", "#e4823d",
        "#5d72b2", "#e2a3c2", "#fd9844", "#e480e7",
    ]
[ "def", "refresh_styles", "(", "self", ")", ":", "import", "matplotlib", ".", "pyplot", "as", "plt", "self", ".", "colours", "=", "{", "}", "for", "style", "in", "plt", ".", "style", ".", "available", ":", "try", ":", "style_colours", "=", "plt", ".", ...
34
19.9375
def det_optimal_snrsq(self, det):
    """Returns the optimal SNR squared in the given detector.

    Parameters
    ----------
    det : str
        The name of the detector.

    Returns
    -------
    float :
        The optimal SNR squared.
    """
    attr = '{}_optimal_snrsq'.format(det)
    try:
        # try to get it from current stats
        return getattr(self._current_stats, attr)
    except AttributeError:
        # hasn't been calculated yet; call loglr to do so, then retry
        self._loglr()
        return getattr(self._current_stats, attr)
[ "def", "det_optimal_snrsq", "(", "self", ",", "det", ")", ":", "# try to get it from current stats", "try", ":", "return", "getattr", "(", "self", ".", "_current_stats", ",", "'{}_optimal_snrsq'", ".", "format", "(", "det", ")", ")", "except", "AttributeError", ...
30.714286
18.714286
def set_dicts(self, word_dict, char_dict):
    """Install custom dictionaries.

    :param word_dict: The word dictionary.
    :param char_dict: The character dictionary.
    """
    self.word_dict, self.char_dict = word_dict, char_dict
[ "def", "set_dicts", "(", "self", ",", "word_dict", ",", "char_dict", ")", ":", "self", ".", "word_dict", "=", "word_dict", "self", ".", "char_dict", "=", "char_dict" ]
32.25
8.875
def interpolations_to_summary(sample_ind, interpolations, first_frame,
                              last_frame, hparams, decode_hp):
    """Converts interpolated frames into tf summaries.

    The summaries consists of:
      1. Image summary corresponding to the first frame.
      2. Image summary corresponding to the last frame.
      3. The interpolated frames as a gif summary.

    Args:
      sample_ind: int
      interpolations: Numpy array, shape=(num_interp, H, W, 3)
      first_frame: Numpy array, shape=(HWC)
      last_frame: Numpy array, shape=(HWC)
      hparams: HParams, train hparams
      decode_hp: HParams, decode hparams
    Returns:
      summaries: list of tf Summary Values.
    """
    parent_tag = "sample_%d" % sample_ind
    frame_shape = hparams.problem.frame_shape
    interp_shape = [hparams.batch_size, decode_hp.num_interp] + frame_shape
    interpolations = np.reshape(interpolations, interp_shape)
    # Tag encodes the interpolation mode (and rank, when ranked).
    interp_tag = "%s/interp/%s" % (parent_tag, decode_hp.channel_interp)
    if decode_hp.channel_interp == "ranked":
        interp_tag = "%s/rank_%d" % (interp_tag, decode_hp.rank_interp)

    summaries, _ = common_video.py_gif_summary(
        interp_tag, interpolations, return_summary_value=True,
        max_outputs=decode_hp.max_display_outputs,
        fps=decode_hp.frames_per_second)

    if decode_hp.save_frames:
        first_frame_summ = image_utils.image_to_tf_summary_value(
            first_frame, "%s/first" % parent_tag)
        last_frame_summ = image_utils.image_to_tf_summary_value(
            last_frame, "%s/last" % parent_tag)
        summaries.append(first_frame_summ)
        summaries.append(last_frame_summ)
    return summaries
[ "def", "interpolations_to_summary", "(", "sample_ind", ",", "interpolations", ",", "first_frame", ",", "last_frame", ",", "hparams", ",", "decode_hp", ")", ":", "parent_tag", "=", "\"sample_%d\"", "%", "sample_ind", "frame_shape", "=", "hparams", ".", "problem", "...
40.410256
14.641026
def _parse_members(self, contents, module): """Extracts any module-level members from the code. They must appear before any type declalations.""" #We need to get hold of the text before the module's main CONTAINS keyword #so that we don't find variables from executables and claim them as #belonging to the module. icontains = module.contains_index ichar = module.charindex(icontains, 0) module.preamble = module.refstring[:ichar] #Get a dictionary of all the members in this module body #We only want to look at variable definitions before the first type lowest = ichar remove = [] #Will use later below, see next comment for t in module.types: remove.append((module.types[t].start, module.types[t].end)) if module.types[t].start < lowest: lowest = module.types[t].start module.members.update(self.vparser.parse(contents[:lowest-(module.start + 10 + len(module.name))], module)) #The docstrings for these members will appear as member tags in the same #preamble text. We can't use the entire preamble for this because member #docs inside of a type declaration will show up as belonging to the #module, when in fact, they don't. remove.sort(key=lambda tup: tup[0]) retain = [] cur_end = 0 for rem in remove: signature = module.refstring[rem[0]+1:rem[1]].index("\n") + 2 keep = module.refstring[cur_end:rem[0] + signature] cur_end = rem[1] retain.append(keep) #If there weren't any types in the module, we still want to get at the docs in #the preamble. if len(remove) == 0: retain = module.preamble docsearch = "".join(retain) module.predocs = self.docparser.parse_docs(docsearch, module) if module.name in module.predocs: #We can only do member docstrings if the module had internal docstrings #that may to members. memdocs = self.docparser.to_doc(module.predocs[module.name][0], module.name) remainingdocs = self.docparser.process_memberdocs(memdocs, module) module.predocs[module.name] = remainingdocs
[ "def", "_parse_members", "(", "self", ",", "contents", ",", "module", ")", ":", "#We need to get hold of the text before the module's main CONTAINS keyword", "#so that we don't find variables from executables and claim them as", "#belonging to the module.", "icontains", "=", "module", ...
47.829787
22.255319
def set(self, model, property_name, value):
    """
    Set model property to value. Use setter if possible.

    :param model: model object or dict
    :param property_name: str, name on the model
    :param value: mixed, a value to set
    :return: None
    """
    # isinstance (instead of `type(model) is dict`) also covers dict
    # subclasses such as OrderedDict.
    if isinstance(model, dict):
        model[property_name] = value
    elif hasattr(model, 'set_' + property_name):
        setter = getattr(model, 'set_' + property_name)
        setter(value)
    else:
        try:
            setattr(model, property_name, value)
        except AttributeError:
            # Read-only attribute: silently skip, matching original intent.
            pass
[ "def", "set", "(", "self", ",", "model", ",", "property_name", ",", "value", ")", ":", "if", "type", "(", "model", ")", "is", "dict", ":", "model", "[", "property_name", "]", "=", "value", "elif", "hasattr", "(", "model", ",", "'set_'", "+", "propert...
34.722222
11.277778
def filter(self, relation_id=None, duedate__lt=None, duedate__gte=None, **kwargs):
    """
    A common query would be duedate__lt=date(2015, 1, 1) to get all
    Receivables that are due in 2014 and earlier.
    """
    if relation_id is not None:
        # Filter by (relation) account_id. There doesn't seem to be any
        # reason to prefer
        # 'read/financial/ReceivablesListByAccount?accountId=X' over this.
        remote_id = self._remote_guid(relation_id)
        self._filter_append(kwargs, u'AccountId eq %s' % (remote_id,))

    # Not sure what the AgeGroup means in ReceivablesListByAgeGroup, but we
    # can certainly do without.
    if duedate__lt is not None:
        self._filter_append(
            kwargs, u'DueDate lt %s' % (self._remote_datetime(duedate__lt),))
    if duedate__gte is not None:
        self._filter_append(
            kwargs, u'DueDate ge %s' % (self._remote_datetime(duedate__gte),))

    return super(Receivables, self).filter(**kwargs)
[ "def", "filter", "(", "self", ",", "relation_id", "=", "None", ",", "duedate__lt", "=", "None", ",", "duedate__gte", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "relation_id", "is", "not", "None", ":", "# Filter by (relation) account_id. There doesn'...
43.896552
20.517241
def default_formatter(error):
    """Escape the error, and wrap it in a span with class ``error-message``"""
    escaped = formencode.htmlfill.escape_formatter(error)
    return u'<span class="error-message">{0}</span>'.format(escaped)
[ "def", "default_formatter", "(", "error", ")", ":", "quoted", "=", "formencode", ".", "htmlfill", ".", "escape_formatter", "(", "error", ")", "return", "u'<span class=\"error-message\">{0}</span>'", ".", "format", "(", "quoted", ")" ]
57.5
13.5
def read(self, input_buffer, kmip_version=enums.KMIPVersion.KMIP_1_3):
    """
    Read the data encoding the ValidationInformation structure and decode
    it into its constituent parts.

    Args:
        input_buffer (stream): A data stream containing encoded object
            data, supporting a read method; usually a BytearrayStream
            object.
        kmip_version (KMIPVersion): An enumeration defining the KMIP
            version with which the object will be decoded. Optional,
            defaults to KMIP 1.3.

    Raises:
        InvalidKmipEncoding: Raised if the validation authority type,
            validation version major, validation type, and/or validation
            level are missing from the encoding.
        VersionNotSupported: Raised when a KMIP version is provided that
            does not support the ValidationInformation structure.
    """
    if kmip_version < enums.KMIPVersion.KMIP_1_3:
        raise exceptions.VersionNotSupported(
            "KMIP {} does not support the ValidationInformation "
            "object.".format(
                kmip_version.value
            )
        )

    super(ValidationInformation, self).read(
        input_buffer,
        kmip_version=kmip_version
    )
    # Restrict decoding to this structure's own payload.
    local_buffer = utils.BytearrayStream(input_buffer.read(self.length))

    # Required field: validation authority type.
    if self.is_tag_next(
            enums.Tags.VALIDATION_AUTHORITY_TYPE,
            local_buffer
    ):
        validation_authority_type = primitives.Enumeration(
            enums.ValidationAuthorityType,
            tag=enums.Tags.VALIDATION_AUTHORITY_TYPE
        )
        validation_authority_type.read(
            local_buffer,
            kmip_version=kmip_version
        )
        self._validation_authority_type = validation_authority_type
    else:
        raise exceptions.InvalidKmipEncoding(
            "The ValidationInformation encoding is missing the "
            "validation authority type."
        )

    # Optional field: validation authority country.
    if self.is_tag_next(
            enums.Tags.VALIDATION_AUTHORITY_COUNTRY,
            local_buffer
    ):
        validation_authority_country = primitives.TextString(
            tag=enums.Tags.VALIDATION_AUTHORITY_COUNTRY
        )
        validation_authority_country.read(
            local_buffer,
            kmip_version=kmip_version
        )
        self._validation_authority_country = validation_authority_country

    # Optional field: validation authority URI.
    if self.is_tag_next(enums.Tags.VALIDATION_AUTHORITY_URI, local_buffer):
        validation_authority_uri = primitives.TextString(
            tag=enums.Tags.VALIDATION_AUTHORITY_URI
        )
        validation_authority_uri.read(
            local_buffer,
            kmip_version=kmip_version
        )
        self._validation_authority_uri = validation_authority_uri

    # Required field: validation version major.
    if self.is_tag_next(
            enums.Tags.VALIDATION_VERSION_MAJOR,
            local_buffer
    ):
        validation_version_major = primitives.Integer(
            tag=enums.Tags.VALIDATION_VERSION_MAJOR
        )
        validation_version_major.read(
            local_buffer,
            kmip_version=kmip_version
        )
        self._validation_version_major = validation_version_major
    else:
        raise exceptions.InvalidKmipEncoding(
            "The ValidationInformation encoding is missing the "
            "validation version major."
        )

    # Optional field: validation version minor.
    if self.is_tag_next(
            enums.Tags.VALIDATION_VERSION_MINOR,
            local_buffer
    ):
        validation_version_minor = primitives.Integer(
            tag=enums.Tags.VALIDATION_VERSION_MINOR
        )
        validation_version_minor.read(
            local_buffer,
            kmip_version=kmip_version
        )
        self._validation_version_minor = validation_version_minor

    # Required field: validation type.
    if self.is_tag_next(enums.Tags.VALIDATION_TYPE, local_buffer):
        validation_type = primitives.Enumeration(
            enums.ValidationType,
            tag=enums.Tags.VALIDATION_TYPE
        )
        validation_type.read(
            local_buffer,
            kmip_version=kmip_version
        )
        self._validation_type = validation_type
    else:
        raise exceptions.InvalidKmipEncoding(
            "The ValidationInformation encoding is missing the "
            "validation type."
        )

    # Required field: validation level.
    if self.is_tag_next(enums.Tags.VALIDATION_LEVEL, local_buffer):
        validation_level = primitives.Integer(
            tag=enums.Tags.VALIDATION_LEVEL
        )
        validation_level.read(local_buffer, kmip_version=kmip_version)
        self._validation_level = validation_level
    else:
        raise exceptions.InvalidKmipEncoding(
            "The ValidationInformation encoding is missing the "
            "validation level."
        )

    # Optional field: validation certificate identifier.
    if self.is_tag_next(
            enums.Tags.VALIDATION_CERTIFICATE_IDENTIFIER,
            local_buffer
    ):
        validation_certificate_identifier = primitives.TextString(
            tag=enums.Tags.VALIDATION_CERTIFICATE_IDENTIFIER
        )
        validation_certificate_identifier.read(
            local_buffer,
            kmip_version=kmip_version
        )
        self._validation_certificate_identifier = \
            validation_certificate_identifier

    # Optional field: validation certificate URI.
    if self.is_tag_next(
            enums.Tags.VALIDATION_CERTIFICATE_URI,
            local_buffer
    ):
        validation_certificate_uri = primitives.TextString(
            tag=enums.Tags.VALIDATION_CERTIFICATE_URI
        )
        validation_certificate_uri.read(
            local_buffer,
            kmip_version=kmip_version
        )
        self._validation_certificate_uri = validation_certificate_uri

    # Optional field: validation vendor URI.
    if self.is_tag_next(enums.Tags.VALIDATION_VENDOR_URI, local_buffer):
        validation_vendor_uri = primitives.TextString(
            tag=enums.Tags.VALIDATION_VENDOR_URI
        )
        validation_vendor_uri.read(local_buffer, kmip_version=kmip_version)
        self._validation_vendor_uri = validation_vendor_uri

    # Optional repeating field: validation profiles.
    validation_profiles = []
    while self.is_tag_next(enums.Tags.VALIDATION_PROFILE, local_buffer):
        validation_profile = primitives.TextString(
            tag=enums.Tags.VALIDATION_PROFILE
        )
        validation_profile.read(local_buffer, kmip_version=kmip_version)
        validation_profiles.append(validation_profile)
    self._validation_profiles = validation_profiles

    self.is_oversized(local_buffer)
[ "def", "read", "(", "self", ",", "input_buffer", ",", "kmip_version", "=", "enums", ".", "KMIPVersion", ".", "KMIP_1_3", ")", ":", "if", "kmip_version", "<", "enums", ".", "KMIPVersion", ".", "KMIP_1_3", ":", "raise", "exceptions", ".", "VersionNotSupported", ...
37.837989
20.206704
def clean_source_index(self):
    """
    Cleanup broken symbolic links in the local source distribution index.

    The purpose of this method requires some context to understand. Let me
    preface this by stating that I realize I'm probably overcomplicating
    things, but I like to preserve forward / backward compatibility when
    possible and I don't feel like dropping everyone's locally cached source
    distribution archives without a good reason to do so. With that out of
    the way:

    - Versions of pip-accel based on pip 1.4.x maintained a local source
      distribution index based on a directory containing symbolic links
      pointing directly into pip's download cache. When files were removed
      from pip's download cache, broken symbolic links remained in pip-accel's
      local source distribution index directory. This resulted in very
      confusing error messages. To avoid this :func:`clean_source_index()`
      cleaned up broken symbolic links whenever pip-accel was about to invoke
      pip.

    - More recent versions of pip (6.x) no longer support the same style of
      download cache that contains source distribution archives that can be
      re-used directly by pip-accel. To cope with the changes in pip 6.x new
      versions of pip-accel tell pip to download source distribution archives
      directly into the local source distribution index directory maintained
      by pip-accel.

    - It is very reasonable for users of pip-accel to have multiple versions
      of pip-accel installed on their system (imagine a dozen Python virtual
      environments that won't all be updated at the same time; this is the
      situation I always find myself in :-). These versions of pip-accel will
      be sharing the same local source distribution index directory.

    - All of this leads up to the local source distribution index directory
      containing a mixture of symbolic links and regular files with no
      obvious way to atomically and gracefully upgrade the local source
      distribution index directory while avoiding fights between old and new
      versions of pip-accel :-).

    - I could of course switch to storing the new local source distribution
      index in a differently named directory (avoiding potential conflicts
      between multiple versions of pip-accel) but then I would have to
      introduce a new configuration option, otherwise everyone who has
      configured pip-accel to store its source index in a non-default
      location could still be bitten by compatibility issues.

    For now I've decided to keep using the same directory for the local
    source distribution index and to keep cleaning up broken symbolic links.
    This enables cooperating between old and new versions of pip-accel and
    avoids trashing user's local source distribution indexes. The main
    disadvantage is that pip-accel is still required to clean up broken
    symbolic links...
    """
    cleanup_timer = Timer()
    cleanup_counter = 0
    for entry in os.listdir(self.config.source_index):
        pathname = os.path.join(self.config.source_index, entry)
        # A symlink whose target no longer exists is "broken".
        if os.path.islink(pathname) and not os.path.exists(pathname):
            # Fix: Logger.warn() is a deprecated alias (removed in Python
            # 3.13); use warning() instead.
            logger.warning("Cleaning up broken symbolic link: %s", pathname)
            os.unlink(pathname)
            cleanup_counter += 1
    logger.debug("Cleaned up %i broken symbolic links from source index in %s.",
                 cleanup_counter, cleanup_timer)
[ "def", "clean_source_index", "(", "self", ")", ":", "cleanup_timer", "=", "Timer", "(", ")", "cleanup_counter", "=", "0", "for", "entry", "in", "os", ".", "listdir", "(", "self", ".", "config", ".", "source_index", ")", ":", "pathname", "=", "os", ".", ...
57.571429
29.730159
def rename_motifs(motifs, stats=None): """Rename motifs to GimmeMotifs_1..GimmeMotifs_N. If stats object is passed, stats will be copied.""" final_motifs = [] for i, motif in enumerate(motifs): old = str(motif) motif.id = "GimmeMotifs_{}".format(i + 1) final_motifs.append(motif) if stats: stats[str(motif)] = stats[old].copy() if stats: return final_motifs, stats else: return final_motifs
[ "def", "rename_motifs", "(", "motifs", ",", "stats", "=", "None", ")", ":", "final_motifs", "=", "[", "]", "for", "i", ",", "motif", "in", "enumerate", "(", "motifs", ")", ":", "old", "=", "str", "(", "motif", ")", "motif", ".", "id", "=", "\"Gimme...
29.3125
14.6875
def as_labeller(x, default=label_value, multi_line=True): """ Coerse to labeller function Parameters ---------- x : function | dict Object to coerce default : function | str Default labeller. If it is a string, it should be the name of one the labelling functions provided by plotnine. multi_line : bool Whether to place each variable on a separate line Returns ------- out : function Labelling function """ if x is None: x = default # One of the labelling functions as string with suppress(KeyError, TypeError): x = LABELLERS[x] # x is a labeller with suppress(AttributeError): if x.__name__ == '_labeller': return x def _labeller(label_info): label_info = pd.Series(label_info).astype(str) if callable(x) and x.__name__ in LABELLERS: # labellers in this module return x(label_info) elif hasattr(x, '__contains__'): # dictionary lookup for var in label_info.index: if label_info[var] in x: label_info[var] = x[label_info[var]] return label_info elif callable(x): # generic function for var in label_info.index: label_info[var] = x(label_info[var]) return label_info else: msg = "Could not use '{0}' for labelling." raise PlotnineError(msg.format(x)) return _labeller
[ "def", "as_labeller", "(", "x", ",", "default", "=", "label_value", ",", "multi_line", "=", "True", ")", ":", "if", "x", "is", "None", ":", "x", "=", "default", "# One of the labelling functions as string", "with", "suppress", "(", "KeyError", ",", "TypeError"...
27.574074
15.907407
def set_version(self, version, force=True): """ Sets the version name for the current state of repo """ if version in self.versions: self._version = version if 'working' in self.repo.branch().stdout: if force: logger.info('Found working branch. Removing...') cmd = self.repo.checkout('master') cmd = self.repo.branch('working', d=True) else: logger.info('Found working branch from previous session. Use force=True to remove it and start anew.') return stdout = self.repo.checkout(version, b='working').stdout # active version set in 'working' branch logger.info('Version {0} set'.format(version)) else: raise AttributeError('Version {0} not found'.format(version))
[ "def", "set_version", "(", "self", ",", "version", ",", "force", "=", "True", ")", ":", "if", "version", "in", "self", ".", "versions", ":", "self", ".", "_version", "=", "version", "if", "'working'", "in", "self", ".", "repo", ".", "branch", "(", ")...
49.222222
25.944444
def _work(self, backend, package, ident='', log=True): """ Centralized task worker code. Used internally, see send_signal() and work() for the external interfaces. """ num = self._sending_task(backend) if log: self.log(INFO, 'Starting %s backend task #%s (%s)' % (backend, num, ident)) try: conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM) conn.connect(backend) self.send(conn, package) result = self.recv(conn) self.log(DEBUG, result) result = self.codec.decode(result) except Exception as e: self._canceling_task(backend) raise else: self._closing_task(backend) if log: self.log(INFO, 'Finished %s backend task #%s (%s)' % (backend, num, ident)) finally: conn.close() if result[0] == 'error': # We reraise errors in our own way. raise BackendProcessingError(*result[1:]) else: return result[1]
[ "def", "_work", "(", "self", ",", "backend", ",", "package", ",", "ident", "=", "''", ",", "log", "=", "True", ")", ":", "num", "=", "self", ".", "_sending_task", "(", "backend", ")", "if", "log", ":", "self", ".", "log", "(", "INFO", ",", "'Star...
35.4375
14.375
def sixteen_oscillator_two_stimulated_ensembles_grid(): "Not accurate false due to spikes are observed" parameters = legion_parameters(); parameters.teta_x = -1.1; template_dynamic_legion(16, 2000, 1500, conn_type = conn_type.GRID_FOUR, params = parameters, stimulus = [1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1]);
[ "def", "sixteen_oscillator_two_stimulated_ensembles_grid", "(", ")", ":", "parameters", "=", "legion_parameters", "(", ")", "parameters", ".", "teta_x", "=", "-", "1.1", "template_dynamic_legion", "(", "16", ",", "2000", ",", "1500", ",", "conn_type", "=", "conn_t...
83.5
46.5
def send_query(self, ID, methodname, *args, **kwargs): """将调用请求的ID,方法名,参数包装为请求数据后编码为字节串发送出去. Parameters: ID (str): - 任务ID methodname (str): - 要调用的方法名 args (Any): - 要调用的方法的位置参数 kwargs (Any): - 要调用的方法的关键字参数 Return: (bool): - 准确地说没有错误就会返回True """ query = self._make_query(ID, methodname, *args, **kwargs) self._send_query(query) return True
[ "def", "send_query", "(", "self", ",", "ID", ",", "methodname", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "query", "=", "self", ".", "_make_query", "(", "ID", ",", "methodname", ",", "*", "args", ",", "*", "*", "kwargs", ")", "self", "...
25.941176
17.235294
def map(cls, x, palette, limits, na_value=None): """ Map values to a discrete palette Parameters ---------- palette : callable ``f(x)`` palette to use x : array_like Continuous values to scale na_value : object Value to use for missing values. Returns ------- out : array_like Values mapped onto a palette """ n = len(limits) pal = palette(n)[match(x, limits)] try: pal[pd.isnull(x)] = na_value except TypeError: pal = [v if not pd.isnull(v) else na_value for v in pal] return pal
[ "def", "map", "(", "cls", ",", "x", ",", "palette", ",", "limits", ",", "na_value", "=", "None", ")", ":", "n", "=", "len", "(", "limits", ")", "pal", "=", "palette", "(", "n", ")", "[", "match", "(", "x", ",", "limits", ")", "]", "try", ":",...
25.307692
15.692308
def start(name, call=None): ''' Start a node CLI Examples: .. code-block:: bash salt-cloud -a start myinstance ''' if call != 'action': raise SaltCloudSystemExit( 'The stop action must be called with -a or --action.' ) log.info('Starting node %s', name) instanceId = _get_node(name)['InstanceId'] params = {'Action': 'StartInstance', 'InstanceId': instanceId} result = query(params) return result
[ "def", "start", "(", "name", ",", "call", "=", "None", ")", ":", "if", "call", "!=", "'action'", ":", "raise", "SaltCloudSystemExit", "(", "'The stop action must be called with -a or --action.'", ")", "log", ".", "info", "(", "'Starting node %s'", ",", "name", "...
19.75
22.833333
def download_structure(inputpdbid): """Given a PDB ID, downloads the corresponding PDB structure. Checks for validity of ID and handles error while downloading. Returns the path of the downloaded file.""" try: if len(inputpdbid) != 4 or extract_pdbid(inputpdbid.lower()) == 'UnknownProtein': sysexit(3, 'Invalid PDB ID (Wrong format)\n') pdbfile, pdbid = fetch_pdb(inputpdbid.lower()) pdbpath = tilde_expansion('%s/%s.pdb' % (config.BASEPATH.rstrip('/'), pdbid)) create_folder_if_not_exists(config.BASEPATH) with open(pdbpath, 'w') as g: g.write(pdbfile) write_message('file downloaded as %s\n\n' % pdbpath) return pdbpath, pdbid except ValueError: # Invalid PDB ID, cannot fetch from RCBS server sysexit(3, 'Invalid PDB ID (Entry does not exist)\n')
[ "def", "download_structure", "(", "inputpdbid", ")", ":", "try", ":", "if", "len", "(", "inputpdbid", ")", "!=", "4", "or", "extract_pdbid", "(", "inputpdbid", ".", "lower", "(", ")", ")", "==", "'UnknownProtein'", ":", "sysexit", "(", "3", ",", "'Invali...
49.647059
19.882353
def add(self, title, obj, **kwargs): """ Add a title :param title: str: The title of the menu :param obj: class or method :param kwargs: :return: """ is_class = inspect.isclass(obj) self._push(title=title, view=obj, class_name=obj.im_class.__name__ if not is_class else obj.__name__, is_class=is_class, **kwargs)
[ "def", "add", "(", "self", ",", "title", ",", "obj", ",", "*", "*", "kwargs", ")", ":", "is_class", "=", "inspect", ".", "isclass", "(", "obj", ")", "self", ".", "_push", "(", "title", "=", "title", ",", "view", "=", "obj", ",", "class_name", "="...
31.928571
11.642857
def transfer_and_wait( self, registry_address: PaymentNetworkID, token_address: TokenAddress, amount: TokenAmount, target: Address, identifier: PaymentID = None, transfer_timeout: int = None, secret: Secret = None, secret_hash: SecretHash = None, ): """ Do a transfer with `target` with the given `amount` of `token_address`. """ # pylint: disable=too-many-arguments payment_status = self.transfer_async( registry_address=registry_address, token_address=token_address, amount=amount, target=target, identifier=identifier, secret=secret, secret_hash=secret_hash, ) payment_status.payment_done.wait(timeout=transfer_timeout) return payment_status
[ "def", "transfer_and_wait", "(", "self", ",", "registry_address", ":", "PaymentNetworkID", ",", "token_address", ":", "TokenAddress", ",", "amount", ":", "TokenAmount", ",", "target", ":", "Address", ",", "identifier", ":", "PaymentID", "=", "None", ",", "transf...
34.6
11.52
def smart_import(mpath): """Given a path smart_import will import the module and return the attr reffered to.""" try: rest = __import__(mpath) except ImportError: split = mpath.split('.') rest = smart_import('.'.join(split[:-1])) rest = getattr(rest, split[-1]) return rest
[ "def", "smart_import", "(", "mpath", ")", ":", "try", ":", "rest", "=", "__import__", "(", "mpath", ")", "except", "ImportError", ":", "split", "=", "mpath", ".", "split", "(", "'.'", ")", "rest", "=", "smart_import", "(", "'.'", ".", "join", "(", "s...
34.777778
12.888889
def delete(self, order_id, data=None): """Cancel order and return the order object. Deleting an order causes the order status to change to canceled. The updated order object is returned. """ if not order_id or not order_id.startswith(self.RESOURCE_ID_PREFIX): raise IdentifierError( "Invalid order ID: '{id}'. An order ID should start with '{prefix}'.".format( id=order_id, prefix=self.RESOURCE_ID_PREFIX) ) result = super(Orders, self).delete(order_id, data) return self.get_resource_object(result)
[ "def", "delete", "(", "self", ",", "order_id", ",", "data", "=", "None", ")", ":", "if", "not", "order_id", "or", "not", "order_id", ".", "startswith", "(", "self", ".", "RESOURCE_ID_PREFIX", ")", ":", "raise", "IdentifierError", "(", "\"Invalid order ID: '{...
46.461538
19.307692
def most_frequent(self, k, inplace=False): """Only most frequent k words to be included in the embeddings.""" vocabulary = self.vocabulary.most_frequent(k) vectors = np.asarray([self[w] for w in vocabulary]) if inplace: self.vocabulary = vocabulary self.vectors = vectors return self return Embedding(vectors=vectors, vocabulary=vocabulary)
[ "def", "most_frequent", "(", "self", ",", "k", ",", "inplace", "=", "False", ")", ":", "vocabulary", "=", "self", ".", "vocabulary", ".", "most_frequent", "(", "k", ")", "vectors", "=", "np", ".", "asarray", "(", "[", "self", "[", "w", "]", "for", ...
41.111111
12.444444
def __deserialize_primitive(self, data, klass): """Deserializes string to primitive type. :param data: str. :param klass: class literal. :return: int, long, float, str, bool. """ try: return klass(data) except UnicodeEncodeError: return six.text_type(data) except TypeError: return data
[ "def", "__deserialize_primitive", "(", "self", ",", "data", ",", "klass", ")", ":", "try", ":", "return", "klass", "(", "data", ")", "except", "UnicodeEncodeError", ":", "return", "six", ".", "text_type", "(", "data", ")", "except", "TypeError", ":", "retu...
26.785714
13.5
def histogram(self, key, **dims): """Adds histogram with dimensions to the registry""" return super(MetricsRegistry, self).histogram( self.metadata.register(key, **dims))
[ "def", "histogram", "(", "self", ",", "key", ",", "*", "*", "dims", ")", ":", "return", "super", "(", "MetricsRegistry", ",", "self", ")", ".", "histogram", "(", "self", ".", "metadata", ".", "register", "(", "key", ",", "*", "*", "dims", ")", ")" ...
48.75
7.25
def parse_single(str_): """ Very simple parser to parse expressions represent some single values. :param str_: a string to parse :return: Int | Bool | String >>> parse_single(None) '' >>> parse_single("0") 0 >>> parse_single("123") 123 >>> parse_single("True") True >>> parse_single("a string") 'a string' >>> parse_single('"a string"') 'a string' >>> parse_single("'a string'") 'a string' >>> parse_single("0.1") '0.1' >>> parse_single(" a string contains extra whitespaces ") 'a string contains extra whitespaces' """ if str_ is None: return '' str_ = str_.strip() if not str_: return '' if BOOL_PATTERN.match(str_) is not None: return bool(str_) if INT_PATTERN.match(str_) is not None: return int(str_) if STR_PATTERN.match(str_) is not None: return str_[1:-1] return str_
[ "def", "parse_single", "(", "str_", ")", ":", "if", "str_", "is", "None", ":", "return", "''", "str_", "=", "str_", ".", "strip", "(", ")", "if", "not", "str_", ":", "return", "''", "if", "BOOL_PATTERN", ".", "match", "(", "str_", ")", "is", "not",...
20.613636
21.159091
def generate_tags_multiple_files_strings(input_files, ns, tag, ignore_tags): """ Creates stringified xml output of elements with certain tag. """ for el in generate_tags_multiple_files(input_files, tag, ignore_tags, ns): yield formatting.string_and_clear(el, ns)
[ "def", "generate_tags_multiple_files_strings", "(", "input_files", ",", "ns", ",", "tag", ",", "ignore_tags", ")", ":", "for", "el", "in", "generate_tags_multiple_files", "(", "input_files", ",", "tag", ",", "ignore_tags", ",", "ns", ")", ":", "yield", "formatti...
46.833333
17.833333
def generate_session_id(secret_key=settings.secret_key_bytes(), signed=settings.sign_sessions()): """Generate a random session ID. Typically, each browser tab connected to a Bokeh application has its own session ID. In production deployments of a Bokeh app, session IDs should be random and unguessable - otherwise users of the app could interfere with one another. If session IDs are signed with a secret key, the server can verify that the generator of the session ID was "authorized" (the generator had to know the secret key). This can be used to have a separate process, such as another web application, which generates new sessions on a Bokeh server. This other process may require users to log in before redirecting them to the Bokeh server with a valid session ID, for example. Args: secret_key (str, optional) : Secret key (default: value of 'BOKEH_SECRET_KEY' env var) signed (bool, optional) : Whether to sign the session ID (default: value of 'BOKEH_SIGN_SESSIONS' env var) """ secret_key = _ensure_bytes(secret_key) if signed: # note: '-' can also be in the base64 encoded signature base_id = _get_random_string(secret_key=secret_key) return base_id + '-' + _signature(base_id, secret_key) else: return _get_random_string(secret_key=secret_key)
[ "def", "generate_session_id", "(", "secret_key", "=", "settings", ".", "secret_key_bytes", "(", ")", ",", "signed", "=", "settings", ".", "sign_sessions", "(", ")", ")", ":", "secret_key", "=", "_ensure_bytes", "(", "secret_key", ")", "if", "signed", ":", "#...
47.724138
26.103448
def purge_queue(self, vhost, name): """ Purge all messages from a single queue. This is a convenience method so you aren't forced to supply a list containing a single tuple to the purge_queues method. :param string vhost: The vhost of the queue being purged. :param string name: The name of the queue being purged. :rtype: None """ vhost = quote(vhost, '') name = quote(name, '') path = Client.urls['purge_queue'] % (vhost, name) return self._call(path, 'DELETE')
[ "def", "purge_queue", "(", "self", ",", "vhost", ",", "name", ")", ":", "vhost", "=", "quote", "(", "vhost", ",", "''", ")", "name", "=", "quote", "(", "name", ",", "''", ")", "path", "=", "Client", ".", "urls", "[", "'purge_queue'", "]", "%", "(...
36.466667
17.8
def isoline_vmag(hemi, isolines=None, surface='midgray', min_length=2, **kw):
    '''
    isoline_vmag(hemi) calculates the visual magnification function f using the default set of
      iso-lines (as returned by neuropythy.vision.visual_isolines()). The hemi argument may
      alternately be a mesh object.
    isoline_vmag(hemi, isolns) uses the given iso-lines rather than the default ones.

    The return value of this funciton is a dictionary whose keys are 'tangential', 'radial', and
    'areal', and whose values are the estimated visual magnification functions. These functions
    are of the form f(x,y) where x and y can be numbers or arrays in the visual field.
    '''
    from neuropythy.util import (curry, zinv)
    from neuropythy.mri import is_cortex
    from neuropythy.vision import visual_isolines
    from neuropythy.geometry import to_mesh
    # if there's no isolines, get them
    if isolines is None: isolines = visual_isolines(hemi, **kw)
    # see if the isolines is a lazy map of visual areas; if so return a lazy map recursing...
    if pimms.is_vector(isolines.keys(), 'int'):
        f = lambda k: isoline_vmag(isolines[k], surface=surface, min_length=min_length)
        return pimms.lazy_map({k:curry(f, k) for k in six.iterkeys(isolines)})
    # NOTE(review): mesh is constructed (validating hemi/surface) but not
    # referenced again below — presumably only a validation step; confirm.
    mesh = to_mesh((hemi, surface))
    # filter by min length: keep only the lines whose polar-angle sample
    # count is at least min_length; groups left empty are dropped entirely
    if min_length is not None:
        isolines = {k: {kk: {kkk: [vvv[ii] for ii in iis]
                             for (kkk,vvv) in six.iteritems(vv)}
                        for (kk,vv) in six.iteritems(v)
                        for iis in [[ii for (ii,u) in enumerate(vv['polar_angles'])
                                     if len(u) >= min_length]]
                        if len(iis) > 0}
                    for (k,v) in six.iteritems(isolines)}
    # radial lines are iso-eccentricity; tangential lines are iso-angle
    (rlns,tlns) = [isolines[k] for k in ['eccentricity', 'polar_angle']]
    if len(rlns) < 2: raise ValueError('fewer than 2 iso-eccentricity lines found')
    if len(tlns) < 2: raise ValueError('fewer than 2 iso-angle lines found')
    # grab the visual/surface lines: flatten every line of every group into
    # one list per (radial/tangential) x (visual/surface) combination
    ((rvlns,tvlns),(rslns,tslns)) = [[[u for lns in six.itervalues(xlns) for u in lns[k]]
                                     for xlns in (rlns,tlns)]
                                    for k in ('visual_coordinates','surface_coordinates')]
    # calculate some distances: per-segment Euclidean lengths along each line
    # (columns are points, so adjacent-column differences give the segments)
    (rslen,tslen) = [[np.sqrt(np.sum((sx[:,:-1] - sx[:,1:])**2, 0)) for sx in slns]
                     for slns in (rslns,tslns)]
    (rvlen,tvlen) = [[np.sqrt(np.sum((vx[:,:-1] - vx[:,1:])**2, 0)) for vx in vlns]
                     for vlns in (rvlns,tvlns)]
    # segment midpoints in the visual field
    (rvxy, tvxy) = [[0.5*(vx[:,:-1] + vx[:,1:]) for vx in vlns] for vlns in (rvlns,tvlns)]
    # concatenate all segments of all lines into flat arrays
    (rvlen,tvlen,rslen,tslen) = [np.concatenate(u) for u in (rvlen,tvlen,rslen,tslen)]
    (rvxy,tvxy) = [np.hstack(vxy) for vxy in (rvxy,tvxy)]
    # magnification = visual length / surface length; zinv gives a
    # zero-safe reciprocal of the surface lengths
    (rvmag,tvmag) = [vlen * zinv(slen) for (vlen,slen) in zip([rvlen,tvlen],[rslen,tslen])]
    return {k: {'visual_coordinates':vxy, 'visual_magnification': vmag,
                'visual_lengths': vlen, 'surface_lengths': slen}
            for (k,vxy,vmag,vlen,slen) in zip(['radial','tangential'], [rvxy,tvxy],
                                              [rvmag,tvmag], [rvlen,tvlen], [rslen,tslen])}
[ "def", "isoline_vmag", "(", "hemi", ",", "isolines", "=", "None", ",", "surface", "=", "'midgray'", ",", "min_length", "=", "2", ",", "*", "*", "kw", ")", ":", "from", "neuropythy", ".", "util", "import", "(", "curry", ",", "zinv", ")", "from", "neur...
63.5
29.62
def send(self, sock, msg): """Send ``msg`` to destination ``sock``.""" data = pickle.dumps(msg) buf = struct.pack('>I', len(data)) + data sock.sendall(buf)
[ "def", "send", "(", "self", ",", "sock", ",", "msg", ")", ":", "data", "=", "pickle", ".", "dumps", "(", "msg", ")", "buf", "=", "struct", ".", "pack", "(", "'>I'", ",", "len", "(", "data", ")", ")", "+", "data", "sock", ".", "sendall", "(", ...
33.4
10
def get_by_id(self, reply_id): ''' Get the reply by id. ''' reply = MReply.get_by_uid(reply_id) logger.info('get_reply: {0}'.format(reply_id)) self.render('misc/reply/show_reply.html', reply=reply, username=reply.user_name, date=reply.date, vote=reply.vote, uid=reply.uid, userinfo=self.userinfo, kwd={})
[ "def", "get_by_id", "(", "self", ",", "reply_id", ")", ":", "reply", "=", "MReply", ".", "get_by_uid", "(", "reply_id", ")", "logger", ".", "info", "(", "'get_reply: {0}'", ".", "format", "(", "reply_id", ")", ")", "self", ".", "render", "(", "'misc/repl...
31.933333
12.6
def confirm(question: str, default: bool = True) -> bool: """ Requests confirmation of the specified question and returns that result :param question: The question to print to the console for the confirmation :param default: The default value if the user hits enter without entering a value """ result = input('{question} [{yes}/{no}]:'.format( question=question, yes='(Y)' if default else 'Y', no='N' if default else '(N)' )) if not result: return default if result[0].lower() in ['y', 't', '1']: return True return False
[ "def", "confirm", "(", "question", ":", "str", ",", "default", ":", "bool", "=", "True", ")", "->", "bool", ":", "result", "=", "input", "(", "'{question} [{yes}/{no}]:'", ".", "format", "(", "question", "=", "question", ",", "yes", "=", "'(Y)'", "if", ...
27.318182
21.227273
def CheckHost(host_data, os_name=None, cpe=None, labels=None, exclude_checks=None, restrict_checks=None): """Perform all checks on a host using acquired artifacts. Checks are selected based on the artifacts available and the host attributes (e.g. os_name/cpe/labels) provided as either parameters, or in the knowledgebase artifact. A KnowledgeBase artifact should be provided that contains, at a minimum: - OS - Hostname or IP Other knowldegebase attributes may be required for specific checks. CPE is currently unused, pending addition of a CPE module in the GRR client. Labels are arbitrary string labels attached to a client. Args: host_data: A dictionary with artifact names as keys, and rdf data as values. os_name: An OS name (optional). cpe: A CPE string (optional). labels: An iterable of labels (optional). exclude_checks: A list of check ids not to run. A check id in this list will not get run even if included in restrict_checks. restrict_checks: A list of check ids that may be run, if appropriate. Returns: A CheckResults object that contains results for all checks that were performed on the host. """ # Get knowledgebase, os_name from hostdata kb = host_data.get("KnowledgeBase") if os_name is None: os_name = kb.os if cpe is None: # TODO(user): Get CPE (requires new artifact/parser) pass if labels is None: # TODO(user): Get labels (see grr/lib/export.py for acquisition # from client) pass return CheckRegistry.Process( host_data, os_name=os_name, cpe=cpe, labels=labels, restrict_checks=restrict_checks, exclude_checks=exclude_checks)
[ "def", "CheckHost", "(", "host_data", ",", "os_name", "=", "None", ",", "cpe", "=", "None", ",", "labels", "=", "None", ",", "exclude_checks", "=", "None", ",", "restrict_checks", "=", "None", ")", ":", "# Get knowledgebase, os_name from hostdata", "kb", "=", ...
33.288462
22.173077
def discard(self, pid=None): """Discard deposit changes. #. The signal :data:`invenio_records.signals.before_record_update` is sent before the edit execution. #. It restores the last published version. #. The following meta information are saved inside the deposit: .. code-block:: python deposit['$schema'] = deposit_schema_from_record_schema #. The signal :data:`invenio_records.signals.after_record_update` is sent after the edit execution. #. The deposit index is updated. Status required: ``'draft'``. :param pid: Force a pid object. (Default: ``None``) :returns: A new Deposit object. """ pid = pid or self.pid with db.session.begin_nested(): before_record_update.send( current_app._get_current_object(), record=self) _, record = self.fetch_published() self.model.json = deepcopy(record.model.json) self.model.json['$schema'] = self.build_deposit_schema(record) flag_modified(self.model, 'json') db.session.merge(self.model) after_record_update.send( current_app._get_current_object(), record=self) return self.__class__(self.model.json, model=self.model)
[ "def", "discard", "(", "self", ",", "pid", "=", "None", ")", ":", "pid", "=", "pid", "or", "self", ".", "pid", "with", "db", ".", "session", ".", "begin_nested", "(", ")", ":", "before_record_update", ".", "send", "(", "current_app", ".", "_get_current...
32.275
21.475
def call(cmd_args, suppress_output=False): """ Call an arbitary command and return the exit value, stdout, and stderr as a tuple Command can be passed in as either a string or iterable >>> result = call('hatchery', suppress_output=True) >>> result.exitval 0 >>> result = call(['hatchery', 'notreal']) >>> result.exitval 1 """ if not funcy.is_list(cmd_args) and not funcy.is_tuple(cmd_args): cmd_args = shlex.split(cmd_args) logger.info('executing `{}`'.format(' '.join(cmd_args))) call_request = CallRequest(cmd_args, suppress_output=suppress_output) call_result = call_request.run() if call_result.exitval: logger.error('`{}` returned error code {}'.format(' '.join(cmd_args), call_result.exitval)) return call_result
[ "def", "call", "(", "cmd_args", ",", "suppress_output", "=", "False", ")", ":", "if", "not", "funcy", ".", "is_list", "(", "cmd_args", ")", "and", "not", "funcy", ".", "is_tuple", "(", "cmd_args", ")", ":", "cmd_args", "=", "shlex", ".", "split", "(", ...
38.85
20.15
def _StructPackDecoder(wire_type, format): """Return a constructor for a decoder for a fixed-width field. Args: wire_type: The field's wire type. format: The format string to pass to struct.unpack(). """ value_size = struct.calcsize(format) local_unpack = struct.unpack # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but # not enough to make a significant difference. # Note that we expect someone up-stack to catch struct.error and convert # it to _DecodeError -- this way we don't have to set up exception- # handling blocks every time we parse one value. def InnerDecode(buffer, pos): new_pos = pos + value_size result = local_unpack(format, buffer[pos:new_pos])[0] return (result, new_pos) return _SimpleDecoder(wire_type, InnerDecode)
[ "def", "_StructPackDecoder", "(", "wire_type", ",", "format", ")", ":", "value_size", "=", "struct", ".", "calcsize", "(", "format", ")", "local_unpack", "=", "struct", ".", "unpack", "# Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but", "# not ...
34.73913
19.217391
def confirm_project_avatar(self, project, cropping_properties): """Confirm the temporary avatar image previously uploaded with the specified cropping. After a successful registry with :py:meth:`create_temp_project_avatar`, use this method to confirm the avatar for use. The final avatar can be a subarea of the uploaded image, which is customized with the ``cropping_properties``: the return value of :py:meth:`create_temp_project_avatar` should be used for this argument. :param project: ID or key of the project to confirm the avatar in :param cropping_properties: a dict of cropping properties from :py:meth:`create_temp_project_avatar` """ data = cropping_properties url = self._get_url('project/' + project + '/avatar') r = self._session.post( url, data=json.dumps(data)) return json_loads(r)
[ "def", "confirm_project_avatar", "(", "self", ",", "project", ",", "cropping_properties", ")", ":", "data", "=", "cropping_properties", "url", "=", "self", ".", "_get_url", "(", "'project/'", "+", "project", "+", "'/avatar'", ")", "r", "=", "self", ".", "_se...
52.470588
31.117647
def create_git_tag(self, tag, message, object, type, tagger=github.GithubObject.NotSet): """ :calls: `POST /repos/:owner/:repo/git/tags <http://developer.github.com/v3/git/tags>`_ :param tag: string :param message: string :param object: string :param type: string :param tagger: :class:`github.InputGitAuthor.InputGitAuthor` :rtype: :class:`github.GitTag.GitTag` """ assert isinstance(tag, (str, unicode)), tag assert isinstance(message, (str, unicode)), message assert isinstance(object, (str, unicode)), object assert isinstance(type, (str, unicode)), type assert tagger is github.GithubObject.NotSet or isinstance(tagger, github.InputGitAuthor), tagger post_parameters = { "tag": tag, "message": message, "object": object, "type": type, } if tagger is not github.GithubObject.NotSet: post_parameters["tagger"] = tagger._identity headers, data = self._requester.requestJsonAndCheck( "POST", self.url + "/git/tags", input=post_parameters ) return github.GitTag.GitTag(self._requester, headers, data, completed=True)
[ "def", "create_git_tag", "(", "self", ",", "tag", ",", "message", ",", "object", ",", "type", ",", "tagger", "=", "github", ".", "GithubObject", ".", "NotSet", ")", ":", "assert", "isinstance", "(", "tag", ",", "(", "str", ",", "unicode", ")", ")", "...
42.896552
19.241379
def readInputFile(self, card_name, directory, session, spatial=False, spatialReferenceID=None, **kwargs): """ Read specific input file for a GSSHA project to the database. Args: card_name(str): Name of GSSHA project card. directory (str): Directory containing all GSSHA model files. This method assumes that all files are located in the same directory. session (:mod:`sqlalchemy.orm.session.Session`): SQLAlchemy session object bound to PostGIS enabled database spatial (bool, optional): If True, spatially enabled objects will be read in as PostGIS spatial objects. Defaults to False. spatialReferenceID (int, optional): Integer id of spatial reference system for the model. If no id is provided GsshaPy will attempt to automatically lookup the spatial reference ID. If this process fails, default srid will be used (4326 for WGS 84). Returns: file object """ self.project_directory = directory with tmp_chdir(directory): # Read in replace param file replaceParamFile = self._readReplacementFiles(directory, session, spatial, spatialReferenceID) return self._readXputFile(self.INPUT_FILES, card_name, directory, session, spatial, spatialReferenceID, replaceParamFile, **kwargs)
[ "def", "readInputFile", "(", "self", ",", "card_name", ",", "directory", ",", "session", ",", "spatial", "=", "False", ",", "spatialReferenceID", "=", "None", ",", "*", "*", "kwargs", ")", ":", "self", ".", "project_directory", "=", "directory", "with", "t...
56.961538
31.653846
def parse_hpo_genes(hpo_lines): """Parse HPO gene information Args: hpo_lines(iterable(str)) Returns: diseases(dict): A dictionary with hgnc symbols as keys """ LOG.info("Parsing HPO genes ...") genes = {} for index, line in enumerate(hpo_lines): # First line is header if index == 0: continue if len(line) < 5: continue gene_info = parse_hpo_gene(line) hgnc_symbol = gene_info['hgnc_symbol'] description = gene_info['description'] if hgnc_symbol not in genes: genes[hgnc_symbol] = { 'hgnc_symbol': hgnc_symbol } gene = genes[hgnc_symbol] if description == 'Incomplete penetrance': gene['incomplete_penetrance'] = True if description == 'Autosomal dominant inheritance': gene['ad'] = True if description == 'Autosomal recessive inheritance': gene['ar'] = True if description == 'Mithochondrial inheritance': gene['mt'] = True if description == 'X-linked dominant inheritance': gene['xd'] = True if description == 'X-linked recessive inheritance': gene['xr'] = True if description == 'Y-linked inheritance': gene['x'] = True if description == 'X-linked inheritance': gene['y'] = True LOG.info("Parsing done.") return genes
[ "def", "parse_hpo_genes", "(", "hpo_lines", ")", ":", "LOG", ".", "info", "(", "\"Parsing HPO genes ...\"", ")", "genes", "=", "{", "}", "for", "index", ",", "line", "in", "enumerate", "(", "hpo_lines", ")", ":", "# First line is header", "if", "index", "=="...
32.555556
14.155556
def clean_up(self, dry_run=False, verbosity=1, last_n_days=0, cleanup_path=None, storage=None):
    """
    Iterate through Source records, deleting database references to sources
    whose files no longer exist, together with their thumbnails (both the
    thumbnail files and their database references).
    """
    if dry_run:
        print("Dry run...")
    if not storage:
        storage = get_storage_class(settings.THUMBNAIL_DEFAULT_STORAGE)()

    pending_source_ids = []
    started_at = time.time()

    query = Source.objects.all()
    if last_n_days > 0:
        today = date.today()
        window = (today - timedelta(days=last_n_days), today)
        query = query.filter(modified__range=window)
    if cleanup_path:
        query = query.filter(name__startswith=cleanup_path)

    for source in queryset_iterator(query):
        self.sources += 1
        source_path = self._get_absolute_path(source.name)
        if self._check_if_exists(storage, source_path):
            continue

        if verbosity > 0:
            print("Source not present:", source_path)
        self.source_refs_deleted += 1
        pending_source_ids.append(source.id)

        # The thumbnails of a missing source are orphans: delete their files
        # (unless this is a dry run) and count them.
        for thumb in source.thumbnails.all():
            self.thumbnails_deleted += 1
            thumb_path = self._get_absolute_path(thumb.name)
            if self._check_if_exists(storage, thumb_path):
                if not dry_run:
                    storage.delete(thumb_path)
                if verbosity > 0:
                    print("Deleting thumbnail:", thumb_path)

        # Flush database deletions in batches so the id list stays bounded.
        if len(pending_source_ids) >= 1000 and not dry_run:
            self._delete_sources_by_id(pending_source_ids)
            pending_source_ids = []

    if not dry_run:
        self._delete_sources_by_id(pending_source_ids)

    self.execution_time = round(time.time() - started_at)
[ "def", "clean_up", "(", "self", ",", "dry_run", "=", "False", ",", "verbosity", "=", "1", ",", "last_n_days", "=", "0", ",", "cleanup_path", "=", "None", ",", "storage", "=", "None", ")", ":", "if", "dry_run", ":", "print", "(", "\"Dry run...\"", ")", ...
38.901961
20.470588
def pop(self):
    """
    Remove the top process from the waiting queue and resume its execution.

    On an empty queue this is a no-op. Unlike `join()`, whose use is
    confined to processes, this method may be invoked from anywhere.
    """
    if self.is_empty():
        return
    _, proc = heappop(self._waiting)
    if _logger is not None:
        self._log(INFO, "pop", process=proc.local.name)
    proc.resume()
[ "def", "pop", "(", "self", ")", ":", "if", "not", "self", ".", "is_empty", "(", ")", ":", "_", ",", "process", "=", "heappop", "(", "self", ".", "_waiting", ")", "if", "_logger", "is", "not", "None", ":", "self", ".", "_log", "(", "INFO", ",", ...
47.2
23.4
def detect_deprecation_in_expression(self, expression):
    """ Detects if an expression makes use of any deprecated standards.
    Returns:
        list of tuple: (detecting_signature, original_text, recommended_text)"""
    # Collect all values exported by this expression
    exported_values = ExportValues(expression).result()

    findings = []
    # Match against the known deprecated solidity variables ...
    for entry in self.DEPRECATED_SOLIDITY_VARIABLE:
        if SolidityVariableComposed(entry[0]) in exported_values:
            findings.append(entry)
    # ... and the known deprecated solidity functions
    for entry in self.DEPRECATED_SOLIDITY_FUNCTIONS:
        if SolidityFunction(entry[0]) in exported_values:
            findings.append(entry)
    return findings
[ "def", "detect_deprecation_in_expression", "(", "self", ",", "expression", ")", ":", "# Perform analysis on this expression", "export", "=", "ExportValues", "(", "expression", ")", "export_values", "=", "export", ".", "result", "(", ")", "# Define our results list", "re...
39.952381
18.190476
def rescale_around1(self, times):
    """
    Suggest a rescaling factor and a new physical time unit so that the
    given time multiples are balanced around 1.

    Parameters
    ----------
    times : float array
        array of times in multiples of the present elementary unit
    """
    # Step-based units carry no physical scale; nothing to rescale.
    if self._unit == self._UNIT_STEP:
        return times, 'step'

    mean_time = np.mean(times)
    factor = 1.0
    unit = self._unit

    if mean_time < 0.001:
        # Values are too small: scale them up while moving to a smaller unit.
        while factor * mean_time < 0.001 and unit >= 0:
            factor *= 1000
            unit -= 1
        return factor * times, self._unit_names[unit]

    if mean_time > 1000:
        # Values are too large: scale them down while moving to a larger unit.
        while factor * mean_time > 1000 and unit <= 5:
            factor /= 1000
            unit += 1
        return factor * times, self._unit_names[unit]

    # Already balanced around 1: keep the values and the current unit.
    # NOTE: this branch returns self._unit itself (not its name), which
    # matches the original behavior.
    return times, self._unit
[ "def", "rescale_around1", "(", "self", ",", "times", ")", ":", "if", "self", ".", "_unit", "==", "self", ".", "_UNIT_STEP", ":", "return", "times", ",", "'step'", "# nothing to do", "m", "=", "np", ".", "mean", "(", "times", ")", "mult", "=", "1.0", ...
31.424242
20.878788
def fetch(self, recursive=1, fields=None, detail=None, filters=None, parent_uuid=None, back_refs_uuid=None):
    """ Fetch collection from API server

    :param recursive: level of recursion
    :type recursive: int
    :param fields: fetch only listed fields. contrail 3.0 required
    :type fields: [str]
    :param detail: fetch all fields
    :type detail: bool
    :param filters: list of filters
    :type filters: [(name, value), ...]
    :param parent_uuid: filter by parent_uuid
    :type parent_uuid: v4UUID str or list of v4UUID str
    :param back_refs_uuid: filter by back_refs_uuid
    :type back_refs_uuid: v4UUID str or list of v4UUID str

    :rtype: Collection
    """
    query = self._format_fetch_params(fields=fields,
                                      detail=detail,
                                      filters=filters,
                                      parent_uuid=parent_uuid,
                                      back_refs_uuid=back_refs_uuid)
    payload = self.session.get_json(self.href, **query)

    remaining = recursive - 1
    if not self.type:
        # Untyped (root) collection: every link of rel "collection"
        # becomes a child Collection, fetched if recursion remains.
        self.data = [
            Collection(link["link"]["name"],
                       fetch=remaining > 0,
                       recursive=remaining,
                       fields=self._fetch_fields(fields),
                       detail=detail or self.detail,
                       filters=self._fetch_filters(filters),
                       parent_uuid=self._fetch_parent_uuid(parent_uuid),
                       back_refs_uuid=self._fetch_back_refs_uuid(back_refs_uuid))
            for link in payload['links']
            if link["link"]["rel"] == "collection"
        ]
    else:
        # when detail=False, res == {resource_attrs}
        # when detail=True, res == {'type': {resource_attrs}}
        self.data = [
            Resource(self.type,
                     fetch=remaining > 0,
                     recursive=remaining,
                     **res.get(self.type, res))
            for res_type, res_list in payload.items()
            for res in res_list
        ]
    return self
[ "def", "fetch", "(", "self", ",", "recursive", "=", "1", ",", "fields", "=", "None", ",", "detail", "=", "None", ",", "filters", "=", "None", ",", "parent_uuid", "=", "None", ",", "back_refs_uuid", "=", "None", ")", ":", "params", "=", "self", ".", ...
45.770833
20.229167
def _update_fields_with_objects(self):
    """ Convert dict fields into objects, where appropriate """
    # Promote a plain-dict cover to a Photo object
    if isinstance(self.cover, dict):
        self.cover = Photo(self._client, self.cover)
    # Promote plain-dict entries of the photo list to Photo objects;
    # tolerate a missing or non-iterable photos attribute.
    try:
        for idx, item in enumerate(self.photos):
            if isinstance(item, dict):
                self.photos[idx] = Photo(self._client, item)
    except (AttributeError, TypeError):
        pass
[ "def", "_update_fields_with_objects", "(", "self", ")", ":", "# Update the cover with a photo object", "if", "isinstance", "(", "self", ".", "cover", ",", "dict", ")", ":", "self", ".", "cover", "=", "Photo", "(", "self", ".", "_client", ",", "self", ".", "c...
40.384615
12.769231
def security_rule_delete(security_rule, security_group, resource_group, **kwargs):
    '''
    .. versionadded:: 2019.2.0

    Delete a security rule within a specified security group.

    :param security_rule: The name of the security rule to delete.

    :param security_group: The network security group containing the
        security rule.

    :param resource_group: The resource group name assigned to the
        network security group.

    Returns ``True`` on success, ``False`` if the cloud call fails.

    CLI Example:

    .. code-block:: bash

        salt-call azurearm_network.security_rule_delete testrule1 testnsg testgroup

    '''
    result = False
    # Build an authenticated Azure network client from connection kwargs
    netconn = __utils__['azurearm.get_client']('network', **kwargs)
    try:
        secrule = netconn.security_rules.delete(
            network_security_group_name=security_group,
            resource_group_name=resource_group,
            security_rule_name=security_rule
        )
        # Block until the delete operation completes; only then report success
        secrule.wait()
        result = True
    except CloudError as exc:
        # Log the failure through the azurearm utility; result stays False
        __utils__['azurearm.log_cloud_error']('network', str(exc), **kwargs)

    return result
[ "def", "security_rule_delete", "(", "security_rule", ",", "security_group", ",", "resource_group", ",", "*", "*", "kwargs", ")", ":", "result", "=", "False", "netconn", "=", "__utils__", "[", "'azurearm.get_client'", "]", "(", "'network'", ",", "*", "*", "kwar...
28.861111
25.75
def parse_tables(self, markup):
    """ Returns a list of tables in the markup.

    A Wikipedia table looks like:
    {| border="1"
    |-
    |Cell 1 (no modifier - not aligned)
    |-
    |align="right" |Cell 2 (right aligned)
    |-
    |}
    """
    tables = []
    for block in re.findall(self.re["table"], markup):
        table = WikipediaTable()
        # Tables start with "{|"; properties may follow on the same
        # line, e.g. {| border="1".
        table.properties = block.split("\n")[0].strip("{|").strip()
        self.connect_table(table, block, markup)
        # "|+" starts the heading, "|-" starts a new row and "|}"
        # marks the end of the table.
        row = None
        for line in block.split("\n"):
            line = line.strip()
            if line.startswith("|+"):
                # Table caption.
                table.title = self.plain(line.strip("|+"))
            elif line.startswith("|-"):
                # Row delimiter: flush the row collected so far.
                if row:
                    row.properties = line.strip("|-").strip()
                    table.append(row)
                row = None
            elif line.startswith("|}"):
                # End-of-table marker: nothing to do.
                pass
            elif line.startswith("|") \
              or line.startswith("!"):
                row = self.parse_table_row(line, row)
        # Append the last row.
        if row:
            table.append(row)
        if len(table) > 0:
            tables.append(table)
    return tables
[ "def", "parse_tables", "(", "self", ",", "markup", ")", ":", "tables", "=", "[", "]", "m", "=", "re", ".", "findall", "(", "self", ".", "re", "[", "\"table\"", "]", ",", "markup", ")", "for", "chunk", "in", "m", ":", "table", "=", "WikipediaTable",...
32.980392
15.294118
def union_with_variable(self, variable: str, replacement: VariableReplacement) -> 'Substitution':
    """Create a copy of this substitution with the given variable added.

    See :meth:`try_add_variable` for a version of this method that modifies
    the substitution in place.

    Args:
        variable_name:
            The name of the variable to add.
        replacement:
            The substitution for the variable.

    Returns:
        The new substitution with the variable_name added or merged.

    Raises:
        ValueError:
            if the variable cannot be merged because it conflicts with the
            existing substitution for the variable.
    """
    combined = Substitution(self)
    combined.try_add_variable(variable, replacement)
    return combined
[ "def", "union_with_variable", "(", "self", ",", "variable", ":", "str", ",", "replacement", ":", "VariableReplacement", ")", "->", "'Substitution'", ":", "new_subst", "=", "Substitution", "(", "self", ")", "new_subst", ".", "try_add_variable", "(", "variable", "...
36.26087
24.217391