text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def format(self, record):
    """Format the record as tersely as possible but preserve info."""
    # Base-class format() populates record.message (and related derived
    # fields) as a side effect; its return value is deliberately ignored.
    super(CliFormatter, self).format(record)
    localized_time = datetime.datetime.fromtimestamp(record.created)
    terse_time = localized_time.strftime(u'%H:%M:%S')
    # Single letter of the level name, e.g. 'I' for INFO, 'W' for WARNING.
    terse_level = record.levelname[0]
    # Default logger label: last dotted component of the logger name.
    terse_name = record.name.split('.')[-1]
    match = RECORD_LOGGER_RE.match(record.name)
    if match:
        # Figure out which OpenHTF subsystem the record came from.
        subsys_match = SUBSYSTEM_LOGGER_RE.match(record.name)
        if subsys_match:
            terse_name = '<{subsys}: {id}>'.format(
                subsys=subsys_match.group('subsys'),
                id=subsys_match.group('id'))
        else:
            # Fall back to using the last five characters of the test UUID.
            terse_name = '<test %s>' % match.group('test_uid')[-5:]
    return '{lvl} {time} {logger} - {msg}'.format(lvl=terse_level,
                                                  time=terse_time,
                                                  logger=terse_name,
                                                  msg=record.message)
[ "def", "format", "(", "self", ",", "record", ")", ":", "super", "(", "CliFormatter", ",", "self", ")", ".", "format", "(", "record", ")", "localized_time", "=", "datetime", ".", "datetime", ".", "fromtimestamp", "(", "record", ".", "created", ")", "terse...
49.681818
16.727273
def spell_correct(string):
    """
    Uses aspell to spell correct an input string.

    Requires aspell to be installed and added to the path.
    Returns a 3-tuple ``(corrected_string, num_errors, markup_string)``,
    where ``markup_string`` wraps each misspelled word in ``<bs>...</bs>``
    tags.  Returns the original string (with zero errors) if aspell fails.

    string - string
    """
    # Create a temp file so that aspell could be used
    # By default, tempfile will delete this file when the file handle is closed.
    f = tempfile.NamedTemporaryFile(mode='w')
    f.write(string)
    f.flush()
    f_path = os.path.abspath(f.name)
    try:
        p = os.popen(aspell_path + " -a < " + f_path + " --sug-mode=ultra")
        # Aspell returns a list of incorrect words with the above flags
        incorrect = p.readlines()
        p.close()
    except Exception:
        log.exception("aspell process failed; could not spell check")
        # Return original string if aspell fails
        return string, 0, string
    finally:
        f.close()

    incorrect_words = []
    correct_spelling = []
    # Skip the first line (aspell banner).  Lines describing a misspelling
    # look like "& word <count> <offset>: sug1, sug2, ...".
    for line in incorrect[1:]:
        if len(line) <= 10:
            continue
        match = re.search(":", line)
        if match is None:
            # Not a suggestion line; nothing to reformat.
            continue
        begstring = line[2:match.start()]
        begmatch = re.search(" ", begstring)
        if begmatch is None:
            # Malformed line: no word boundary before the colon.
            continue
        begword = begstring[0:begmatch.start()]
        sugstring = line[match.start() + 2:]
        sugmatch = re.search(",", sugstring)
        if sugmatch is not None:
            sug = sugstring[0:sugmatch.start()]
            incorrect_words.append(begword)
            correct_spelling.append(sug)

    # Create markup based on spelling errors
    newstring = string
    markup_string = string
    already_subbed = []
    for word, suggestion in zip(incorrect_words, correct_spelling):
        # re.escape prevents words containing regex metacharacters
        # (e.g. '.', '+', '?') from corrupting the substitution pattern.
        sub_comp = re.compile(r"\b" + re.escape(word) + r"\b")
        newstring = re.sub(sub_comp, suggestion, newstring)
        if word not in already_subbed:
            markup_string = re.sub(sub_comp,
                                   '<bs>' + word + '</bs>', markup_string)
            already_subbed.append(word)
    return newstring, len(incorrect_words), markup_string
[ "def", "spell_correct", "(", "string", ")", ":", "# Create a temp file so that aspell could be used", "# By default, tempfile will delete this file when the file handle is closed.", "f", "=", "tempfile", ".", "NamedTemporaryFile", "(", "mode", "=", "'w'", ")", "f", ".", "writ...
36.180328
19.295082
def send_item(self, destination, item_id, force_send):
    """
    Run send method for item_id at destination.

    :param destination: str which type of operation are we doing
      (SHARE_DESTINATION or DELIVER_DESTINATION)
    :param item_id: str D4S2 service id representing the item we want to send
    :param force_send: bool it's ok to email the item again
    :return: requests.Response containing the successful result
    """
    payload = json.dumps({'force': force_send})
    send_url = self.make_url(destination, "{}/send/".format(item_id))
    response = requests.post(send_url, headers=self.json_headers, data=payload)
    # Raises on error responses before handing the result back.
    self.check_response(response)
    return response
[ "def", "send_item", "(", "self", ",", "destination", ",", "item_id", ",", "force_send", ")", ":", "data", "=", "json", ".", "dumps", "(", "{", "'force'", ":", "force_send", ",", "}", ")", "url_suffix", "=", "\"{}/send/\"", ".", "format", "(", "item_id", ...
48.2
22.6
def getname(obj):
    """
    Return the most qualified name of an object

    :param obj: object to fetch name
    :return: name of ``obj``
    """
    for attr in '__qualname__', '__name__':
        # Every object has a class (Python data model), so the class's
        # name serves as the fallback when the instance carries none.
        try:
            fallback = getattr(obj.__class__, attr)
        except AttributeError:
            continue
        return getattr(obj, attr, fallback)
    raise TypeError('object of type %r does not define a canonical name'
                    % type(obj))
[ "def", "getname", "(", "obj", ")", ":", "for", "name_attribute", "in", "(", "'__qualname__'", ",", "'__name__'", ")", ":", "try", ":", "# an object always has a class, as per Python data model", "return", "getattr", "(", "obj", ",", "name_attribute", ",", "getattr",...
35.357143
20.214286
def _mass(self, R, z=0., t=0.):
    """
    NAME:
       _mass
    PURPOSE:
       calculate the mass out to a given radius
    INPUT:
       R - radius at which to return the enclosed mass
       z - (don't specify this) vertical height
    OUTPUT:
       mass in natural units
    HISTORY:
       2014-01-29 - Written - Bovy (IAS)
    """
    # Collapse (R, z) onto the spherical radius r; z=None means R is
    # already the spherical radius.
    r = R if z is None else numpy.sqrt(R ** 2. + z ** 2.)
    # Enclosed mass in natural units; matches the NFW-profile form
    # log(1+x) - x/(1+x) with x = r / scale radius.
    x = r / self.a
    return numpy.log(1 + x) - x / (1. + x)
[ "def", "_mass", "(", "self", ",", "R", ",", "z", "=", "0.", ",", "t", "=", "0.", ")", ":", "if", "z", "is", "None", ":", "r", "=", "R", "else", ":", "r", "=", "numpy", ".", "sqrt", "(", "R", "**", "2.", "+", "z", "**", "2.", ")", "retur...
29.470588
14.529412
def _read_http_goaway(self, size, kind, flag):
    """Read HTTP/2 GOAWAY frames.

    Structure of HTTP/2 GOAWAY frame [RFC 7540]:
       +-----------------------------------------------+
       |                 Length (24)                   |
       +---------------+---------------+---------------+
       |   Type (8)    |   Flags (8)   |
       +-+-------------+---------------+-------------------------------+
       |R|                 Stream Identifier (31)                      |
       +-+-------------+---------------+-------------------------------+
       |R|                  Last-Stream-ID (31)                        |
       +-+-------------------------------------------------------------+
       |                      Error Code (32)                          |
       +---------------------------------------------------------------+
       |                  Additional Debug Data (*)                    |
       +---------------------------------------------------------------+

       Octets      Bits  Name                  Description
         0           0   http.length           Length
         3          24   http.type             Type (2)
         4          32   http.flags            Flags
         5          40   -                     Reserved
         5          41   http.sid              Stream Identifier
         9          72   -                     Reserved
         9          73   http.last_sid         Last Stream ID
        13         104   http.error            Error Code
        17         136   http.data             Additional Debug Data (Optional)

    """
    # Payload beyond the fixed 8 bytes (Last-Stream-ID + Error Code) is
    # optional debug data; a negative remainder means a malformed frame.
    _dlen = size - 8
    if _dlen < 0:
        raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True)

    # GOAWAY defines no flags, so any set flag bit is a protocol error.
    if any((int(bit, base=2) for bit in flag)):
        raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True)

    _rsid = self._read_binary(4)   # reserved bit + Last-Stream-ID, as bits
    _code = self._read_unpack(4)   # 32-bit error code
    _data = self._read_fileng(_dlen) or None

    # The reserved (R) bit preceding Last-Stream-ID must be zero.
    if int(_rsid[0], base=2):
        raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True)

    data = dict(
        flags=None,
        last_sid=int(_rsid[1:], base=2),
        error=_ERROR_CODE.get(_code, _code),
        data=_data,
    )

    return data
[ "def", "_read_http_goaway", "(", "self", ",", "size", ",", "kind", ",", "flag", ")", ":", "_dlen", "=", "size", "-", "8", "if", "_dlen", "<", "0", ":", "raise", "ProtocolError", "(", "f'HTTP/2: [Type {kind}] invalid format'", ",", "quiet", "=", "True", ")"...
47.784314
25.509804
def paginate_activity(visible_date=None):
    """
    Creates "get previous day" / "get next day" pagination for activities.

    Visible date is the date of the activities currently being shown,
    represented by a date object.  Defaults to today when not provided.
    """
    if not visible_date:
        visible_date = datetime.date.today()
    one_day = datetime.timedelta(days=1)
    previous_day = visible_date - one_day
    # Never paginate into the future: today gets no "next day" link.
    if visible_date == datetime.date.today():
        next_day = None
    else:
        next_day = visible_date + one_day
    return {'previous_day': previous_day, 'next_day': next_day}
[ "def", "paginate_activity", "(", "visible_date", "=", "None", ")", ":", "#if visible_date:", "# visible_date = datetime.datetime.strptime(visible_date, \"%b %d \")", "if", "not", "visible_date", ":", "visible_date", "=", "datetime", ".", "date", ".", "today", "(", ")",...
34.181818
19.909091
def within_depth_range(self, lower_depth=None, upper_depth=None):
    '''
    Selects events within a specified depth range

    :param float lower_depth:
        Lower depth for consideration
    :param float upper_depth:
        Upper depth for consideration

    :returns:
        Instance of :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
        containing only selected events
    '''
    if not lower_depth:
        # NOTE(review): truthiness check treats an explicit depth of 0.0
        # the same as None - confirm that is intended.
        if not upper_depth:
            # No limiting depths defined - so return entire catalogue!
            return self.catalogue
        else:
            lower_depth = np.inf
    if not upper_depth:
        upper_depth = 0.0
    # Depths increase downward: keep events at or below upper_depth and
    # strictly shallower than lower_depth.
    is_valid = np.logical_and(self.catalogue.data['depth'] >= upper_depth,
                              self.catalogue.data['depth'] < lower_depth)
    return self.select_catalogue(is_valid)
[ "def", "within_depth_range", "(", "self", ",", "lower_depth", "=", "None", ",", "upper_depth", "=", "None", ")", ":", "if", "not", "lower_depth", ":", "if", "not", "upper_depth", ":", "# No limiting depths defined - so return entire catalogue!", "return", "self", "....
33.518519
21
def ps(self): """ Get the process information from the system PS command. """ # Get the process ID pid = self.get() # Parent / child processes parent = None children = [] # If the process is running if pid: proc = Popen(['ps', '-ef'], stdout=PIPE) for _line in proc.stdout.readlines(): line = self.unicode(_line.rstrip()) # Get the current PID / parent PID this_pid, this_parent = self._ps_extract_pid(line) try: # If scanning a child process if int(pid) == int(this_parent): children.append('{}; [{}]'.format(this_pid.rstrip(), re.sub(' +', ' ', line))) # If scanning the parent process if int(pid) == int(this_pid): parent = re.sub(' +', ' ', line) # Ignore value errors except ValueError: continue # Return the parent PID and any children processes return (parent, children)
[ "def", "ps", "(", "self", ")", ":", "# Get the process ID", "pid", "=", "self", ".", "get", "(", ")", "# Parent / child processes", "parent", "=", "None", "children", "=", "[", "]", "# If the process is running", "if", "pid", ":", "proc", "=", "Popen", "(", ...
34.388889
16.833333
def verify_valid_dependencies(self):
    """
    Checks if the assigned dependencies are valid

    valid dependency graphs are:
      - noncyclic (i.e. no `A -> B -> ... -> A`)
      - Contain no undefined dependencies (dependencies referencing
        undefined tasks)
    """
    unobserved_dependencies = set(self.tasks.keys())
    target_queue = []
    while len(unobserved_dependencies) > 0:
        target_queue = [unobserved_dependencies.pop()]
        # NOTE(review): `is not []` is an identity comparison against a
        # fresh list and is therefore always True; combined with `+=`
        # below (which only grows the queue and never consumes it) this
        # inner loop cannot terminate normally.  The traversal looks
        # unfinished - confirm the intended algorithm before relying on
        # this validation.
        while target_queue is not []:
            target_queue += unobserved_dependencies
[ "def", "verify_valid_dependencies", "(", "self", ")", ":", "unobserved_dependencies", "=", "set", "(", "self", ".", "tasks", ".", "keys", "(", ")", ")", "target_queue", "=", "[", "]", "while", "len", "(", "unobserved_dependencies", ")", ">", "0", ":", "tar...
34.294118
16.058824
def calculate(self, T, P, zs, ws, method):
    r'''Method to calculate surface tension of a liquid mixture at
    temperature `T`, pressure `P`, mole fractions `zs` and weight fractions
    `ws` with a given method.

    This method has no exception handling; see `mixture_property`
    for that.

    Parameters
    ----------
    T : float
        Temperature at which to calculate the property, [K]
    P : float
        Pressure at which to calculate the property, [Pa]
    zs : list[float]
        Mole fractions of all species in the mixture, [-]
    ws : list[float]
        Weight fractions of all species in the mixture, [-]
    method : str
        Name of the method to use

    Returns
    -------
    sigma : float
        Surface tension of the liquid at given conditions, [N/m]

    Raises
    ------
    ValueError
        If `method` is not one of the supported method names.
    '''
    if method == SIMPLE:
        # Mole-fraction weighted mix of the pure-component tensions.
        sigmas = [i(T) for i in self.SurfaceTensions]
        return mixing_simple(zs, sigmas)
    elif method == DIGUILIOTEJA:
        return Diguilio_Teja(T=T, xs=zs, sigmas_Tb=self.sigmas_Tb,
                             Tbs=self.Tbs, Tcs=self.Tcs)
    elif method == WINTERFELDSCRIVENDAVIS:
        sigmas = [i(T) for i in self.SurfaceTensions]
        # Reciprocal of molar liquid volume at (T, P) = molar density.
        rhoms = [1./i(T, P) for i in self.VolumeLiquids]
        return Winterfeld_Scriven_Davis(zs, sigmas, rhoms)
    else:
        # ValueError is a subclass of Exception, so existing callers
        # catching the old bare Exception continue to work.
        raise ValueError('Method not valid')
[ "def", "calculate", "(", "self", ",", "T", ",", "P", ",", "zs", ",", "ws", ",", "method", ")", ":", "if", "method", "==", "SIMPLE", ":", "sigmas", "=", "[", "i", "(", "T", ")", "for", "i", "in", "self", ".", "SurfaceTensions", "]", "return", "m...
38.026316
20.973684
def set_file(name, source, template=None, context=None, defaults=None, **kwargs):
    '''
    Set debconf selections from a file or a template

    .. code-block:: yaml

        <state_id>:
          debconf.set_file:
            - source: salt://pathto/pkg.selections

        <state_id>:
          debconf.set_file:
            - source: salt://pathto/pkg.selections?saltenv=myenvironment

        <state_id>:
          debconf.set_file:
            - source: salt://pathto/pkg.selections.jinja2
            - template: jinja
            - context:
                some_value: "false"

    source:
        The location of the file containing the package selections

    template
        If this setting is applied then the named templating engine will be
        used to render the package selections file, currently jinja, mako,
        and wempy are supported

    context
        Overrides default context variables passed to the template.

    defaults
        Default context passed to the template.
    '''
    ret = {'name': name,
           'changes': {},
           'result': True,
           'comment': ''}

    # Validate template inputs before doing any work.
    if context is None:
        context = {}
    elif not isinstance(context, dict):
        ret['result'] = False
        ret['comment'] = 'Context must be formed as a dict'
        return ret

    if defaults is None:
        defaults = {}
    elif not isinstance(defaults, dict):
        ret['result'] = False
        ret['comment'] = 'Defaults must be formed as a dict'
        return ret

    # Test mode: report what would happen without changing the system.
    if __opts__['test']:
        ret['result'] = None
        ret['comment'] = 'Debconf selections would have been set.'
        return ret

    if template:
        result = __salt__['debconf.set_template'](source, template, context,
                                                  defaults, **kwargs)
    else:
        result = __salt__['debconf.set_file'](source, **kwargs)

    if result:
        ret['comment'] = 'Debconf selections were set.'
    else:
        ret['result'] = False
        ret['comment'] = 'Unable to set debconf selections from file.'

    return ret
[ "def", "set_file", "(", "name", ",", "source", ",", "template", "=", "None", ",", "context", "=", "None", ",", "defaults", "=", "None", ",", "*", "*", "kwargs", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'changes'", ":", "{", "}", ",...
27.760563
24.464789
def remove_layer(svg_source, layer_name): ''' Remove layer(s) from SVG document. Arguments --------- svg_source : str or file-like A file path, URI, or file-like object. layer_name : str or list Layer name or list of layer names to remove from SVG document. Returns ------- StringIO.StringIO File-like object containing XML document with layer(s) removed. ''' # Parse input file. xml_root = lxml.etree.parse(svg_source) svg_root = xml_root.xpath('/svg:svg', namespaces=INKSCAPE_NSMAP)[0] if isinstance(layer_name, str): layer_name = [layer_name] for layer_name_i in layer_name: # Remove existing layer from source, in-memory XML (source file remains # unmodified). layer_xpath = '//svg:g[@inkscape:label="%s"]' % layer_name_i layer_groups = svg_root.xpath(layer_xpath, namespaces=INKSCAPE_NSMAP) if layer_groups: for g in layer_groups: g.getparent().remove(g) # Write result to `StringIO`. output = StringIO.StringIO() xml_root.write(output) output.seek(0) return output
[ "def", "remove_layer", "(", "svg_source", ",", "layer_name", ")", ":", "# Parse input file.", "xml_root", "=", "lxml", ".", "etree", ".", "parse", "(", "svg_source", ")", "svg_root", "=", "xml_root", ".", "xpath", "(", "'/svg:svg'", ",", "namespaces", "=", "...
29.447368
21.394737
def expect_re(self, regexp):
    """Test against the given regular expression and returns the match object.

    :param regexp: the expression to be tested.
    :returns: the match object, or None when the line lacks the prefix.
    """
    prefix_len = self.match_prefix(
        self.prefix, self.next_line(require_prefix=False)
    )
    if prefix_len < 0:
        # The current line does not carry the required prefix.
        return None
    matched = self._expect_re(regexp, self.pos + prefix_len)
    # Remember the last match for later consumption.
    self.match = matched
    return matched
[ "def", "expect_re", "(", "self", ",", "regexp", ")", ":", "prefix_len", "=", "self", ".", "match_prefix", "(", "self", ".", "prefix", ",", "self", ".", "next_line", "(", "require_prefix", "=", "False", ")", ")", "if", "prefix_len", ">=", "0", ":", "mat...
33.266667
15.333333
def backup(self):
    """
    Backup the main dataframe
    """
    try:
        snapshot = self.df.copy()
    except Exception as e:
        self.err(e, "Can not backup data")
        return
    # Only record the snapshot and report success if the copy worked.
    self.backup_df = snapshot
    self.ok("Dataframe backed up")
[ "def", "backup", "(", "self", ")", ":", "try", ":", "self", ".", "backup_df", "=", "self", ".", "df", ".", "copy", "(", ")", "except", "Exception", "as", "e", ":", "self", ".", "err", "(", "e", ",", "\"Can not backup data\"", ")", "return", "self", ...
25.9
10.1
def first_available(self, *quantities):
    """
    Return the first available quantity in the input arguments.
    Return `None` if none of them is available.
    """
    for position, quantity in enumerate(quantities):
        if not self.has_quantity(quantity):
            continue
        if position:
            # A fallback was chosen; tell the caller the preferred
            # quantity is missing.
            warnings.warn('{} not available; using {} instead'.format(
                quantities[0], quantity))
        return quantity
    return None
[ "def", "first_available", "(", "self", ",", "*", "quantities", ")", ":", "for", "i", ",", "q", "in", "enumerate", "(", "quantities", ")", ":", "if", "self", ".", "has_quantity", "(", "q", ")", ":", "if", "i", ":", "warnings", ".", "warn", "(", "'{}...
39.8
13.6
def filtered_list(cls, name=None, obj=None):
    """List datacenters matching name and compatible with obj"""
    options = {'id': cls.usable_id(name)} if name else {}

    def compatible(datacenter):
        # With no object every datacenter qualifies; otherwise the object
        # must reside in that datacenter.
        return not obj or obj['datacenter_id'] == datacenter['id']

    return [dc for dc in cls.list(options) if compatible(dc)]
[ "def", "filtered_list", "(", "cls", ",", "name", "=", "None", ",", "obj", "=", "None", ")", ":", "options", "=", "{", "}", "if", "name", ":", "options", "[", "'id'", "]", "=", "cls", ".", "usable_id", "(", "name", ")", "def", "obj_ok", "(", "dc",...
31
16.769231
def pack_value(self, val):
    """Convert 8-byte string into 16-byte list"""
    if isinstance(val, bytes):
        # Work on a list of integer byte values.
        val = list(iterbytes(val))
    slen = len(val)
    # Each value packs to 2 bytes; when padding is enabled an odd count
    # gets 2 extra NUL bytes so the total is a multiple of 4.
    pad = b'\0\0' * (slen % 2) if self.pad else b''
    data = struct.pack('>' + 'H' * slen, *val)
    return data + pad, slen, None
[ "def", "pack_value", "(", "self", ",", "val", ")", ":", "if", "isinstance", "(", "val", ",", "bytes", ")", ":", "val", "=", "list", "(", "iterbytes", "(", "val", ")", ")", "slen", "=", "len", "(", "val", ")", "if", "self", ".", "pad", ":", "pad...
25.692308
19.615385
def ordinary_diffusion(target, pore_area='pore.area',
                       throat_area='throat.area',
                       pore_diffusivity='pore.diffusivity',
                       throat_diffusivity='throat.diffusivity',
                       conduit_lengths='throat.conduit_lengths',
                       conduit_shape_factors='throat.poisson_shape_factors'):
    r"""
    Calculate the diffusive conductance of conduits in network, where a
    conduit is ( 1/2 pore - full throat - 1/2 pore ). See the notes section.

    Parameters
    ----------
    target : OpenPNM Object
        The object which this model is associated with. This controls the
        length of the calculated array, and also provides access to other
        necessary properties.

    pore_diffusivity : string
        Dictionary key of the pore diffusivity values

    throat_diffusivity : string
        Dictionary key of the throat diffusivity values

    pore_area : string
        Dictionary key of the pore area values

    throat_area : string
        Dictionary key of the throat area values

    conduit_shape_factors : string
        Dictionary key of the conduit DIFFUSION shape factor values

    Returns
    -------
    g : ndarray
        Array containing diffusive conductance values for conduits in the
        geometry attached to the given physics object.

    Notes
    -----
    (1) This function requires that all the necessary phase properties already
    be calculated.

    (2) This function calculates the specified property for the *entire*
    network then extracts the values for the appropriate throats at the end.

    (3) This function assumes cylindrical throats with constant cross-section
    area. Corrections for different shapes and variable cross-section area can
    be imposed by passing the proper flow_shape_factor argument.

    """
    # Thin wrapper: all of the work happens in generic_conductance with
    # transport_type fixed to 'diffusion'.
    return generic_conductance(target=target, transport_type='diffusion',
                               pore_area=pore_area,
                               throat_area=throat_area,
                               pore_diffusivity=pore_diffusivity,
                               throat_diffusivity=throat_diffusivity,
                               conduit_lengths=conduit_lengths,
                               conduit_shape_factors=conduit_shape_factors)
[ "def", "ordinary_diffusion", "(", "target", ",", "pore_area", "=", "'pore.area'", ",", "throat_area", "=", "'throat.area'", ",", "pore_diffusivity", "=", "'pore.diffusivity'", ",", "throat_diffusivity", "=", "'throat.diffusivity'", ",", "conduit_lengths", "=", "'throat....
38.711864
25.338983
def unparse_qsl(qsl, sort=False, reverse=False):
    """Reverse conversion for parse_qsl"""
    pairs = qsl
    if sort:
        # Order by key only, honoring the requested direction.
        pairs = sorted(pairs, key=lambda kv: kv[0], reverse=reverse)
    # Percent-encode both sides and join in HTML-form style.
    return "&".join(quote(k) + "=" + quote(v) for k, v in pairs)
[ "def", "unparse_qsl", "(", "qsl", ",", "sort", "=", "False", ",", "reverse", "=", "False", ")", ":", "result", "=", "[", "]", "items", "=", "qsl", "if", "sort", ":", "items", "=", "sorted", "(", "items", ",", "key", "=", "lambda", "x", ":", "x", ...
34.2
15.8
def get_queryset(self):
    """
    Returns all the approved topics or posts.
    """
    # Narrow the parent queryset down to approved records only.
    return super().get_queryset().filter(approved=True)
[ "def", "get_queryset", "(", "self", ")", ":", "qs", "=", "super", "(", ")", ".", "get_queryset", "(", ")", "qs", "=", "qs", ".", "filter", "(", "approved", "=", "True", ")", "return", "qs" ]
33.8
9.6
def _active_mounts_freebsd(ret):
    '''
    List active mounts on FreeBSD systems

    Fills `ret` (dict keyed by mount point) from the output of
    ``mount -p`` and returns it.
    '''
    # `mount -p` prints fstab-style lines: device mountpoint fstype opts ...
    for line in __salt__['cmd.run_stdout']('mount -p').split('\n'):
        # Collapse runs of whitespace so split() yields clean columns.
        comps = re.sub(r"\s+", " ", line).split()
        ret[comps[1]] = {'device': comps[0],
                         'fstype': comps[2],
                         'opts': _resolve_user_group_names(comps[3].split(','))}
    return ret
[ "def", "_active_mounts_freebsd", "(", "ret", ")", ":", "for", "line", "in", "__salt__", "[", "'cmd.run_stdout'", "]", "(", "'mount -p'", ")", ".", "split", "(", "'\\n'", ")", ":", "comps", "=", "re", ".", "sub", "(", "r\"\\s+\"", ",", "\" \"", ",", "li...
38.5
18.5
def set_privatekey(self, pkey):
    """
    Set the private key portion of the PKCS #12 structure.

    :param pkey: The new private key, or :py:const:`None` to unset it.
    :type pkey: :py:class:`PKey` or :py:const:`None`

    :return: ``None``
    """
    # NOTE(review): the docstring claims None unsets the key, but the
    # isinstance check below rejects None with TypeError - confirm which
    # behavior is intended.
    if not isinstance(pkey, PKey):
        raise TypeError("pkey must be a PKey instance")
    self._pkey = pkey
[ "def", "set_privatekey", "(", "self", ",", "pkey", ")", ":", "if", "not", "isinstance", "(", "pkey", ",", "PKey", ")", ":", "raise", "TypeError", "(", "\"pkey must be a PKey instance\"", ")", "self", ".", "_pkey", "=", "pkey" ]
32.666667
17.666667
def __validInputs(self): """Validates the inputs of the constructor.""" #if not isinstance(self.__column, Column): # raise Sitools2Exception("column must be an instance of Column") try: float(self.__minVal) except ValueError as ex: raise Sitools2Exception(ex) try: float(self.__maxVal) except ValueError as ex: raise Sitools2Exception(ex) if float(self.__minVal) >= float(self.__maxVal): raise Sitools2Exception("maxVal must be superior to minVal")
[ "def", "__validInputs", "(", "self", ")", ":", "#if not isinstance(self.__column, Column):", "# raise Sitools2Exception(\"column must be an instance of Column\")", "try", ":", "float", "(", "self", ".", "__minVal", ")", "except", "ValueError", "as", "ex", ":", "raise", ...
38
15.466667
def addcert(self, cert):
    """
    Adds a certificate (probably intermediate CA) to the SignedData
    structure
    """
    # OpenSSL convention: CMS_add1_cert returns <= 0 on failure.
    if libcrypto.CMS_add1_cert(self.ptr, cert.cert) <= 0:
        raise CMSError("Cannot add cert")
[ "def", "addcert", "(", "self", ",", "cert", ")", ":", "if", "libcrypto", ".", "CMS_add1_cert", "(", "self", ".", "ptr", ",", "cert", ".", "cert", ")", "<=", "0", ":", "raise", "CMSError", "(", "\"Cannot add cert\"", ")" ]
34.285714
13.714286
def get_bbox_list(self, crs=None, buffer=None, reduce_bbox_sizes=None):
    """Returns a list of bounding boxes that are the result of the split

    :param crs: Coordinate reference system in which the bounding boxes should
        be returned. If None the CRS will be the default CRS of the splitter.
    :type crs: CRS or None
    :param buffer: A percentage of each BBox size increase. This will cause
        neighbouring bounding boxes to overlap.
    :type buffer: float or None
    :param reduce_bbox_sizes: If `True` it will reduce the sizes of bounding
        boxes so that they will tightly fit the given geometry in
        `shape_list`. This overrides the same parameter from constructor
    :type reduce_bbox_sizes: bool
    :return: List of bounding boxes
    :rtype: list(BBox)
    """
    boxes = self.bbox_list
    if buffer:
        boxes = [single_bbox.buffer(buffer) for single_bbox in boxes]

    if reduce_bbox_sizes is None:
        # Fall back to the instance-level setting from the constructor.
        reduce_bbox_sizes = self.reduce_bbox_sizes
    if reduce_bbox_sizes:
        boxes = self._reduce_sizes(boxes)

    if crs:
        boxes = [single_bbox.transform(crs) for single_bbox in boxes]
    return boxes
[ "def", "get_bbox_list", "(", "self", ",", "crs", "=", "None", ",", "buffer", "=", "None", ",", "reduce_bbox_sizes", "=", "None", ")", ":", "bbox_list", "=", "self", ".", "bbox_list", "if", "buffer", ":", "bbox_list", "=", "[", "bbox", ".", "buffer", "(...
47
24.884615
def exit_with_error(message):
    """
    Display formatted error message and exit call

    :param message: text shown to the user on stderr before exiting.
    """
    click.secho(message, err=True, bg='red', fg='white')
    # Exit with a non-zero status so shells and callers can detect the
    # failure; the previous sys.exit(0) signaled success after an error.
    sys.exit(1)
[ "def", "exit_with_error", "(", "message", ")", ":", "click", ".", "secho", "(", "message", ",", "err", "=", "True", ",", "bg", "=", "'red'", ",", "fg", "=", "'white'", ")", "sys", ".", "exit", "(", "0", ")" ]
39.25
13
def to_iso639_1(key):
    """Find ISO 639-1 code for language specified by key.

    >>> to_iso639_1("swe")
    u'sv'

    >>> to_iso639_1("English")
    u'en'
    """
    entry = find(whatever=key)
    if not entry:
        raise NonExistentLanguageError('Language does not exist.')
    return entry[u'iso639_1']
[ "def", "to_iso639_1", "(", "key", ")", ":", "item", "=", "find", "(", "whatever", "=", "key", ")", "if", "not", "item", ":", "raise", "NonExistentLanguageError", "(", "'Language does not exist.'", ")", "return", "item", "[", "u'iso639_1'", "]" ]
24.833333
18.166667
def write_back(self, documents, doc_ids=None, eids=None):
    """
    Write back documents by doc_id

    :param documents: a list of document to write back
    :param doc_ids: a list of document IDs which need to be written back
    :param eids: deprecated alias for ``doc_ids``
    :returns: a list of document IDs that have been written
    """
    # Merge the deprecated `eids` alias into `doc_ids`.
    doc_ids = _get_doc_ids(doc_ids, eids)

    if doc_ids is not None and not len(documents) == len(doc_ids):
        raise ValueError(
            'The length of documents and doc_ids is not match.')

    # Without explicit IDs, take each document's own doc_id.
    if doc_ids is None:
        doc_ids = [doc.doc_id for doc in documents]

    # Since this function will write docs back like inserting, to ensure
    # here only process existing or removed instead of inserting new,
    # raise error if doc_id exceeded the last.
    if len(doc_ids) > 0 and max(doc_ids) > self._last_id:
        raise IndexError(
            'ID exceeds table length, use existing or removed doc_id.')

    data = self._read()

    # Document specified by ID
    # NOTE(review): reverse() mutates the caller's `documents` list in
    # place, and pop() empties it; confirm callers do not reuse the list
    # after this call.
    documents.reverse()
    for doc_id in doc_ids:
        data[doc_id] = dict(documents.pop())

    self._write(data)

    return doc_ids
[ "def", "write_back", "(", "self", ",", "documents", ",", "doc_ids", "=", "None", ",", "eids", "=", "None", ")", ":", "doc_ids", "=", "_get_doc_ids", "(", "doc_ids", ",", "eids", ")", "if", "doc_ids", "is", "not", "None", "and", "not", "len", "(", "do...
34.852941
21.970588
def _check_message_valid(msg):
    """Check packet length valid and that checksum is good."""
    try:
        # The two leading hex characters declare the byte count that
        # follows them.
        declared_length = int(msg[:2], 16)
        if declared_length != len(msg) - 2:
            raise ValueError("Elk message length incorrect")
        _check_checksum(msg)
    except IndexError:
        raise ValueError("Elk message length incorrect")
[ "def", "_check_message_valid", "(", "msg", ")", ":", "try", ":", "if", "int", "(", "msg", "[", ":", "2", "]", ",", "16", ")", "!=", "(", "len", "(", "msg", ")", "-", "2", ")", ":", "raise", "ValueError", "(", "\"Elk message length incorrect\"", ")", ...
39
14.25
def build_network(self, network=None, *args, **kwargs):
    """ Core method to construct PyPSA Network object.
    """
    # TODO: build_network takes care of divergences in database design and
    # future PyPSA changes from PyPSA's v0.6 on. This concept should be
    # replaced, when the oedb has a revision system in place, because
    # sometime this will break!!!

    # NOTE(review): `!= None` should idiomatically be `is not None`.
    if network != None:
        network = network
    else:
        # No network supplied: start fresh with this exporter's time index.
        network = pypsa.Network()
        network.set_snapshots(self.timeindex)

    timevarying_override = False

    # Map legacy oedb column names onto the names expected by the
    # installed PyPSA version.
    if pypsa.__version__ == '0.11.0':
        old_to_new_name = {'Generator':
                           {'p_min_pu_fixed': 'p_min_pu',
                            'p_max_pu_fixed': 'p_max_pu',
                            'source': 'carrier',
                            'dispatch': 'former_dispatch'},
                           'Bus':
                           {'current_type': 'carrier'},
                           'Transformer':
                           {'trafo_id': 'transformer_id'},
                           'Storage':
                           {'p_min_pu_fixed': 'p_min_pu',
                            'p_max_pu_fixed': 'p_max_pu',
                            'soc_cyclic': 'cyclic_state_of_charge',
                            'soc_initial': 'state_of_charge_initial',
                            'source': 'carrier'}}
        timevarying_override = True
    else:
        old_to_new_name = {'Storage':
                           {'soc_cyclic': 'cyclic_state_of_charge',
                            'soc_initial': 'state_of_charge_initial'}}

    for comp, comp_t_dict in self.config.items():
        # TODO: This is confusing, should be fixed in db
        pypsa_comp_name = 'StorageUnit' if comp == 'Storage' else comp
        df = self.fetch_by_relname(comp)
        if comp in old_to_new_name:
            tmp = old_to_new_name[comp]
            df.rename(columns=tmp, inplace=True)
        network.import_components_from_dataframe(df, pypsa_comp_name)

        if comp_t_dict:
            # Import the time-varying series for each configured column.
            for comp_t, columns in comp_t_dict.items():
                for col in columns:
                    df_series = self.series_fetch_by_relname(comp_t, col)
                    # TODO: VMagPuSet is not implemented.
                    # Drop columns of formerly-"flexible" generators from
                    # the time series frame (they keep static values).
                    if timevarying_override and comp == 'Generator' \
                            and not df_series.empty:
                        idx = df[df.former_dispatch == 'flexible'].index
                        idx = [i for i in idx if i in df_series.columns]
                        df_series.drop(idx, axis=1, inplace=True)
                    try:
                        pypsa.io.import_series_from_dataframe(
                            network, df_series, pypsa_comp_name, col)
                    except (ValueError, AttributeError):
                        print("Series %s of component %s could not be "
                              "imported" % (col, pypsa_comp_name))

    # populate carrier attribute in PyPSA network
    # NOTE(review): `carr_ormclass` is not defined in this function;
    # presumably a module-level constant - verify.
    network.import_components_from_dataframe(
        self.fetch_by_relname(carr_ormclass), 'Carrier')

    self.network = network

    return network
[ "def", "build_network", "(", "self", ",", "network", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# TODO: build_network takes care of divergences in database design and", "# future PyPSA changes from PyPSA's v0.6 on. This concept should be", "# replaced, w...
38.033333
22.833333
def newSession():
    """ Returns a new Requests session with pre-loaded default HTTP Headers

    Generates a new Requests session and consults with the Configuration class
    to determine if a Configuration exists and attempts to use the configured
    HTTP Request headers first. If this fails, it attempts to create a new
    default configuration and use those values. Finally, if a configuration
    cannot be initialized it uses the hard-coded Mozilla headers.

    Returns
       request-client - The configured Requests session

    Raises
       HTTPException
    """
    from neolib.config.Configuration import Configuration
    s = requests.session()

    if not Configuration.loaded():
        if not Configuration.initialize():
            # No configuration available at all: fall back to the
            # hard-coded default headers.
            s.headers.update(Page._defaultVars)
        else:
            s.headers.update(Configuration.getConfig().core.HTTPHeaders.toDict())
    else:
        s.headers.update(Configuration.getConfig().core.HTTPHeaders.toDict())

    # BUG FIX: previously this returned `requests.session()` - a brand-new
    # session WITHOUT the headers configured above. Return the configured
    # session instead.
    return s
[ "def", "newSession", "(", ")", ":", "from", "neolib", ".", "config", ".", "Configuration", "import", "Configuration", "s", "=", "requests", ".", "session", "(", ")", "if", "not", "Configuration", ".", "loaded", "(", ")", ":", "if", "not", "Configuration", ...
41.037037
23.851852
def OnPasteFormat(self, event):
    """Paste format event handler.

    Pastes cell formatting from the clipboard, then refreshes the grid
    view, the attribute toolbar and the zoom level.

    :param event: wx event object triggering the paste (unused here)
    """
    # The actual format mutation is recorded as a single undoable action.
    # NOTE(review): with-block extent reconstructed from a collapsed source
    # line -- confirm whether the refresh calls belong inside the undo group.
    with undo.group(_("Paste format")):
        self.grid.actions.paste_format()
    self.grid.ForceRefresh()
    self.grid.update_attribute_toolbar()
    self.grid.actions.zoom()
[ "def", "OnPasteFormat", "(", "self", ",", "event", ")", ":", "with", "undo", ".", "group", "(", "_", "(", "\"Paste format\"", ")", ")", ":", "self", ".", "grid", ".", "actions", ".", "paste_format", "(", ")", "self", ".", "grid", ".", "ForceRefresh", ...
29.555556
12.888889
def marketYesterdayDF(token='', version=''):
    '''This returns previous day adjusted price data for whole market

    https://iexcloud.io/docs/api/#previous-day-prices
    Available after 4am ET Tue-Sat

    Args:
        symbol (string); Ticker to request
        token (string); Access token
        version (string); API version

    Returns:
        DataFrame: result
    '''
    raw = marketYesterday(token, version)
    records = []
    # Tag each record with its ticker symbol before building the frame.
    for symbol, record in raw.items():
        record['symbol'] = symbol
        records.append(record)
    df = pd.DataFrame(records)
    _toDatetime(df)
    _reindex(df, 'symbol')
    return df
[ "def", "marketYesterdayDF", "(", "token", "=", "''", ",", "version", "=", "''", ")", ":", "x", "=", "marketYesterday", "(", "token", ",", "version", ")", "data", "=", "[", "]", "for", "key", "in", "x", ":", "data", ".", "append", "(", "x", "[", "...
25.26087
18.913043
def followee_num(self):
    """获取关注了多少人.

    :return: 关注的人数
    :rtype: int
    """
    # Anonymous / unknown profiles have no URL and thus no followee count.
    if self.url is None:
        return 0
    side_block = self.soup.find('div', class_='zm-profile-side-following')
    return int(side_block.a.strong.text)
[ "def", "followee_num", "(", "self", ")", ":", "if", "self", ".", "url", "is", "None", ":", "return", "0", "else", ":", "number", "=", "int", "(", "self", ".", "soup", ".", "find", "(", "'div'", ",", "class_", "=", "'zm-profile-side-following'", ")", ...
24.5
16.916667
def _getParLabelAndUnit(self, param):
    """Resolve the axis label and unit for *param*.

    Checks whether *param* refers to the parent star (``star.`` prefix)
    and returns the matching entry from the parameter dictionaries.

    :return: label/unit entry from the parDicts
    """
    sample = self.objectList[0]
    if isinstance(sample, ac.Planet):
        if 'star.' in param:
            # Strip the 'star.' prefix and look the key up on the star side.
            return _starPars[param[5:]]
        return _planetPars[param]
    if isinstance(sample, ac.Star):
        return _starPars[param]
    raise TypeError('Only Planets and Star object are currently supported, you gave {0}'.format(type(sample)))
[ "def", "_getParLabelAndUnit", "(", "self", ",", "param", ")", ":", "firstObject", "=", "self", ".", "objectList", "[", "0", "]", "if", "isinstance", "(", "firstObject", ",", "ac", ".", "Planet", ")", ":", "if", "'star.'", "in", "param", ":", "return", ...
39.470588
17.705882
def set_source_filter(self, source):
    """ Only search for tweets entered via given source

    :param source: String. Name of the source to search for. An example \
    would be ``source=twitterfeed`` for tweets submitted via TwitterFeed
    :raises: TwitterSearchException
    """
    # Python 2/3 compatible string check; sources must be >= 2 chars.
    string_type = str if py3k else basestring
    if not isinstance(source, string_type) or len(source) < 2:
        raise TwitterSearchException(1009)
    self.source_filter = source
[ "def", "set_source_filter", "(", "self", ",", "source", ")", ":", "if", "isinstance", "(", "source", ",", "str", "if", "py3k", "else", "basestring", ")", "and", "len", "(", "source", ")", ">=", "2", ":", "self", ".", "source_filter", "=", "source", "el...
39.666667
19.333333
def create_mackup_home(self):
    """If the Mackup home folder does not exist, create it."""
    if os.path.isdir(self.mackup_folder):
        return
    prompt = ("Mackup needs a directory to store your"
              " configuration files\n"
              "Do you want to create it now? <{}>").format(self.mackup_folder)
    if utils.confirm(prompt):
        os.makedirs(self.mackup_folder)
    else:
        utils.error("Mackup can't do anything without a home =(")
[ "def", "create_mackup_home", "(", "self", ")", ":", "if", "not", "os", ".", "path", ".", "isdir", "(", "self", ".", "mackup_folder", ")", ":", "if", "utils", ".", "confirm", "(", "\"Mackup needs a directory to store your\"", "\" configuration files\\n\"", "\"Do yo...
52.6
16.8
def abort(self):
    """ ensure the master exit from Barrier """
    # Release in the same order as a normal pass-through: mutex, first
    # turnstile, mutex again, second turnstile -- so any thread blocked
    # at either turnstile (or on the mutex) gets woken up.
    for semaphore in (self.mutex, self.turnstile, self.mutex, self.turnstile2):
        semaphore.release()
[ "def", "abort", "(", "self", ")", ":", "self", ".", "mutex", ".", "release", "(", ")", "self", ".", "turnstile", ".", "release", "(", ")", "self", ".", "mutex", ".", "release", "(", ")", "self", ".", "turnstile2", ".", "release", "(", ")" ]
31.333333
10.5
def copy_tree_root(src_dir: str, dest_parent: str) -> None:
    """Copy the directory *src_dir* (root folder included) into *dest_parent*.

    With a file structure like::

        /source/thing/a.txt
        /source/thing/somedir/c.txt

    ``copy_tree_root("/source/thing", "/dest")`` creates::

        /dest/thing/a.txt
        /dest/thing/somedir/c.txt
    """
    # normpath strips a trailing slash so basename yields the folder name.
    base_name = os.path.basename(os.path.normpath(src_dir))
    shutil.copytree(src_dir, os.path.join(dest_parent, base_name))
[ "def", "copy_tree_root", "(", "src_dir", ":", "str", ",", "dest_parent", ":", "str", ")", "->", "None", ":", "dirname", "=", "os", ".", "path", ".", "basename", "(", "os", ".", "path", ".", "normpath", "(", "src_dir", ")", ")", "dest_dir", "=", "os",...
23.428571
20.142857
def get_all(self, keys):
    """
    Returns the entries for the given keys.

    **Warning: The returned map is NOT backed by the original map, so
    changes to the original map are NOT reflected in the returned map, and
    vice-versa.**

    **Warning 2: This method uses __hash__ and __eq__ methods of binary form
    of the key, not the actual implementations of __hash__ and __eq__
    defined in key's class.**

    :param keys: (Collection), keys to get.
    :return: (dict), dictionary of map entries.
    """
    check_not_none(keys, "keys can't be None")
    if not keys:
        return ImmediateFuture({})

    partition_service = self._client.partition_service
    # Group the serialized keys by their owning partition.
    keys_by_partition = {}
    for key in keys:
        check_not_none(key, "key can't be None")
        key_data = self._to_data(key)
        partition_id = partition_service.get_partition_id(key_data)
        keys_by_partition.setdefault(partition_id, {})[key] = key_data

    return self._get_all_internal(keys_by_partition)
[ "def", "get_all", "(", "self", ",", "keys", ")", ":", "check_not_none", "(", "keys", ",", "\"keys can't be None\"", ")", "if", "not", "keys", ":", "return", "ImmediateFuture", "(", "{", "}", ")", "partition_service", "=", "self", ".", "_client", ".", "part...
37.225806
22.516129
def compute_pmap_stats(pmaps, stats, weights, imtls):
    """
    :param pmaps: a list of R probability maps
    :param stats: a sequence of S statistic functions
    :param weights: a list of ImtWeights
    :param imtls: a DictArray of intensity measure types
    :returns: a probability map with S internal values
    """
    sids = set()
    p0 = next(iter(pmaps))
    # L = number of hazard levels (the second dimension of each map's arrays)
    L = p0.shape_y
    for pmap in pmaps:
        sids.update(pmap)
        # every input map must share the same number of levels
        assert pmap.shape_y == L, (pmap.shape_y, L)
    if len(sids) == 0:
        raise ValueError('All empty probability maps!')
    sids = numpy.array(sorted(sids), numpy.uint32)
    nstats = len(stats)
    # curves[r, j, :] is the curve of realization r at site sids[j];
    # sites missing from a given map keep zero probabilities
    curves = numpy.zeros((len(pmaps), len(sids), L), numpy.float64)
    for i, pmap in enumerate(pmaps):
        for j, sid in enumerate(sids):
            if sid in pmap:
                curves[i, j] = pmap[sid].array[:, 0]
    # output map with S inner values per site, built from p0's class
    out = p0.__class__.build(L, nstats, sids)
    for imt in imtls:
        # slice of the level axis belonging to this intensity measure type
        slc = imtls(imt)
        w = [weight[imt] for weight in weights]
        # compute the S statistics across the realization axis, per IMT
        for i, array in enumerate(compute_stats(curves[:, :, slc], stats, w)):
            # NOTE(review): ndenumerate yields 1-tuple indices here; array[j]
            # accepts them, but plain enumerate would be clearer -- confirm
            for j, sid in numpy.ndenumerate(sids):
                out[sid].array[slc, i] = array[j]
    return out
[ "def", "compute_pmap_stats", "(", "pmaps", ",", "stats", ",", "weights", ",", "imtls", ")", ":", "sids", "=", "set", "(", ")", "p0", "=", "next", "(", "iter", "(", "pmaps", ")", ")", "L", "=", "p0", ".", "shape_y", "for", "pmap", "in", "pmaps", "...
33.527778
14.305556
def get(name, stype, **kwargs):
    """Returns the rcParams specified in the style file given by `name`
    and `stype`.

    Parameters
    ----------
    name: str
        The name of the style.
    stype: str
        Any of ('context', 'style', 'palette').
    kwargs:
        - stylelib_url: str
            Overwrite the value in the local config with the specified url.
        - ignore_cache: bool
            Ignore files in the cache and force loading from the stylelib.

    Raises
    ------
    ValueError:
        If `stype` is not any of ('context', 'style', 'palette')

    Returns
    -------
    rcParams: dict
        The parameter dict of the file.
    """
    stype = str(stype)
    # Reject unknown style types up front.
    if stype not in MPLS_STYPES:
        raise ValueError('unexpected stype: {}! Must be any of {!r}'.format(stype, MPLS_STYPES))
    params = dict(__get(name, stype, **kwargs))
    # color palette hack: wrap raw color lists into a matplotlib cycler
    if params.get('axes.prop_cycle'):
        params['axes.prop_cycle'] = mpl.rcsetup.cycler('color', params['axes.prop_cycle'])
    return params
[ "def", "get", "(", "name", ",", "stype", ",", "*", "*", "kwargs", ")", ":", "stype", "=", "str", "(", "stype", ")", "params", "=", "{", "}", "if", "stype", "in", "MPLS_STYPES", ":", "params", ".", "update", "(", "__get", "(", "name", ",", "stype"...
26.842105
24.447368
def parser(self, lines):
    """Given a set of lines parse the into a MOP Header

    :param lines: list of header lines; parsed lines are consumed from
        the front of the list.
    :returns: self, once the first non-header line is reached.
    :raises IOError: if the lines run out before the header ends.
    """
    while len(lines) > 0:
        # Guard len(lines) > 1 before peeking at lines[1]: the original
        # raised IndexError when a lone '##' line ended the input.
        if lines[0].startswith('##') and len(lines) > 1 \
                and lines[1].startswith('# '):
            # A two-line keyword/value line starts here.
            self._header_append(lines.pop(0), lines.pop(0))
        elif lines[0].startswith('# '):
            # Lines with single comments are exposure numbers unless
            # preceeded by double comment line
            self._append_file_id(lines.pop(0))
        elif lines[0].startswith('##'):
            # Double comment lines without a single comment following are
            # column headers for dataset.
            self._set_column_names(lines.pop(0)[2:])
        else:
            # Last line of the header reached.
            return self
    raise IOError("Failed trying to read header")
[ "def", "parser", "(", "self", ",", "lines", ")", ":", "while", "len", "(", "lines", ")", ">", "0", ":", "if", "lines", "[", "0", "]", ".", "startswith", "(", "'##'", ")", "and", "lines", "[", "1", "]", ".", "startswith", "(", "'# '", ")", ":", ...
53.625
20.125
def generate_iv_for_export(self, client_random, server_random,
                           con_end, read_or_write, req_len):
    """
    Generate IV for EXPORT ciphersuite, i.e. weakens it.
    An export IV generation example is given in section 6.3.1 of RFC 2246.
    See also page 86 of EKR's book.
    """
    s = con_end + read_or_write
    # True for the client-write / server-read direction of the connection
    s = (s == "clientwrite" or s == "serverread")
    if self.tls_version < 0x0300:
        # pre-SSLv3: no export IV derivation defined here
        return None
    elif self.tls_version == 0x0300:
        # SSLv3: IV is a truncated MD5 of the two randoms, whose
        # concatenation order depends on the direction
        if s:
            tbh = client_random + server_random
        else:
            tbh = server_random + client_random
        iv = _tls_hash_algs["MD5"]().digest(tbh)[:req_len]
    else:
        # TLS 1.0+: both directions' IVs come from one PRF block keyed with
        # an empty secret; client IV first, server IV second
        iv_block = self.prf("", b"IV block",
                            client_random + server_random,
                            2 * req_len)
        if s:
            iv = iv_block[:req_len]
        else:
            iv = iv_block[req_len:]
    return iv
[ "def", "generate_iv_for_export", "(", "self", ",", "client_random", ",", "server_random", ",", "con_end", ",", "read_or_write", ",", "req_len", ")", ":", "s", "=", "con_end", "+", "read_or_write", "s", "=", "(", "s", "==", "\"clientwrite\"", "or", "s", "==",...
37.071429
14.5
def __get_verb(counts):
    """Let's fetch a VERB

    :param counts: dict holding 'max_verb', the highest verb_id to draw
    """
    cursor = CONN.cursor()
    cursor.execute("select verb_id from surverbs")
    existing_ids = [row[0] for row in cursor.fetchall()]

    # Draw random ids until one that exists in the table comes up.
    rand = random.randint(1, counts['max_verb'])
    while rand not in existing_ids:
        rand = random.randint(1, counts['max_verb'])

    cursor.execute("select * from surverbs where verb_id = {0}".format(rand))
    row = cursor.fetchone()
    # cursor.close()
    return row[1]
[ "def", "__get_verb", "(", "counts", ")", ":", "cursor", "=", "CONN", ".", "cursor", "(", ")", "check_query", "=", "\"select verb_id from surverbs\"", "cursor", ".", "execute", "(", "check_query", ")", "check_result", "=", "cursor", ".", "fetchall", "(", ")", ...
22.384615
20.153846
def _unpack_bin(self, packed): """ Internal. Decodes 16 bit RGB565 into python list [R,G,B] """ output = struct.unpack('H', packed) bits16 = output[0] r = (bits16 & 0xF800) >> 11 g = (bits16 & 0x7E0) >> 5 b = (bits16 & 0x1F) return [int(r << 3), int(g << 2), int(b << 3)]
[ "def", "_unpack_bin", "(", "self", ",", "packed", ")", ":", "output", "=", "struct", ".", "unpack", "(", "'H'", ",", "packed", ")", "bits16", "=", "output", "[", "0", "]", "r", "=", "(", "bits16", "&", "0xF800", ")", ">>", "11", "g", "=", "(", ...
30.363636
11.818182
def get_field_kwargs(field_name, model_field):
    """
    Creating a default instance of a basic non-relational field.
    """
    kwargs = {}

    # The following will only be used by ModelField classes.
    # Gets removed for everything else.
    kwargs['model_field'] = model_field

    if hasattr(model_field, 'verbose_name') and needs_label(model_field, field_name):
        kwargs['label'] = capfirst(model_field.verbose_name)

    if hasattr(model_field, 'help_text'):
        kwargs['help_text'] = model_field.help_text

    if isinstance(model_field, me_fields.DecimalField):
        # mongoengine declares 'precision'; DRF expects decimal_places and
        # max_digits, so derive both here.
        precision = model_field.precision
        max_value = getattr(model_field, 'max_value', None)
        if max_value is not None:
            max_length = len(str(max_value)) + precision
        else:
            # no declared bound: fall back to a very permissive digit count
            max_length = 65536
        kwargs['decimal_places'] = precision
        kwargs['max_digits'] = max_length

    if isinstance(model_field, me_fields.GeoJsonBaseField):
        kwargs['geo_type'] = model_field._type

    if isinstance(model_field, me_fields.SequenceField) or model_field.primary_key or model_field.db_field == '_id':
        # If this field is read-only, then return early.
        # Further keyword arguments are not valid.
        kwargs['read_only'] = True
        return kwargs

    if model_field.default and not isinstance(model_field, me_fields.ComplexBaseField):
        kwargs['default'] = model_field.default
    if model_field.null:
        kwargs['allow_null'] = True
    if model_field.null and isinstance(model_field, me_fields.StringField):
        kwargs['allow_blank'] = True
    if 'default' not in kwargs:
        kwargs['required'] = model_field.required

    # handle special cases - compound fields: mongoengine.ListField/DictField
    # NOTE(review): if a default was set above, 'required' is never written
    # and this lookup raises KeyError -- confirm callers never hit that path.
    if kwargs['required'] is True:
        if isinstance(model_field, me_fields.ListField) or isinstance(model_field, me_fields.DictField):
            kwargs['allow_empty'] = False

    if model_field.choices:
        # If this model field contains choices, then return early.
        # Further keyword arguments are not valid.
        kwargs['choices'] = model_field.choices
        return kwargs

    if isinstance(model_field, me_fields.StringField):
        if model_field.regex:
            kwargs['regex'] = model_field.regex

    max_length = getattr(model_field, 'max_length', None)
    if max_length is not None and isinstance(model_field, me_fields.StringField):
        kwargs['max_length'] = max_length

    min_length = getattr(model_field, 'min_length', None)
    if min_length is not None and isinstance(model_field, me_fields.StringField):
        kwargs['min_length'] = min_length

    max_value = getattr(model_field, 'max_value', None)
    if max_value is not None and isinstance(model_field, NUMERIC_FIELD_TYPES):
        kwargs['max_value'] = max_value

    min_value = getattr(model_field, 'min_value', None)
    if min_value is not None and isinstance(model_field, NUMERIC_FIELD_TYPES):
        kwargs['min_value'] = min_value

    return kwargs
[ "def", "get_field_kwargs", "(", "field_name", ",", "model_field", ")", ":", "kwargs", "=", "{", "}", "# The following will only be used by ModelField classes.", "# Gets removed for everything else.", "kwargs", "[", "'model_field'", "]", "=", "model_field", "if", "hasattr", ...
37.772152
21.898734
def _get_metadap_dap(name, version=''):
    '''Return data for dap of given or latest version.'''
    m = metadap(name)
    if not m:
        raise DapiCommError('DAP {dap} not found.'.format(dap=name))
    if version:
        # Explicit version requested: fetch it directly.
        d = dap(name, version)
    else:
        # No version: prefer the latest stable release, fall back to latest.
        d = m['latest_stable'] or m['latest']
        if d:
            d = data(d)
    if not d:
        raise DapiCommError(
            'DAP {dap} doesn\'t have version {version}.'.format(dap=name, version=version))
    return m, d
[ "def", "_get_metadap_dap", "(", "name", ",", "version", "=", "''", ")", ":", "m", "=", "metadap", "(", "name", ")", "if", "not", "m", ":", "raise", "DapiCommError", "(", "'DAP {dap} not found.'", ".", "format", "(", "dap", "=", "name", ")", ")", "if", ...
33.066667
20.933333
def _set_ext_vni(self, v, load=False):
    """
    Setter method for ext_vni, mapped from YANG variable /overlay/access_list/type/vxlan/extended/ext_seq/ext_vni (uint32)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_ext_vni is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_ext_vni() directly.
    """
    # Auto-generated (pyangbind-style) setter: coerce v through its declared
    # type if present, then wrap it in the restricted uint32 class that
    # enforces the YANG range 1..16777215.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1 .. 16777215']}), is_leaf=True, yang_name="ext-vni", rest_name="vni", parent=self, choice=(u'choice-ext-vni', u'case-ext-vni'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'vni number: 1..16777215', u'alt-name': u'vni', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-vxlan-visibility', defining_module='brocade-vxlan-visibility', yang_type='uint32', is_config=True)
    except (TypeError, ValueError):
        # Re-raise with the generated error payload describing the expected type.
        raise ValueError({
            'error-string': """ext_vni must be of a type compatible with uint32""",
            'defined-type': "uint32",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1 .. 16777215']}), is_leaf=True, yang_name="ext-vni", rest_name="vni", parent=self, choice=(u'choice-ext-vni', u'case-ext-vni'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'vni number: 1..16777215', u'alt-name': u'vni', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-vxlan-visibility', defining_module='brocade-vxlan-visibility', yang_type='uint32', is_config=True)""",
        })
    self.__ext_vni = t
    # Notify the framework that this container changed, if supported.
    if hasattr(self, '_set'):
        self._set()
[ "def", "_set_ext_vni", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "base",...
96.181818
46.272727
def send_task(self, request, response):
    """send off a celery task for the current page and recache"""
    # TODO is this too messy?
    from bettercache.tasks import GeneratePage
    try:
        GeneratePage.apply_async((strip_wsgi(request),))
    except Exception:
        # Fix: the original bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; catch Exception and log with the traceback so
        # broker failures are debuggable.
        logger.exception("failed to send celery task")
    self.set_cache(request, response)
[ "def", "send_task", "(", "self", ",", "request", ",", "response", ")", ":", "# TODO is this too messy?", "from", "bettercache", ".", "tasks", "import", "GeneratePage", "try", ":", "GeneratePage", ".", "apply_async", "(", "(", "strip_wsgi", "(", "request", ")", ...
37.3
14.6
async def log_source(self, **params):
    """ Logging users request sources """
    # Payload may arrive wrapped in a JSON-encoded "message" field.
    message = params.get("message")
    if message:
        params = json.loads(message)
    if not params:
        return {"error": 400, "reason": "Missed required fields"}
    # Append the source to the caller's source set, creating the document
    # on first sight of this public key.
    database = client[settings.DBNAME]
    source_collection = database[settings.SOURCE]
    await source_collection.update(
        {"public_key": params.get("public_key")},
        {"$addToSet": {"source": params.get("source")}},
        upsert=True)
    return {"result": "ok"}
[ "async", "def", "log_source", "(", "self", ",", "*", "*", "params", ")", ":", "if", "params", ".", "get", "(", "\"message\"", ")", ":", "params", "=", "json", ".", "loads", "(", "params", ".", "get", "(", "\"message\"", ",", "\"{}\"", ")", ")", "if...
29.611111
18.277778
def build_images():
    # type: () -> None
    """ Build all docker images for the project. """
    registry = conf.get('docker.registry')
    # Build each image declared in the project configuration.
    for image in conf.get('docker.images', []):
        build_image(registry, image)
[ "def", "build_images", "(", ")", ":", "# type: () -> None", "registry", "=", "conf", ".", "get", "(", "'docker.registry'", ")", "docker_images", "=", "conf", ".", "get", "(", "'docker.images'", ",", "[", "]", ")", "for", "image", "in", "docker_images", ":", ...
31.375
12.875
def _PrintStorageInformationAsText(self, storage_reader):
    """Prints information about the store as human-readable text.

    Args:
      storage_reader (StorageReader): storage reader.
    """
    # Header table: file name, format and serialization details.
    table_view = views.ViewsFactory.GetTableView(
        self._views_format_type, title='Plaso Storage Information')
    table_view.AddRow(['Filename', os.path.basename(self._storage_file_path)])
    table_view.AddRow(['Format version', storage_reader.format_version])
    table_view.AddRow(
        ['Serialization format', storage_reader.serialization_format])
    table_view.Write(self._output_writer)

    if storage_reader.storage_type == definitions.STORAGE_TYPE_SESSION:
        # Session stores: print per-session and per-counter breakdowns.
        self._PrintSessionsOverview(storage_reader)
        self._PrintSessionsDetails(storage_reader)

        storage_counters = self._CalculateStorageCounters(storage_reader)

        if 'parsers' not in storage_counters:
            self._output_writer.Write(
                'Unable to determine number of events generated per parser.\n')
        else:
            self._PrintParsersCounter(storage_counters['parsers'])

        if 'analysis_reports' not in storage_counters:
            self._output_writer.Write(
                'Unable to determine number of reports generated per plugin.\n')
        else:
            self._PrintAnalysisReportCounter(storage_counters['analysis_reports'])

        if 'event_labels' not in storage_counters:
            self._output_writer.Write(
                'Unable to determine number of event tags generated per label.\n')
        else:
            self._PrintEventLabelsCounter(storage_counters['event_labels'])

        self._PrintWarningCounters(storage_counters)

        # Verbose mode adds full warning and report details.
        if self._verbose:
            self._PrintWarningsDetails(storage_reader)

        self._PrintAnalysisReportsDetails(storage_reader)

    elif storage_reader.storage_type == definitions.STORAGE_TYPE_TASK:
        # Task stores only carry task bookkeeping information.
        self._PrintTasksInformation(storage_reader)
[ "def", "_PrintStorageInformationAsText", "(", "self", ",", "storage_reader", ")", ":", "table_view", "=", "views", ".", "ViewsFactory", ".", "GetTableView", "(", "self", ".", "_views_format_type", ",", "title", "=", "'Plaso Storage Information'", ")", "table_view", ...
39.234043
23.702128
def check_request_parameters(self, parameters: dict = dict):
    """Check parameters passed to avoid errors and help debug.

    :param dict response: search request parameters
    """
    # NOTE(review): the default value is the dict *class*, not an instance;
    # calling without arguments would fail on parameters.get -- confirm
    # callers always pass a dict.
    # -- SEMANTIC QUERY ---------------------------------------------------
    li_args = parameters.get("q").split()
    logging.debug(li_args)
    # Unicity: some filters may appear at most once in the query string.
    li_filters = [i.split(":")[0] for i in li_args]
    filters_count = Counter(li_filters)
    li_filters_must_be_unique = ("coordinate-system", "format", "owner", "type")
    for i in filters_count:
        if i in li_filters_must_be_unique and filters_count.get(i) > 1:
            raise ValueError(
                "This query filter must be unique: {}"
                " and it occured {} times.".format(i, filters_count.get(i))
            )
    # dict: dispatch each "prefix:value" token into its filter bucket;
    # tokens with no known prefix are treated as free text.
    dico_query = FILTER_KEYS.copy()
    for i in li_args:
        if i.startswith("action"):
            dico_query["action"].append(i.split(":")[1:][0])
            continue
        elif i.startswith("catalog"):
            dico_query["catalog"].append(i.split(":")[1:][0])
            continue
        elif i.startswith("contact") and i.split(":")[1] == "group":
            dico_query["contact:group"].append(i.split(":")[1:][1])
            continue
        elif i.startswith("contact"):
            dico_query["contact:isogeo"].append(i.split(":", 1)[1])
            continue
        elif i.startswith("coordinate-system"):
            dico_query["coordinate-system"].append(i.split(":")[1:][0])
            continue
        elif i.startswith("data-source"):
            dico_query["data-source"].append(i.split(":")[1:][0])
            continue
        elif i.startswith("format"):
            dico_query["format"].append(i.split(":")[1:][0])
            continue
        elif i.startswith("has-no"):
            dico_query["has-no"].append(i.split(":")[1:][0])
            continue
        elif i.startswith("keyword:isogeo"):
            dico_query["keyword:isogeo"].append(i.split(":")[1:][1])
            continue
        elif i.startswith("keyword:inspire-theme"):
            dico_query["keyword:inspire-theme"].append(i.split(":")[1:][1])
            continue
        elif i.startswith("license:isogeo"):
            dico_query["license:isogeo"].append(i.split(":")[1:][1:])
            continue
        elif i.startswith("license"):
            dico_query["license:group"].append(i.split(":", 1)[1:][0:])
            continue
        elif i.startswith("owner"):
            dico_query["owner"].append(i.split(":")[1:][0])
            continue
        elif i.startswith("provider"):
            dico_query["provider"].append(i.split(":")[1:][0])
            continue
        elif i.startswith("share"):
            dico_query["share"].append(i.split(":")[1:][0])
            continue
        elif i.startswith("type"):
            dico_query["type"].append(i.split(":")[1:][0])
            continue
        else:
            # logging.debug(i.split(":")[1], i.split(":")[1].isdigit())
            dico_query["text"].append(i)
            continue
    # Values: validate the closed-vocabulary filters against allowed sets.
    # NOTE(review): this dict comprehension keeps only the LAST value of a
    # repeated prefix -- confirm that is intended for non-unique filters.
    dico_filters = {i.split(":")[0]: i.split(":")[1:] for i in li_args}
    if dico_filters.get("type", ("dataset",))[0].lower() not in FILTER_TYPES:
        raise ValueError(
            "type value must be one of: {}".format(" | ".join(FILTER_TYPES))
        )
    elif dico_filters.get("action", ("download",))[0].lower() not in FILTER_ACTIONS:
        raise ValueError(
            "action value must be one of: {}".format(" | ".join(FILTER_ACTIONS))
        )
    elif (
        dico_filters.get("provider", ("manual",))[0].lower() not in FILTER_PROVIDERS
    ):
        raise ValueError(
            "provider value must be one of: {}".format(" | ".join(FILTER_PROVIDERS))
        )
    else:
        logging.debug(dico_filters)
    # -- GEOGRAPHIC -------------------------------------------------------
    in_box = parameters.get("box")
    in_geo = parameters.get("geo")
    # geometric relation: only meaningful together with box or geo
    in_rel = parameters.get("rel")
    if in_rel and in_box is None and in_geo is None:
        raise ValueError("'rel' should'nt be used without box or geo.")
    elif in_rel not in GEORELATIONS and in_rel is not None:
        raise ValueError(
            "{} is not a correct value for 'georel'."
            " Must be one of: {}.".format(in_rel, " | ".join(GEORELATIONS))
        )
[ "def", "check_request_parameters", "(", "self", ",", "parameters", ":", "dict", "=", "dict", ")", ":", "# -- SEMANTIC QUERY ---------------------------------------------------", "li_args", "=", "parameters", ".", "get", "(", "\"q\"", ")", ".", "split", "(", ")", "lo...
43.635514
20.35514
def get_task_positions_objs(client, list_id):
    '''
    Gets a list containing the object that encapsulates information about
    the order lists are laid out in. This list will always contain exactly
    one object.

    See https://developer.wunderlist.com/documentation/endpoints/positions
    for more info

    Return:
        A list containing a single ListPositionsObj-mapped object
    '''
    response = client.authenticated_request(
        client.api.Endpoints.TASK_POSITIONS,
        params={'list_id': int(list_id)})
    return response.json()
[ "def", "get_task_positions_objs", "(", "client", ",", "list_id", ")", ":", "params", "=", "{", "'list_id'", ":", "int", "(", "list_id", ")", "}", "response", "=", "client", ".", "authenticated_request", "(", "client", ".", "api", ".", "Endpoints", ".", "TA...
39.928571
35.071429
def get_instance_property(instance, property_name):
    """Retrieves property of an instance, keeps retrying until getting a non-None"""
    name = get_name(instance)
    while True:
        try:
            value = getattr(instance, property_name)
            if value is None:
                # Value not populated yet: wait, refresh and try again.
                print(f"retrieving {property_name} on {name} produced None, retrying")
                time.sleep(RETRY_INTERVAL_SEC)
                instance.reload()
                continue
            return value
        except Exception as e:
            # Lookup (or the reload above) failed: wait, best-effort refresh,
            # then retry the whole attempt.
            print(f"retrieving {property_name} on {name} failed with {e}, retrying")
            time.sleep(RETRY_INTERVAL_SEC)
            try:
                instance.reload()
            except Exception:
                pass
            continue
[ "def", "get_instance_property", "(", "instance", ",", "property_name", ")", ":", "name", "=", "get_name", "(", "instance", ")", "while", "True", ":", "try", ":", "value", "=", "getattr", "(", "instance", ",", "property_name", ")", "if", "value", "is", "not...
28.434783
21.304348
def sort_samples(self, stable=True, inplace=False, ascending=True):
    """Sort the columns of the matrix alphabetically by sample name.

    Parameters
    ----------
    stable: bool, optional
        Whether to use a stable sorting algorithm. [True]
    inplace: bool, optional
        Whether to perform the operation in place.[False]
    ascending: bool, optional
        Whether to sort in ascending order [True]

    Returns
    -------
    `ExpMatrix`
        The sorted matrix.
    """
    # mergesort is pandas' stable algorithm; quicksort otherwise.
    algorithm = 'mergesort' if stable else 'quicksort'
    return self.sort_index(axis=1, kind=algorithm,
                           inplace=inplace, ascending=ascending)
[ "def", "sort_samples", "(", "self", ",", "stable", "=", "True", ",", "inplace", "=", "False", ",", "ascending", "=", "True", ")", ":", "kind", "=", "'quicksort'", "if", "stable", ":", "kind", "=", "'mergesort'", "return", "self", ".", "sort_index", "(", ...
32.954545
18
def _advance_to_next_stage(self, config_ids, losses): """ SuccessiveHalving simply continues the best based on the current loss. """ ranks = np.argsort(np.argsort(losses)) return(ranks < max(self.min_samples_advance, self.num_configs[self.stage] * (1-self.resampling_rate)) )
[ "def", "_advance_to_next_stage", "(", "self", ",", "config_ids", ",", "losses", ")", ":", "ranks", "=", "np", ".", "argsort", "(", "np", ".", "argsort", "(", "losses", ")", ")", "return", "(", "ranks", "<", "max", "(", "self", ".", "min_samples_advance",...
40.428571
21.285714
def login(request, template_name=None, extra_context=None, **kwargs): """Logs a user in using the :class:`~openstack_auth.forms.Login` form.""" # If the user enabled websso and the default redirect # redirect to the default websso url if (request.method == 'GET' and utils.is_websso_enabled and utils.is_websso_default_redirect()): protocol = utils.get_websso_default_redirect_protocol() region = utils.get_websso_default_redirect_region() origin = utils.build_absolute_uri(request, '/auth/websso/') url = ('%s/auth/OS-FEDERATION/websso/%s?origin=%s' % (region, protocol, origin)) return shortcuts.redirect(url) # If the user enabled websso and selects default protocol # from the dropdown, We need to redirect user to the websso url if request.method == 'POST': auth_type = request.POST.get('auth_type', 'credentials') if utils.is_websso_enabled() and auth_type != 'credentials': region_id = request.POST.get('region') auth_url = getattr(settings, 'WEBSSO_KEYSTONE_URL', forms.get_region_endpoint(region_id)) url = utils.get_websso_url(request, auth_url, auth_type) return shortcuts.redirect(url) if not request.is_ajax(): # If the user is already authenticated, redirect them to the # dashboard straight away, unless the 'next' parameter is set as it # usually indicates requesting access to a page that requires different # permissions. if (request.user.is_authenticated and auth.REDIRECT_FIELD_NAME not in request.GET and auth.REDIRECT_FIELD_NAME not in request.POST): return shortcuts.redirect(settings.LOGIN_REDIRECT_URL) # Get our initial region for the form. 
initial = {} current_region = request.session.get('region_endpoint', None) requested_region = request.GET.get('region', None) regions = dict(getattr(settings, "AVAILABLE_REGIONS", [])) if requested_region in regions and requested_region != current_region: initial.update({'region': requested_region}) if request.method == "POST": form = functional.curry(forms.Login) else: form = functional.curry(forms.Login, initial=initial) if extra_context is None: extra_context = {'redirect_field_name': auth.REDIRECT_FIELD_NAME} extra_context['csrf_failure'] = request.GET.get('csrf_failure') choices = getattr(settings, 'WEBSSO_CHOICES', ()) extra_context['show_sso_opts'] = (utils.is_websso_enabled() and len(choices) > 1) if not template_name: if request.is_ajax(): template_name = 'auth/_login.html' extra_context['hide'] = True else: template_name = 'auth/login.html' res = django_auth_views.login(request, template_name=template_name, authentication_form=form, extra_context=extra_context, **kwargs) # Save the region in the cookie, this is used as the default # selected region next time the Login form loads. if request.method == "POST": utils.set_response_cookie(res, 'login_region', request.POST.get('region', '')) utils.set_response_cookie(res, 'login_domain', request.POST.get('domain', '')) # Set the session data here because django's session key rotation # will erase it if we set it earlier. 
if request.user.is_authenticated: auth_user.set_session_from_user(request, request.user) regions = dict(forms.Login.get_region_choices()) region = request.user.endpoint login_region = request.POST.get('region') region_name = regions.get(login_region) request.session['region_endpoint'] = region request.session['region_name'] = region_name expiration_time = request.user.time_until_expiration() threshold_days = getattr( settings, 'PASSWORD_EXPIRES_WARNING_THRESHOLD_DAYS', -1) if expiration_time is not None and \ expiration_time.days <= threshold_days: expiration_time = str(expiration_time).rsplit(':', 1)[0] msg = (_('Please consider changing your password, it will expire' ' in %s minutes') % expiration_time).replace(':', ' Hours and ') messages.warning(request, msg) return res
[ "def", "login", "(", "request", ",", "template_name", "=", "None", ",", "extra_context", "=", "None", ",", "*", "*", "kwargs", ")", ":", "# If the user enabled websso and the default redirect", "# redirect to the default websso url", "if", "(", "request", ".", "method...
46.438776
19.367347
def significance_fdr(p, alpha): """Calculate significance by controlling for the false discovery rate. This function determines which of the p-values in `p` can be considered significant. Correction for multiple comparisons is performed by controlling the false discovery rate (FDR). The FDR is the maximum fraction of p-values that are wrongly considered significant [1]_. Parameters ---------- p : array, shape (channels, channels, nfft) p-values. alpha : float Maximum false discovery rate. Returns ------- s : array, dtype=bool, shape (channels, channels, nfft) Significance of each p-value. References ---------- .. [1] Y. Benjamini, Y. Hochberg. Controlling the false discovery rate: a practical and powerful approach to multiple testing. J. Royal Stat. Soc. Series B 57(1): 289-300, 1995. """ i = np.argsort(p, axis=None) m = i.size - np.sum(np.isnan(p)) j = np.empty(p.shape, int) j.flat[i] = np.arange(1, i.size + 1) mask = p <= alpha * j / m if np.sum(mask) == 0: return mask # find largest k so that p_k <= alpha*k/m k = np.max(j[mask]) # reject all H_i for i = 0...k s = j <= k return s
[ "def", "significance_fdr", "(", "p", ",", "alpha", ")", ":", "i", "=", "np", ".", "argsort", "(", "p", ",", "axis", "=", "None", ")", "m", "=", "i", ".", "size", "-", "np", ".", "sum", "(", "np", ".", "isnan", "(", "p", ")", ")", "j", "=", ...
27.886364
22.818182
def has_opt(self, opt): """Check if option is present in configuration file Parameters ----------- opt : string Name of option (e.g. output-file-format) """ for sec in self.sections: val = self.cp.has_option(sec, opt) if val: return val return False
[ "def", "has_opt", "(", "self", ",", "opt", ")", ":", "for", "sec", "in", "self", ".", "sections", ":", "val", "=", "self", ".", "cp", ".", "has_option", "(", "sec", ",", "opt", ")", "if", "val", ":", "return", "val", "return", "False" ]
24.714286
17.142857
def encrypt(self, password): """Encrypt the password. """ if not password or not self._crypter: return password or b'' return self._crypter.encrypt(password)
[ "def", "encrypt", "(", "self", ",", "password", ")", ":", "if", "not", "password", "or", "not", "self", ".", "_crypter", ":", "return", "password", "or", "b''", "return", "self", ".", "_crypter", ".", "encrypt", "(", "password", ")" ]
32.666667
4.833333
def add(assetclass: int, symbol: str): """ Add a stock to an asset class """ assert isinstance(symbol, str) assert isinstance(assetclass, int) symbol = symbol.upper() app = AppAggregate() new_item = app.add_stock_to_class(assetclass, symbol) print(f"Record added: {new_item}.")
[ "def", "add", "(", "assetclass", ":", "int", ",", "symbol", ":", "str", ")", ":", "assert", "isinstance", "(", "symbol", ",", "str", ")", "assert", "isinstance", "(", "assetclass", ",", "int", ")", "symbol", "=", "symbol", ".", "upper", "(", ")", "ap...
33.111111
10.777778
def mail2blogger(entry, **kwargs): """This signal handler cross-posts published ``Entry``'s to Blogger. For this to work, the following settings must be non-False; e.g.: BLARGG = { 'mail2blogger': True, 'mail2blogger_email': 'user@example.com', } """ enabled = blargg_settings.get('mail2blogger', False) recipient = blargg_settings.get('mail2blogger_email', None) if enabled and recipient: # Send HTML (and text-only) email msg = EmailMultiAlternatives( entry.title, # Subject striptags(entry.crossposted_content), # Text-only settings.DEFAULT_FROM_EMAIL, # From [recipient] # List of Recipients ) msg.attach_alternative(entry.crossposted_content, "text/html") msg.send(fail_silently=True)
[ "def", "mail2blogger", "(", "entry", ",", "*", "*", "kwargs", ")", ":", "enabled", "=", "blargg_settings", ".", "get", "(", "'mail2blogger'", ",", "False", ")", "recipient", "=", "blargg_settings", ".", "get", "(", "'mail2blogger_email'", ",", "None", ")", ...
35.913043
16.652174
def _bashcomplete(cmd, prog_name, complete_var=None): """Internal handler for the bash completion support.""" if complete_var is None: complete_var = '_%s_COMPLETE' % (prog_name.replace('-', '_')).upper() complete_instr = os.environ.get(complete_var) if not complete_instr: return from ._bashcomplete import bashcomplete if bashcomplete(cmd, prog_name, complete_var, complete_instr): fast_exit(1)
[ "def", "_bashcomplete", "(", "cmd", ",", "prog_name", ",", "complete_var", "=", "None", ")", ":", "if", "complete_var", "is", "None", ":", "complete_var", "=", "'_%s_COMPLETE'", "%", "(", "prog_name", ".", "replace", "(", "'-'", ",", "'_'", ")", ")", "."...
39.545455
18.181818
def round_float(f, digits, rounding=ROUND_HALF_UP): """ Accurate float rounding from http://stackoverflow.com/a/15398691. """ return Decimal(str(f)).quantize(Decimal(10) ** (-1 * digits), rounding=rounding)
[ "def", "round_float", "(", "f", ",", "digits", ",", "rounding", "=", "ROUND_HALF_UP", ")", ":", "return", "Decimal", "(", "str", "(", "f", ")", ")", ".", "quantize", "(", "Decimal", "(", "10", ")", "**", "(", "-", "1", "*", "digits", ")", ",", "r...
42.166667
13.166667
def partitionSum(M,I,T,step=None): """ INPUT PARAMETERS: M: HITRAN molecule number (required) I: HITRAN isotopologue number (required) T: temperature conditions (required) step: step to calculate temperatures (optional) OUTPUT PARAMETERS: TT: list of temperatures (present only if T is a list) PartSum: partition sums calculated on a list of temperatures --- DESCRIPTION: Calculate range of partition sums at different temperatures. This function uses a python implementation of TIPS-2011 code: Reference: A. L. Laraia, R. R. Gamache, J. Lamouroux, I. E. Gordon, L. S. Rothman. Total internal partition sums to support planetary remote sensing. Icarus, Volume 215, Issue 1, September 2011, Pages 391–400 http://dx.doi.org/10.1016/j.icarus.2011.06.004 Output depends on a structure of input parameter T so that: 1) If T is a scalar/list and step IS NOT provided, then calculate partition sums over each value of T. 2) If T is a list and step parameter IS provided, then calculate partition sums between T[0] and T[1] with a given step. --- EXAMPLE OF USAGE: PartSum = partitionSum(1,1,[296,1000]) TT,PartSum = partitionSum(1,1,[296,1000],step=0.1) --- """ # partitionSum if not step: if type(T) not in set([list,tuple]): return BD_TIPS_2011_PYTHON(M,I,T)[1] else: return [BD_TIPS_2011_PYTHON(M,I,temp)[1] for temp in T] else: #n = (T[1]-T[0])/step #TT = linspace(T[0],T[1],n) TT = arange(T[0],T[1],step) return TT,array([BD_TIPS_2011_PYTHON(M,I,temp)[1] for temp in TT])
[ "def", "partitionSum", "(", "M", ",", "I", ",", "T", ",", "step", "=", "None", ")", ":", "# partitionSum", "if", "not", "step", ":", "if", "type", "(", "T", ")", "not", "in", "set", "(", "[", "list", ",", "tuple", "]", ")", ":", "return", "BD_T...
40.727273
21.363636
def img(self, id): """Serve Pylons' stock images""" return self._serve_file(os.path.join(media_path, 'img', id))
[ "def", "img", "(", "self", ",", "id", ")", ":", "return", "self", ".", "_serve_file", "(", "os", ".", "path", ".", "join", "(", "media_path", ",", "'img'", ",", "id", ")", ")" ]
42.666667
16.333333
def notify(cls, user_or_email_, object_id=None, **filters): """Start notifying the given user or email address when this event occurs and meets the criteria given in ``filters``. Return the created (or the existing matching) Watch so you can call :meth:`~tidings.models.Watch.activate()` on it if you're so inclined. Implementations in subclasses may take different arguments; see the docstring of :meth:`is_notifying()`. Send an activation email if an anonymous watch is created and :data:`~django.conf.settings.TIDINGS_CONFIRM_ANONYMOUS_WATCHES` is ``True``. If the activation request fails, raise a ActivationRequestFailed exception. Calling :meth:`notify()` twice for an anonymous user will send the email each time. """ # A test-for-existence-then-create race condition exists here, but it # doesn't matter: de-duplication on fire() and deletion of all matches # on stop_notifying() nullify its effects. try: # Pick 1 if >1 are returned: watch = cls._watches_belonging_to_user( user_or_email_, object_id=object_id, **filters)[0:1].get() except Watch.DoesNotExist: create_kwargs = {} if cls.content_type: create_kwargs['content_type'] = \ ContentType.objects.get_for_model(cls.content_type) create_kwargs['email' if isinstance(user_or_email_, string_types) else 'user'] = user_or_email_ # Letters that can't be mistaken for other letters or numbers in # most fonts, in case people try to type these: distinguishable_letters = \ 'abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRTUVWXYZ' secret = ''.join(random.choice(distinguishable_letters) for x in range(10)) # Registered users don't need to confirm, but anonymous users do. 
is_active = ('user' in create_kwargs or not settings.TIDINGS_CONFIRM_ANONYMOUS_WATCHES) if object_id: create_kwargs['object_id'] = object_id watch = Watch.objects.create( secret=secret, is_active=is_active, event_type=cls.event_type, **create_kwargs) for k, v in iteritems(filters): WatchFilter.objects.create(watch=watch, name=k, value=hash_to_unsigned(v)) # Send email for inactive watches. if not watch.is_active: email = watch.user.email if watch.user else watch.email message = cls._activation_email(watch, email) try: message.send() except SMTPException as e: watch.delete() raise ActivationRequestFailed(e.recipients) return watch
[ "def", "notify", "(", "cls", ",", "user_or_email_", ",", "object_id", "=", "None", ",", "*", "*", "filters", ")", ":", "# A test-for-existence-then-create race condition exists here, but it", "# doesn't matter: de-duplication on fire() and deletion of all matches", "# on stop_not...
46.140625
18.46875
def env(config, endpoint): """Print RENKU environment variables. Run this command to configure your Renku client: $ eval "$(renku env)" """ access_token = config['endpoints'][endpoint]['token']['access_token'] click.echo('export {0}={1}'.format('RENKU_ENDPOINT', endpoint)) click.echo('export {0}={1}'.format('RENKU_ACCESS_TOKEN', access_token)) click.echo('# Run this command to configure your Renku client:') click.echo('# eval "$(renku env)"')
[ "def", "env", "(", "config", ",", "endpoint", ")", ":", "access_token", "=", "config", "[", "'endpoints'", "]", "[", "endpoint", "]", "[", "'token'", "]", "[", "'access_token'", "]", "click", ".", "echo", "(", "'export {0}={1}'", ".", "format", "(", "'RE...
36.692308
21.615385
def draw_multi_dispersion_chart(self, nan_locs): """Draws a multi dimensional dispersion chart, each color corresponds to a different target variable. """ for index, nan_values in enumerate(nan_locs): label, nan_locations = nan_values # if features passed in then, label as such if self.classes_ is not None: label = self.classes_[index] color = self.colors[index] x_, y_ = list(zip(*nan_locations)) self.ax.scatter(x_, y_, alpha=self.alpha, marker=self.marker, color=color, label=label)
[ "def", "draw_multi_dispersion_chart", "(", "self", ",", "nan_locs", ")", ":", "for", "index", ",", "nan_values", "in", "enumerate", "(", "nan_locs", ")", ":", "label", ",", "nan_locations", "=", "nan_values", "# if features passed in then, label as such", "if", "sel...
39.733333
15.6
def get_string_u_at_rva(self, rva, max_length = 2**16): """Get an Unicode string located at the given address.""" try: # If the RVA is invalid all would blow up. Some EXEs seem to be # specially nasty and have an invalid RVA. data = self.get_data(rva, 2) except PEFormatError, e: return None #length = struct.unpack('<H', data)[0] s = u'' for idx in xrange(max_length): try: uchr = struct.unpack('<H', self.get_data(rva+2*idx, 2))[0] except struct.error: break if unichr(uchr) == u'\0': break s += unichr(uchr) return s
[ "def", "get_string_u_at_rva", "(", "self", ",", "rva", ",", "max_length", "=", "2", "**", "16", ")", ":", "try", ":", "# If the RVA is invalid all would blow up. Some EXEs seem to be", "# specially nasty and have an invalid RVA.", "data", "=", "self", ".", "get_data", "...
31.041667
18.666667
def match(self, path_info: str) -> MatchResult: """ parse path_info and detect urlvars of url pattern """ matched = self.regex.match(path_info) if matched is None: return None matchlength = len(matched.group(0)) matchdict = matched.groupdict() try: matchdict = self.convert_values(matchdict) except ValueError: return None return MatchResult(matchdict, matchlength)
[ "def", "match", "(", "self", ",", "path_info", ":", "str", ")", "->", "MatchResult", ":", "matched", "=", "self", ".", "regex", ".", "match", "(", "path_info", ")", "if", "matched", "is", "None", ":", "return", "None", "matchlength", "=", "len", "(", ...
32
13.533333
def _program_files_from_executable(self, executable, required_paths, parent_dir=False): """ Get a list of program files by expanding a list of path patterns and interpreting it as relative to the executable. This method can be used as helper for implementing the method program_files(). Contrary to the default implementation of program_files(), this method does not explicitly add the executable to the list of returned files, it assumes that required_paths contains a path that covers the executable. @param executable: the path to the executable of the tool (typically the result of executable()) @param required_paths: a list of required path patterns @param parent_dir: whether required_paths are relative to the directory of executable or the parent directory @return a list of paths as strings, suitable for result of program_files() """ base_dir = os.path.dirname(executable) if parent_dir: base_dir = os.path.join(base_dir, os.path.pardir) return util.flatten( util.expand_filename_pattern(path, base_dir) for path in required_paths)
[ "def", "_program_files_from_executable", "(", "self", ",", "executable", ",", "required_paths", ",", "parent_dir", "=", "False", ")", ":", "base_dir", "=", "os", ".", "path", ".", "dirname", "(", "executable", ")", "if", "parent_dir", ":", "base_dir", "=", "...
64.944444
31.5
def activate(username): """Activate a user. Example: \b ```bash $ polyaxon user activate david ``` """ try: PolyaxonClient().user.activate_user(username) except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e: Printer.print_error('Could not activate user `{}`.'.format(username)) Printer.print_error('Error message `{}`.'.format(e)) sys.exit(1) Printer.print_success("User `{}` was activated successfully.".format(username))
[ "def", "activate", "(", "username", ")", ":", "try", ":", "PolyaxonClient", "(", ")", ".", "user", ".", "activate_user", "(", "username", ")", "except", "(", "PolyaxonHTTPError", ",", "PolyaxonShouldExitError", ",", "PolyaxonClientException", ")", "as", "e", "...
28.277778
26.611111
def _proxy(self): """ Generate an instance context for the instance, the context is capable of performing various actions. All instance actions are proxied to the context :returns: SettingsContext for this SettingsInstance :rtype: twilio.rest.voice.v1.dialing_permissions.settings.SettingsContext """ if self._context is None: self._context = SettingsContext(self._version, ) return self._context
[ "def", "_proxy", "(", "self", ")", ":", "if", "self", ".", "_context", "is", "None", ":", "self", ".", "_context", "=", "SettingsContext", "(", "self", ".", "_version", ",", ")", "return", "self", ".", "_context" ]
42.181818
22.363636
def set_proxy_pool(self, proxies, auth=None, https=True): """设置代理池 :param proxies: proxy列表, 形如 ``["ip1:port1", "ip2:port2"]`` :param auth: 如果代理需要验证身份, 通过这个参数提供, 比如 :param https: 默认为 True, 传入 False 则不设置 https 代理 .. code-block:: python from requests.auth import HTTPProxyAuth auth = HTTPProxyAuth('laike9m', '123') :说明: 每次 GET/POST 请求会随机选择列表中的代理 """ from random import choice if https: self.proxies = [{'http': p, 'https': p} for p in proxies] else: self.proxies = [{'http': p} for p in proxies] def get_with_random_proxy(url, **kwargs): proxy = choice(self.proxies) kwargs['proxies'] = proxy if auth: kwargs['auth'] = auth return self._session.original_get(url, **kwargs) def post_with_random_proxy(url, *args, **kwargs): proxy = choice(self.proxies) kwargs['proxies'] = proxy if auth: kwargs['auth'] = auth return self._session.original_post(url, *args, **kwargs) self._session.original_get = self._session.get self._session.get = get_with_random_proxy self._session.original_post = self._session.post self._session.post = post_with_random_proxy
[ "def", "set_proxy_pool", "(", "self", ",", "proxies", ",", "auth", "=", "None", ",", "https", "=", "True", ")", ":", "from", "random", "import", "choice", "if", "https", ":", "self", ".", "proxies", "=", "[", "{", "'http'", ":", "p", ",", "'https'", ...
35.131579
17
def get_access_token(self, http=None, additional_claims=None): """Create a signed jwt. Args: http: unused additional_claims: dict, additional claims to add to the payload of the JWT. Returns: An AccessTokenInfo with the signed jwt """ if additional_claims is None: if self.access_token is None or self.access_token_expired: self.refresh(None) return client.AccessTokenInfo( access_token=self.access_token, expires_in=self._expires_in()) else: # Create a 1 time token token, unused_expiry = self._create_token(additional_claims) return client.AccessTokenInfo( access_token=token, expires_in=self._MAX_TOKEN_LIFETIME_SECS)
[ "def", "get_access_token", "(", "self", ",", "http", "=", "None", ",", "additional_claims", "=", "None", ")", ":", "if", "additional_claims", "is", "None", ":", "if", "self", ".", "access_token", "is", "None", "or", "self", ".", "access_token_expired", ":", ...
40.3
17.1
def add_row(self, obj): """ fill a new row with the given obj obj instance of the exporter's model """ row = {} for column in self.headers: value = '' if '__col__' in column: if isinstance(column['__col__'], ColumnProperty): value = self._get_column_cell_val(obj, column) elif isinstance(column['__col__'], RelationshipProperty): value = self._get_relationship_cell_val(obj, column) row[column['name']] = value self._datas.append(self.format_row(row))
[ "def", "add_row", "(", "self", ",", "obj", ")", ":", "row", "=", "{", "}", "for", "column", "in", "self", ".", "headers", ":", "value", "=", "''", "if", "'__col__'", "in", "column", ":", "if", "isinstance", "(", "column", "[", "'__col__'", "]", ","...
26.913043
22.043478
def callback(self, username, request): """ Having :username: return user's identifiers or None. """ credentials = self._get_credentials(request) if credentials: username, api_key = credentials if self.check: return self.check(username, api_key, request)
[ "def", "callback", "(", "self", ",", "username", ",", "request", ")", ":", "credentials", "=", "self", ".", "_get_credentials", "(", "request", ")", "if", "credentials", ":", "username", ",", "api_key", "=", "credentials", "if", "self", ".", "check", ":", ...
44.428571
9.857143
def validate_opts(opts, brokers_num): """Basic option validation. Returns True if the options are not valid, False otherwise. :param opts: the command line options :type opts: map :param brokers_num: the number of brokers :type brokers_num: integer :returns: bool """ if opts.skip < 0 or opts.skip >= brokers_num: print("Error: --skip must be >= 0 and < #brokers") return True if opts.check_count < 0: print("Error: --check-count must be >= 0") return True if opts.unhealthy_time_limit < 0: print("Error: --unhealthy-time-limit must be >= 0") return True if opts.check_count == 0: print("Warning: no check will be performed") if opts.check_interval < 0: print("Error: --check-interval must be >= 0") return True return False
[ "def", "validate_opts", "(", "opts", ",", "brokers_num", ")", ":", "if", "opts", ".", "skip", "<", "0", "or", "opts", ".", "skip", ">=", "brokers_num", ":", "print", "(", "\"Error: --skip must be >= 0 and < #brokers\"", ")", "return", "True", "if", "opts", "...
33.16
13.84
def update_warning(self): """Update the warning label, buttons state and sequence text.""" new_qsequence = self.new_qsequence new_sequence = self.new_sequence self.text_new_sequence.setText( new_qsequence.toString(QKeySequence.NativeText)) conflicts = self.check_conflicts() if len(self._qsequences) == 0: warning = SEQUENCE_EMPTY tip = '' icon = QIcon() elif conflicts: warning = SEQUENCE_CONFLICT template = '<i>{0}<b>{1}</b>{2}</i>' tip_title = _('The new shortcut conflicts with:') + '<br>' tip_body = '' for s in conflicts: tip_body += ' - {0}: {1}<br>'.format(s.context, s.name) tip_body = tip_body[:-4] # Removing last <br> tip_override = '<br>Press <b>OK</b> to unbind ' tip_override += 'it' if len(conflicts) == 1 else 'them' tip_override += ' and assign it to <b>{}</b>'.format(self.name) tip = template.format(tip_title, tip_body, tip_override) icon = get_std_icon('MessageBoxWarning') elif new_sequence in BLACKLIST: warning = IN_BLACKLIST template = '<i>{0}<b>{1}</b></i>' tip_title = _('Forbidden key sequence!') + '<br>' tip_body = '' use = BLACKLIST[new_sequence] if use is not None: tip_body = use tip = template.format(tip_title, tip_body) icon = get_std_icon('MessageBoxWarning') elif self.check_singlekey() is False or self.check_ascii() is False: warning = INVALID_KEY template = '<i>{0}</i>' tip = _('Invalid key sequence entered') + '<br>' icon = get_std_icon('MessageBoxWarning') else: warning = NO_WARNING tip = 'This shortcut is valid.' icon = get_std_icon('DialogApplyButton') self.warning = warning self.conflicts = conflicts self.helper_button.setIcon(icon) self.button_ok.setEnabled( self.warning in [NO_WARNING, SEQUENCE_CONFLICT]) self.label_warning.setText(tip) # Everytime after update warning message, update the label height new_height = self.label_warning.sizeHint().height() self.label_warning.setMaximumHeight(new_height)
[ "def", "update_warning", "(", "self", ")", ":", "new_qsequence", "=", "self", ".", "new_qsequence", "new_sequence", "=", "self", ".", "new_sequence", "self", ".", "text_new_sequence", ".", "setText", "(", "new_qsequence", ".", "toString", "(", "QKeySequence", "....
43.854545
14.272727
def _partition(s, sep, find): """ (str|unicode).(partition|rpartition) for Python 2.4/2.5. """ idx = find(sep) if idx != -1: left = s[0:idx] return left, sep, s[len(left)+len(sep):]
[ "def", "_partition", "(", "s", ",", "sep", ",", "find", ")", ":", "idx", "=", "find", "(", "sep", ")", "if", "idx", "!=", "-", "1", ":", "left", "=", "s", "[", "0", ":", "idx", "]", "return", "left", ",", "sep", ",", "s", "[", "len", "(", ...
26.25
12.5
def options(self, context, module_options): ''' PATH Path to the file containing raw shellcode to inject PROCID Process ID to inject into (default: current powershell process) ''' if not 'PATH' in module_options: context.log.error('PATH option is required!') exit(1) self.shellcode_path = os.path.expanduser(module_options['PATH']) if not os.path.exists(self.shellcode_path): context.log.error('Invalid path to shellcode!') exit(1) self.procid = None if 'PROCID' in module_options.keys(): self.procid = module_options['PROCID'] self.ps_script = obfs_ps_script('powersploit/CodeExecution/Invoke-Shellcode.ps1')
[ "def", "options", "(", "self", ",", "context", ",", "module_options", ")", ":", "if", "not", "'PATH'", "in", "module_options", ":", "context", ".", "log", ".", "error", "(", "'PATH option is required!'", ")", "exit", "(", "1", ")", "self", ".", "shellcode_...
35.666667
25.47619
def get_mean_and_stddevs(self, sctx, rctx, dctx, imt, stddev_types): """ Returns the mean and standard deviations """ # Return Distance Tables imls = self._return_tables(rctx.mag, imt, "IMLs") # Get distance vector for the given magnitude idx = numpy.searchsorted(self.m_w, rctx.mag) dists = self.distances[:, 0, idx - 1] # Get mean and standard deviations mean = self._get_mean(imls, dctx, dists) stddevs = self._get_stddevs(dists, rctx.mag, dctx, imt, stddev_types) if self.amplification: # Apply amplification mean_amp, sigma_amp = self.amplification.get_amplification_factors( imt, sctx, rctx, getattr(dctx, self.distance_type), stddev_types) mean = numpy.log(mean) + numpy.log(mean_amp) for iloc in range(len(stddev_types)): stddevs[iloc] *= sigma_amp[iloc] return mean, stddevs else: return numpy.log(mean), stddevs
[ "def", "get_mean_and_stddevs", "(", "self", ",", "sctx", ",", "rctx", ",", "dctx", ",", "imt", ",", "stddev_types", ")", ":", "# Return Distance Tables", "imls", "=", "self", ".", "_return_tables", "(", "rctx", ".", "mag", ",", "imt", ",", "\"IMLs\"", ")",...
41.076923
13.230769
def get_json(self): """Create JSON data for slot. :returns: JSON data for slot as follows: { "@SlotIdx":0, "OnboardControllers":{ "OnboardController": [ ] }, "AddOnCards":{ "AddOnCard": [ ] } } """ json = self.get_basic_json() if self.onboard_cards: json['OnboardControllers'] = { 'OnboardController': [c.get_json() for c in self.onboard_cards.values()] } if self.addon_cards: json['AddOnCards'] = { 'AddOnCard': [c.get_json() for c in self.addon_cards.values()] } return json
[ "def", "get_json", "(", "self", ")", ":", "json", "=", "self", ".", "get_basic_json", "(", ")", "if", "self", ".", "onboard_cards", ":", "json", "[", "'OnboardControllers'", "]", "=", "{", "'OnboardController'", ":", "[", "c", ".", "get_json", "(", ")", ...
28.25
16.392857
def secure(self): ''' Creates a hard link to the target file in the vault directory and saves information about the target file in the database ''' verbose('Saving information about target into conman database') self._id = self.db.insertTarget(self.name, self.path) verbose('Creating a hard link from {} to {} directory'.format( str(self), config.CONMAN_PATH )) link(self.real_path, self.vault_path)
[ "def", "secure", "(", "self", ")", ":", "verbose", "(", "'Saving information about target into conman database'", ")", "self", ".", "_id", "=", "self", ".", "db", ".", "insertTarget", "(", "self", ".", "name", ",", "self", ".", "path", ")", "verbose", "(", ...
40.333333
25.5
def tanh_warp_arb(X, l1, l2, lw, x0): r"""Warps the `X` coordinate with the tanh model .. math:: l = \frac{l_1 + l_2}{2} - \frac{l_1 - l_2}{2}\tanh\frac{x-x_0}{l_w} Parameters ---------- X : :py:class:`Array`, (`M`,) or scalar float `M` locations to evaluate length scale at. l1 : positive float Small-`X` saturation value of the length scale. l2 : positive float Large-`X` saturation value of the length scale. lw : positive float Length scale of the transition between the two length scales. x0 : float Location of the center of the transition between the two length scales. Returns ------- l : :py:class:`Array`, (`M`,) or scalar float The value of the length scale at the specified point. """ if isinstance(X, scipy.ndarray): if isinstance(X, scipy.matrix): X = scipy.asarray(X, dtype=float) return 0.5 * ((l1 + l2) - (l1 - l2) * scipy.tanh((X - x0) / lw)) else: return 0.5 * ((l1 + l2) - (l1 - l2) * mpmath.tanh((X - x0) / lw))
[ "def", "tanh_warp_arb", "(", "X", ",", "l1", ",", "l2", ",", "lw", ",", "x0", ")", ":", "if", "isinstance", "(", "X", ",", "scipy", ".", "ndarray", ")", ":", "if", "isinstance", "(", "X", ",", "scipy", ".", "matrix", ")", ":", "X", "=", "scipy"...
34.645161
20.935484
def call_fdel(self, obj) -> None: """Remove the predefined custom value and call the delete function.""" self.fdel(obj) try: del vars(obj)[self.name] except KeyError: pass
[ "def", "call_fdel", "(", "self", ",", "obj", ")", "->", "None", ":", "self", ".", "fdel", "(", "obj", ")", "try", ":", "del", "vars", "(", "obj", ")", "[", "self", ".", "name", "]", "except", "KeyError", ":", "pass" ]
31.571429
13.857143
def write_reaction(self, value_dict):
    """Insert one reaction row plus its associated reaction_system rows.

    :param value_dict: reaction column values keyed by the names returned
        from ``get_key_list``, plus ``'ase_ids'`` (structure name -> ase id)
        and an optional ``'energy_corrections'`` mapping
        (structure name -> correction value).
    :returns: the database id of the newly inserted reaction row.
    """
    # Reuse the attached connection when one exists; otherwise open a
    # private connection that is committed and closed before returning.
    con = self.connection or self._connect()
    self._initialize(con)
    cur = con.cursor()
    ase_ids = value_dict['ase_ids']
    energy_corrections = value_dict.get('energy_corrections', {})
    # Column names (starting at index 1) and their values in matching order.
    key_list = get_key_list(start_index=1)
    values = [value_dict[key] for key in key_list]
    key_str = get_key_str('reaction', start_index=1)
    value_str = get_value_str(values)

    # NOTE(review): values are interpolated into the SQL text via
    # get_value_str — assumed to be pre-escaped by that helper; confirm.
    # RETURNING id yields the primary key of the inserted reaction.
    insert_command = \
        """INSERT INTO reaction ({0}) VALUES ({1}) RETURNING id;"""\
        .format(key_str, value_str)

    cur.execute(insert_command)
    id = cur.fetchone()[0]

    reaction_system_values = []
    """ Write to reaction_system tables"""
    for name, ase_id in ase_ids.items():
        # Default to a zero energy correction when none was supplied.
        if name in energy_corrections:
            energy_correction = energy_corrections[name]
        else:
            energy_correction = 0
        reaction_system_values += [tuple([name, energy_correction,
                                          ase_id, id])]
    key_str = get_key_str('reaction_system')
    insert_command = """INSERT INTO reaction_system ({0}) VALUES %s ON CONFLICT DO NOTHING;""".format(key_str)

    # Batch-insert every reaction_system row; duplicates are ignored by
    # ON CONFLICT DO NOTHING.
    execute_values(cur=cur, sql=insert_command,
                   argslist=reaction_system_values, page_size=1000)

    # Only commit/close connections this call opened itself.
    if self.connection is None:
        con.commit()
        con.close()
    return id
[ "def", "write_reaction", "(", "self", ",", "value_dict", ")", ":", "con", "=", "self", ".", "connection", "or", "self", ".", "_connect", "(", ")", "self", ".", "_initialize", "(", "con", ")", "cur", "=", "con", ".", "cursor", "(", ")", "ase_ids", "="...
31.695652
18.043478
def compute_process_sigmas(self, dt, fx=None, **fx_args):
    """Propagate the sigma points of the current (x, P) through the process model.

    Fills ``self.sigmas_f`` in place with ``fx(sigma, dt, **fx_args)`` for each
    sigma point drawn from the current state estimate. Normally called
    internally, but useful when update() runs more than once between calls
    to predict (multiple simultaneous measurements), so the sigma points
    correctly reflect the updated state x, P.
    """
    transition = self.fx if fx is None else fx
    # Sigma points for the current mean and covariance estimate.
    points = self.points_fn.sigma_points(self.x, self.P)
    idx = 0
    for point in points:
        self.sigmas_f[idx] = transition(point, dt, **fx_args)
        idx += 1
[ "def", "compute_process_sigmas", "(", "self", ",", "dt", ",", "fx", "=", "None", ",", "*", "*", "fx_args", ")", ":", "if", "fx", "is", "None", ":", "fx", "=", "self", ".", "fx", "# calculate sigma points for given mean and covariance", "sigmas", "=", "self",...
37.235294
22.176471
def get_firmware(self):
    """Refresh device status and return the firmware version string."""
    self.get_status()
    try:
        version = self.data['fw_version']
    except TypeError:
        # self.data was not subscriptable (e.g. None): report a placeholder.
        version = 'Unknown'
    self.firmware = version
    return version
[ "def", "get_firmware", "(", "self", ")", ":", "self", ".", "get_status", "(", ")", "try", ":", "self", ".", "firmware", "=", "self", ".", "data", "[", "'fw_version'", "]", "except", "TypeError", ":", "self", ".", "firmware", "=", "'Unknown'", "return", ...
27.555556
15.666667
async def create_payment_address(wallet_handle: int,
                                 payment_method: str,
                                 config: str) -> str:
    """
    Create the payment address for specified payment method

    This method generates private part of payment address
    and stores it in a secure place. Ideally it should be
    secret in libindy wallet (see crypto module).

    Note that payment method should be able to resolve this
    secret by fully resolvable payment address format.

    :param wallet_handle: wallet handle (created by open_wallet).
    :param payment_method: Payment method to use (for example, 'sov').
    :param config: payment address config as json:
      {
        seed: <str>, // allows deterministic creation of payment address
      }
    :return: payment_address: public identifier of payment address in fully resolvable payment address format.
    """
    logger = logging.getLogger(__name__)
    logger.debug("create_payment_address: >>> wallet_handle: %r, payment_method: %r, config: %r",
                 wallet_handle,
                 payment_method,
                 config)

    # The ctypes callback is created once and cached as an attribute on the
    # function object, so the same callback instance is reused (and stays
    # referenced, preventing garbage collection) across calls.
    if not hasattr(create_payment_address, "cb"):
        logger.debug("create_payment_address: Creating callback")
        create_payment_address.cb = create_cb(CFUNCTYPE(None, c_int32, c_int32, c_char_p))

    # Marshal the Python arguments into ctypes values for the native call.
    c_wallet_handle = c_int32(wallet_handle)
    c_payment_method = c_char_p(payment_method.encode('utf-8'))
    # NOTE(review): this rebinds the `config` parameter itself to a c_char_p
    # instead of using a fresh `c_config` name; behavior is unchanged.
    config = c_char_p(config.encode('utf-8'))

    request_result = await do_call('indy_create_payment_address',
                                   c_wallet_handle,
                                   c_payment_method,
                                   config,
                                   create_payment_address.cb)

    # The native layer returns bytes; decode to the documented str result.
    res = request_result.decode()

    logger.debug("create_payment_address: <<< res: %r", res)
    return res
[ "async", "def", "create_payment_address", "(", "wallet_handle", ":", "int", ",", "payment_method", ":", "str", ",", "config", ":", "str", ")", "->", "str", ":", "logger", "=", "logging", ".", "getLogger", "(", "__name__", ")", "logger", ".", "debug", "(", ...
40.326087
23.065217
def predict(self, X):
    """Classify each sample in an array of test vectors X.

    Parameters
    ----------
    X : array-like, shape = [n_samples, n_features]

    Returns
    -------
    C : array, shape = [n_samples]
        Predicted target values for X
    """
    log_likelihoods = self._joint_log_likelihood(X)
    # Index of the most likely class per row, mapped back to class labels.
    best_class_idx = da.argmax(log_likelihoods, axis=1)
    return delayed(self.classes_)[best_class_idx]
[ "def", "predict", "(", "self", ",", "X", ")", ":", "jll", "=", "self", ".", "_joint_log_likelihood", "(", "X", ")", "return", "delayed", "(", "self", ".", "classes_", ")", "[", "da", ".", "argmax", "(", "jll", ",", "axis", "=", "1", ")", "]" ]
31.384615
13.538462
def find_le(self, dt):
    """Return the index of the rightmost date less than or equal to *dt*.

    *dt* must be a python ``datetime.date`` instance. Raises
    :class:`dynts.exceptions.LeftOutOfBound` when *dt* precedes every
    stored date.
    """
    insertion_point = bisect_right(self.dates, dt)
    if insertion_point == 0:
        raise LeftOutOfBound
    return insertion_point - 1
[ "def", "find_le", "(", "self", ",", "dt", ")", ":", "i", "=", "bisect_right", "(", "self", ".", "dates", ",", "dt", ")", "if", "i", ":", "return", "i", "-", "1", "raise", "LeftOutOfBound" ]
32.083333
14.583333
def v_reference_leaf_leafref(ctx, stmt):
    """Verify that all leafrefs in a leaf or leaf-list have correct path"""
    # Only process statements that carry an unexpanded leafref type.
    if not hasattr(stmt, 'i_leafref'):
        return
    if stmt.i_leafref is None or stmt.i_leafref_expanded is not False:
        return

    path_spec_type = stmt.i_leafref
    # Non-config targets are acceptable when require-instance is false.
    accept_non_config = not path_spec_type.require_instance
    result = validate_leafref_path(
        ctx, stmt,
        path_spec_type.path_spec,
        path_spec_type.path_,
        accept_non_config_target=accept_non_config)
    if result is None:
        return
    ptr, expanded_path, path_list = result
    path_spec_type.i_target_node = ptr
    path_spec_type.i_expanded_path = expanded_path
    path_spec_type.i_path_list = path_list
    stmt.i_leafref_expanded = True
    if ptr is not None:
        chk_status(ctx, stmt, ptr)
        stmt.i_leafref_ptr = (ptr, path_spec_type.pos)
[ "def", "v_reference_leaf_leafref", "(", "ctx", ",", "stmt", ")", ":", "if", "(", "hasattr", "(", "stmt", ",", "'i_leafref'", ")", "and", "stmt", ".", "i_leafref", "is", "not", "None", "and", "stmt", ".", "i_leafref_expanded", "is", "False", ")", ":", "pa...
41.391304
11.521739
def get_prefix(self, include_version=True):
    """Build the URL prefix to prepend to requests from the configured host.

    :param include_version: when True, append the current API version
        segment (with a trailing slash) to the prefix.
    """
    host = settings.host
    if '://' not in host:
        # Bare hostnames default to https.
        host = 'https://%s' % host.strip('/')
    elif host.startswith('http://') and settings.verify_ssl:
        raise exc.TowerCLIError(
            'Can not verify ssl with non-https protocol. Change the '
            'verify_ssl configuration setting to continue.'
        )
    # Reject anything that is not an http(s) URL.
    scheme = urlparse(host)[0]
    if scheme not in ('http', 'https'):
        raise exc.ConnectionError('URL must be http(s), {} is not valid'.format(scheme))

    prefix = urljoin(host, '/api/')
    if not include_version:
        return prefix
    # The trailing slash keeps the version as a directory-style component.
    return urljoin(prefix, "{}/".format(CUR_API_VERSION))
[ "def", "get_prefix", "(", "self", ",", "include_version", "=", "True", ")", ":", "host", "=", "settings", ".", "host", "if", "'://'", "not", "in", "host", ":", "host", "=", "'https://%s'", "%", "host", ".", "strip", "(", "'/'", ")", "elif", "host", "...
43.826087
18.826087
def update(self, reservation_status=values.unset,
           worker_activity_sid=values.unset, instruction=values.unset,
           dequeue_post_work_activity_sid=values.unset,
           dequeue_from=values.unset, dequeue_record=values.unset,
           dequeue_timeout=values.unset, dequeue_to=values.unset,
           dequeue_status_callback_url=values.unset, call_from=values.unset,
           call_record=values.unset, call_timeout=values.unset,
           call_to=values.unset, call_url=values.unset,
           call_status_callback_url=values.unset, call_accept=values.unset,
           redirect_call_sid=values.unset, redirect_accept=values.unset,
           redirect_url=values.unset, to=values.unset, from_=values.unset,
           status_callback=values.unset, status_callback_method=values.unset,
           status_callback_event=values.unset, timeout=values.unset,
           record=values.unset, muted=values.unset, beep=values.unset,
           start_conference_on_enter=values.unset,
           end_conference_on_exit=values.unset, wait_url=values.unset,
           wait_method=values.unset, early_media=values.unset,
           max_participants=values.unset,
           conference_status_callback=values.unset,
           conference_status_callback_method=values.unset,
           conference_status_callback_event=values.unset,
           conference_record=values.unset, conference_trim=values.unset,
           recording_channels=values.unset,
           recording_status_callback=values.unset,
           recording_status_callback_method=values.unset,
           conference_recording_status_callback=values.unset,
           conference_recording_status_callback_method=values.unset,
           region=values.unset, sip_auth_username=values.unset,
           sip_auth_password=values.unset,
           dequeue_status_callback_event=values.unset,
           post_work_activity_sid=values.unset, supervisor_mode=values.unset,
           supervisor=values.unset,
           end_conference_on_customer_exit=values.unset,
           beep_on_customer_entrance=values.unset):
    """
    Update the ReservationInstance.

    Every keyword maps one-to-one onto a POST field of the Taskrouter
    reservation endpoint and defaults to ``values.unset``, so only the
    arguments explicitly supplied by the caller are sent.

    :param ReservationInstance.Status reservation_status: New reservation status
    :param unicode worker_activity_sid: New worker activity sid if rejecting a reservation
    :param unicode instruction: Assignment instruction for reservation
    :param dequeue_\\*: Options for a Dequeue instruction — post-work activity
        sid, caller id, recording flag, timeout, worker contact URI and
        completed-call status callback URL / events.
    :param call_\\*: Options for a Call instruction — caller id, recording
        flag, timeout, worker contact URI, TwiML URL, status callback URL
        and accept flag.
    :param redirect_\\*: Options for a Redirect instruction — parked call
        sid, accept flag and TwiML redirect URL.
    :param to/from_/conference\\*/recording\\*/wait\\*/beep/muted/...: Options
        for a Conference instruction — participant addressing, status and
        recording callbacks, conference recording/trim behaviour and
        conference lifecycle flags.
    :param unicode region: The region
    :param unicode sip_auth_username: The sip_auth_username
    :param unicode sip_auth_password: The sip_auth_password
    :param unicode post_work_activity_sid: New worker activity sid after executing a Conference instruction
    :param ReservationInstance.SupervisorMode supervisor_mode: Supervisor mode when executing the Supervise instruction
    :param unicode supervisor: Supervisor sid/uri when executing the Supervise instruction
    :param bool end_conference_on_customer_exit: The end_conference_on_customer_exit
    :param bool beep_on_customer_entrance: The beep_on_customer_entrance

    :returns: Updated ReservationInstance
    :rtype: twilio.rest.taskrouter.v1.workspace.task.reservation.ReservationInstance
    """
    # Assemble the POST payload; each API field mirrors one keyword above.
    data = values.of({
        'ReservationStatus': reservation_status,
        'WorkerActivitySid': worker_activity_sid,
        'Instruction': instruction,
        'DequeuePostWorkActivitySid': dequeue_post_work_activity_sid,
        'DequeueFrom': dequeue_from,
        'DequeueRecord': dequeue_record,
        'DequeueTimeout': dequeue_timeout,
        'DequeueTo': dequeue_to,
        'DequeueStatusCallbackUrl': dequeue_status_callback_url,
        'CallFrom': call_from,
        'CallRecord': call_record,
        'CallTimeout': call_timeout,
        'CallTo': call_to,
        'CallUrl': call_url,
        'CallStatusCallbackUrl': call_status_callback_url,
        'CallAccept': call_accept,
        'RedirectCallSid': redirect_call_sid,
        'RedirectAccept': redirect_accept,
        'RedirectUrl': redirect_url,
        'To': to,
        'From': from_,
        'StatusCallback': status_callback,
        'StatusCallbackMethod': status_callback_method,
        'StatusCallbackEvent': serialize.map(status_callback_event, lambda e: e),
        'Timeout': timeout,
        'Record': record,
        'Muted': muted,
        'Beep': beep,
        'StartConferenceOnEnter': start_conference_on_enter,
        'EndConferenceOnExit': end_conference_on_exit,
        'WaitUrl': wait_url,
        'WaitMethod': wait_method,
        'EarlyMedia': early_media,
        'MaxParticipants': max_participants,
        'ConferenceStatusCallback': conference_status_callback,
        'ConferenceStatusCallbackMethod': conference_status_callback_method,
        'ConferenceStatusCallbackEvent': serialize.map(conference_status_callback_event, lambda e: e),
        'ConferenceRecord': conference_record,
        'ConferenceTrim': conference_trim,
        'RecordingChannels': recording_channels,
        'RecordingStatusCallback': recording_status_callback,
        'RecordingStatusCallbackMethod': recording_status_callback_method,
        'ConferenceRecordingStatusCallback': conference_recording_status_callback,
        'ConferenceRecordingStatusCallbackMethod': conference_recording_status_callback_method,
        'Region': region,
        'SipAuthUsername': sip_auth_username,
        'SipAuthPassword': sip_auth_password,
        'DequeueStatusCallbackEvent': serialize.map(dequeue_status_callback_event, lambda e: e),
        'PostWorkActivitySid': post_work_activity_sid,
        'SupervisorMode': supervisor_mode,
        'Supervisor': supervisor,
        'EndConferenceOnCustomerExit': end_conference_on_customer_exit,
        'BeepOnCustomerEntrance': beep_on_customer_entrance,
    })

    payload = self._version.update(
        'POST',
        self._uri,
        data=data,
    )

    # Wrap the raw payload in a new instance bound to the same solution
    # context (workspace, task and reservation sids).
    return ReservationInstance(
        self._version,
        payload,
        workspace_sid=self._solution['workspace_sid'],
        task_sid=self._solution['task_sid'],
        sid=self._solution['sid'],
    )
[ "def", "update", "(", "self", ",", "reservation_status", "=", "values", ".", "unset", ",", "worker_activity_sid", "=", "values", ".", "unset", ",", "instruction", "=", "values", ".", "unset", ",", "dequeue_post_work_activity_sid", "=", "values", ".", "unset", ...
62.09816
28.797546
def list_tag(self, limit=500, offset=0):
    """Return up to *limit* tags for this Thing, starting at *offset*.

    The result is a plain list of tag strings, e.g.

        #!python
        [ "mytag1", "mytag2", "ein_name", "nochein_name" ]

    Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException)
    containing the error if the infrastructure detects a problem, and
    [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException)
    if there is a communications problem between you and the infrastructure.

    `limit` (optional) (integer) Return at most this many tags

    `offset` (optional) (integer) Return tags starting at this offset
    """
    client = self._client
    request_evt = client._request_entity_tag_list(
        self.__lid, limit=limit, offset=offset)
    client._wait_and_except_if_failed(request_evt)
    return request_evt.payload['tags']
[ "def", "list_tag", "(", "self", ",", "limit", "=", "500", ",", "offset", "=", "0", ")", ":", "evt", "=", "self", ".", "_client", ".", "_request_entity_tag_list", "(", "self", ".", "__lid", ",", "limit", "=", "limit", ",", "offset", "=", "offset", ")"...
32.689655
26.62069