Unnamed: 0
int64
0
389k
code
stringlengths
26
79.6k
docstring
stringlengths
1
46.9k
373,900
def iterlines(s): prevnl = -1 while True: nextnl = s.find(, prevnl + 1) if nextnl < 0: yield s[(prevnl+1):] break else: yield s[(prevnl+1):nextnl] prevnl = nextnl
A generator form of s.split('\n') for reducing memory overhead. Parameters ---------- s : str A multi-line string. Yields ------ line : str A string.
373,901
def byte_href_anchors(self, chars=False): input_buffer = self.clean_html if chars: input_buffer = input_buffer.decode() idx = 0 parts = input_buffer.split() assert len(.join(parts) ) == len(input_buffer) for part in parts: m = anchors_re.match(part) if not m: idx += len(part) + 4 continue before = m.group() ahref = m.group() idx += len(before) + len(ahref) first = idx length = len(m.group()) idx += length + 4 if chars: yield m.group().encode(), first, length, m.group().encode() else: yield m.group(), first, length, m.group() assert idx - 4 == len(input_buffer)
simple, regex-based extractor of anchor tags, so we can compute BYTE offsets for anchor texts and associate them with their href. Generates tuple(href_string, first_byte, byte_length, anchor_text)
373,902
def dens_in_meanmatterdens(vo,ro,H=70.,Om=0.3): return dens_in_criticaldens(vo,ro,H=H)/Om
NAME: dens_in_meanmatterdens PURPOSE: convert density to units of the mean matter density INPUT: vo - velocity unit in km/s ro - length unit in kpc H= (default: 70) Hubble constant in km/s/Mpc Om= (default: 0.3) Omega matter OUTPUT: conversion from units where vo=1. at ro=1. to units of the mean matter density HISTORY: 2014-01-28 - Written - Bovy (IAS)
373,903
def update_file_ext(filename, ext=, sep=): r path, filename = os.path.split(filename) if ext and ext[0] == sep: ext = ext[1:] return os.path.join(path, sep.join(filename.split(sep)[:-1 if filename.count(sep) > 1 else 1] + [ext]))
r"""Force the file or path str to end with the indicated extension Note: a dot (".") is assumed to delimit the extension >>> from __future__ import unicode_literals >>> update_file_ext('/home/hobs/extremofile', 'bac') '/home/hobs/extremofile.bac' >>> update_file_ext('/home/hobs/piano.file/', 'music') '/home/hobs/piano.file/.music' >>> update_file_ext('/home/ninja.hobs/Anglofile', '.uk') '/home/ninja.hobs/Anglofile.uk' >>> update_file_ext('/home/ninja-corsi/audio', 'file', sep='-') '/home/ninja-corsi/audio-file'
373,904
def set_bind(self): IntegerEntry.set_bind(self) self.bind(, lambda e: self.set(self.imin)) self.bind(, lambda e: self.set(self.imax))
Sets key bindings -- we need this more than once
373,905
def close_available(self): return { : self.volume_long - self.volume_long_frozen, : self.volume_short - self.volume_short_frozen }
可平仓数量 Returns: [type] -- [description]
373,906
def _create(self, tree): tablename = tree.table indexes = [] global_indexes = [] hash_key = None range_key = None attrs = {} for declaration in tree.attrs: name, type_ = declaration[:2] if len(declaration) > 2: index = declaration[2] else: index = None if index is not None: if index[0] == "HASH": field = hash_key = DynamoKey(name, data_type=TYPES[type_]) elif index[0] == "RANGE": field = range_key = DynamoKey(name, data_type=TYPES[type_]) else: index_type = index[0] kwargs = {} if index_type[0] in ("ALL", "INDEX"): factory = LocalIndex.all elif index_type[0] == "KEYS": factory = LocalIndex.keys elif index_type[0] == "INCLUDE": factory = LocalIndex.include kwargs["includes"] = [resolve(v) for v in index.include_vars] index_name = resolve(index[1]) field = DynamoKey(name, data_type=TYPES[type_]) idx = factory(index_name, field, **kwargs) indexes.append(idx) else: field = DynamoKey(name, data_type=TYPES[type_]) attrs[field.name] = field for gindex in tree.global_indexes: global_indexes.append(self._parse_global_index(gindex, attrs)) throughput = None if tree.throughput: throughput = Throughput(*map(resolve, tree.throughput)) try: ret = self.connection.create_table( tablename, hash_key, range_key, indexes=indexes, global_indexes=global_indexes, throughput=throughput, ) except DynamoDBError as e: if e.kwargs["Code"] == "ResourceInUseException" or tree.not_exists: return False raise return True
Run a SELECT statement
373,907
def orth_gs(order, dist, normed=False, sort="GR", cross_truncation=1., **kws): logger = logging.getLogger(__name__) dim = len(dist) if isinstance(order, int): if order == 0: return chaospy.poly.Poly(1, dim=dim) basis = chaospy.poly.basis( 0, order, dim, sort, cross_truncation=cross_truncation) else: basis = order basis = list(basis) polynomials = [basis[0]] if normed: for idx in range(1, len(basis)): for idy in range(idx): orth = chaospy.descriptives.E( basis[idx]*polynomials[idy], dist, **kws) basis[idx] = basis[idx] - polynomials[idy]*orth norms = chaospy.descriptives.E(polynomials[-1]**2, dist, **kws) if norms <= 0: logger.warning("Warning: Polynomial cutoff at term %d", idx) break basis[idx] = basis[idx] / numpy.sqrt(norms) polynomials.append(basis[idx]) else: norms = [1.] for idx in range(1, len(basis)): for idy in range(idx): orth = chaospy.descriptives.E( basis[idx]*polynomials[idy], dist, **kws) basis[idx] = basis[idx] - polynomials[idy] * orth / norms[idy] norms.append( chaospy.descriptives.E(polynomials[-1]**2, dist, **kws)) if norms[-1] <= 0: logger.warning("Warning: Polynomial cutoff at term %d", idx) break polynomials.append(basis[idx]) return chaospy.poly.Poly(polynomials, dim=dim, shape=(len(polynomials),))
Gram-Schmidt process for generating orthogonal polynomials. Args: order (int, Poly): The upper polynomial order. Alternative a custom polynomial basis can be used. dist (Dist): Weighting distribution(s) defining orthogonality. normed (bool): If True orthonormal polynomials will be used instead of monic. sort (str): Ordering argument passed to poly.basis. If custom basis is used, argument is ignored. cross_truncation (float): Use hyperbolic cross truncation scheme to reduce the number of terms in expansion. Returns: (Poly): The orthogonal polynomial expansion. Examples: >>> Z = chaospy.J(chaospy.Normal(), chaospy.Normal()) >>> print(chaospy.around(chaospy.orth_gs(2, Z), 4)) [1.0, q1, q0, q1^2-1.0, q0q1, q0^2-1.0]
373,908
def air_gap(self, volume=None, height=None): if not self.tip_attached: log.warning("Cannot perform air_gap without a tip attached.") if height is None: height = 5 do_publish(self.broker, commands.air_gap, self.air_gap, , self, None, self, volume, height) if volume != 0: location = self.previous_placeable.top(height) self, None, self, volume, height) return self
Pull air into the :any:`Pipette` current tip Notes ----- If no `location` is passed, the pipette will touch_tip from it's current position. Parameters ---------- volume : number The amount in uL to aspirate air into the tube. (Default will use all remaining volume in tip) height : number The number of millimiters to move above the current Placeable to perform and air-gap aspirate (Default will be 10mm above current Placeable) Returns ------- This instance of :class:`Pipette`. Examples -------- .. >>> from opentrons import instruments, robot # doctest: +SKIP >>> robot.reset() # doctest: +SKIP >>> p300 = instruments.P300_Single(mount='left') # doctest: +SKIP >>> p300.aspirate(50, plate[0]) # doctest: +SKIP >>> p300.air_gap(50) # doctest: +SKIP
373,909
def _get_question_map(self, question_id): if question_id.get_authority() == ASSESSMENT_AUTHORITY: key = match_value = ObjectId(question_id.get_identifier()) else: key = match_value = str(question_id) for question_map in self._my_map[]: if question_map[key] == match_value: return question_map raise errors.NotFound()
get question map from questions matching question_id This can make sense of both Section assigned Ids or normal Question/Item Ids
373,910
def writeProxy(self, obj): proxy = self.context.getProxyForObject(obj) self.writeObject(proxy, is_proxy=True)
Encodes a proxied object to the stream. @since: 0.6
373,911
def match_range(self, field, start=None, stop=None, inclusive=True, required=True, new_group=False): if start is None: start = "*" if stop is None: stop = "*" if start == "*" and stop == "*": return self.match_exists(field, required=required, new_group=new_group) if inclusive: value = "[" + str(start) + " TO " + str(stop) + "]" else: value = "{" + str(start) + " TO " + str(stop) + "}" return self.match_field(field, value, required=required, new_group=new_group)
Add a ``field:[some range]`` term to the query. Matches will have a ``value`` in the range in the ``field``. Arguments: field (str): The field to check for the value. The field must be namespaced according to Elasticsearch rules using the dot syntax. For example, ``"mdf.source_name"`` is the ``source_name`` field of the ``mdf`` dictionary. start (str or int): The starting value, or ``None`` for no lower bound. **Default:** ``None``. stop (str or int): The ending value, or ``None`` for no upper bound. **Default:** ``None``. inclusive (bool): If ``True``, the ``start`` and ``stop`` values will be included in the search. If ``False``, the start and stop values will not be included in the search. **Default:** ``True``. required (bool): If ``True``, will add term with ``AND``. If ``False``, will use ``OR``. **Default:** ``True``. new_group (bool): If ``True``, will separate the term into a new parenthetical group. If ``False``, will not. **Default:** ``False``. Returns: SearchHelper: Self
373,912
def url_request(target_url, output_file): request = urllib.request.urlopen(target_url) with open(output_file, ) as targets: total_length = int(request.headers.get()) with click.progressbar(length=total_length, label=) as bar: while True: data = request.read(4096) if not data: break targets.write(data) bar.update(len(data))
Use urllib to download the requested file from the target URL. Use the click progress bar to print download progress :param target_url: URL from which the file is to be downloaded :param output_file: Name and path of local copy of file
373,913
def bitmask(*args): mask = 0 for a in args: if type(a) is tuple: for b in range(a[1], a[0]+1): mask |= 1 << b elif type(a) is list: for b in a: mask |= 1 << b elif type(a) is int: mask |= 1 << a return mask
! @brief Returns a mask with specified bit ranges set. An integer mask is generated based on the bits and bit ranges specified by the arguments. Any number of arguments can be provided. Each argument may be either a 2-tuple of integers, a list of integers, or an individual integer. The result is the combination of masks produced by the arguments. - 2-tuple: The tuple is a bit range with the first element being the MSB and the second element the LSB. All bits from LSB up to and included MSB are set. - list: Each bit position specified by the list elements is set. - int: The specified bit position is set. @return An integer mask value computed from the logical OR'ing of masks generated by each argument. Example: @code >>> hex(bitmask((23,17),1)) 0xfe0002 >>> hex(bitmask([4,0,2],(31,24)) 0xff000015 @endcode
373,914
def add_product_version_to_build_configuration(id=None, name=None, product_version_id=None): data = remove_product_version_from_build_configuration_raw(id, name, product_version_id) if data: return utils.format_json_list(data)
Associate an existing ProductVersion with a BuildConfiguration
373,915
def make_published(self, request, queryset): cnt1 = queryset.filter( date_published__isnull=True, publish=False, ).update(date_published=timezone.now(), publish=True) cnt2 = queryset.filter( date_published__isnull=False, publish=False, ).update(publish=True) messages.add_message( request, messages.INFO, __(, , cnt1+cnt2) % { : cnt1+cnt2, })
Bulk action to mark selected posts as published. If the date_published field is empty the current time is saved as date_published. queryset must not be empty (ensured by DjangoCMS).
373,916
def css(self, *props, **kwprops): self._stable = False styles = {} if props: if len(props) == 1 and isinstance(props[0], Mapping): styles = props[0] else: raise WrongContentError(self, props, "Arguments not valid") elif kwprops: styles = kwprops else: raise WrongContentError(self, None, "args OR wkargs are needed") return self.attr(style=styles)
Adds css properties to this element.
373,917
def list_all_categories(cls, **kwargs): kwargs[] = True if kwargs.get(): return cls._list_all_categories_with_http_info(**kwargs) else: (data) = cls._list_all_categories_with_http_info(**kwargs) return data
List Categories Return a list of Categories This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.list_all_categories(async=True) >>> result = thread.get() :param async bool :param int page: page number :param int size: page size :param str sort: page order :return: page[Category] If the method is called asynchronously, returns the request thread.
373,918
def _scroll(clicks, x=None, y=None): startx, starty = _position() width, height = _size() if x is None: x = startx else: if x < 0: x = 0 elif x >= width: x = width - 1 if y is None: y = starty else: if y < 0: y = 0 elif y >= height: y = height - 1 try: _sendMouseEvent(MOUSEEVENTF_WHEEL, x, y, dwData=clicks) except (PermissionError, OSError): pass
Send the mouse vertical scroll event to Windows by calling the mouse_event() win32 function. Args: clicks (int): The amount of scrolling to do. A positive value is the mouse wheel moving forward (scrolling up), a negative value is backwards (down). x (int): The x position of the mouse event. y (int): The y position of the mouse event. Returns: None
373,919
def active_url(context, urls, css=None): request = context[] if request.get_full_path in (reverse(url) for url in urls.split()): return css if css else return
Highlight menu item based on url tag. Returns a css class if ``request.path`` is in given ``url``. :param url: Django url to be reversed. :param css: Css class to be returned for highlighting. Return active if none set.
373,920
def i4_sobol_generate(dim_num, n, skip=1): r = np.full((n, dim_num), np.nan) for j in range(n): seed = j + skip r[j, 0:dim_num], next_seed = i4_sobol(dim_num, seed) return r
i4_sobol_generate generates a Sobol dataset. Parameters: Input, integer dim_num, the spatial dimension. Input, integer N, the number of points to generate. Input, integer SKIP, the number of initial points to skip. Output, real R(M,N), the points.
373,921
def subsets_of_fileinfo_from_txt(filename): if not os.path.isfile(filename): raise ValueError("File " + filename + " not found!") with open(filename) as f: file_content = f.read().splitlines() dict_of_subsets_of_fileinfo = {} label = None sublist_of_fileinfo = [] idict = 0 ifiles = 0 nfiles = 0 sublist_finished = True for line in file_content: if len(line) > 0: if line[0] != : if label is None: if line[0] == "@": nfiles = int(line[1:].split()[0]) label = line[1:].split()[1] sublist_of_fileinfo = [] ifiles = 0 sublist_finished = False else: raise ValueError("Expected @ symbol not found!") else: if line[0] == "@": raise ValueError("Unexpected @ symbol found!") tmplist = line.split() tmpfile = tmplist[0] if len(tmplist) > 1: tmpinfo = tmplist[1:] else: tmpinfo = None if not os.path.isfile(tmpfile): raise ValueError("File " + tmpfile + " not found!") sublist_of_fileinfo.append(FileInfo(tmpfile, tmpinfo)) ifiles += 1 if ifiles == nfiles: dict_of_subsets_of_fileinfo[idict] = {} tmpdict = dict_of_subsets_of_fileinfo[idict] tmpdict[] = label tmpdict[] = sublist_of_fileinfo idict += 1 label = None sublist_of_fileinfo = [] ifiles = 0 sublist_finished = True if not sublist_finished: raise ValueError("Unexpected end of sublist of files.") return dict_of_subsets_of_fileinfo
Returns a dictionary with subsets of FileInfo instances from a TXT file. Each subset of files must be preceded by a line: @ <number> <label> where <number> indicates the number of files in that subset, and <label> is a label for that subset. Any additional text beyond <label> in the same line is ignored. Note that blank lines or lines starting by the hash symbol are also ignored. The name of the files comprising each subset will be obtained from the first contiguous character string in every line (thus, the rest of the line will be discarded). Parameters ---------- filename : string Name of a TXT file containing a list of FITS files grouped in different subsets by the @ symbol. Returns ------- dict_of_subsets_of_fileinfo : dictionary Dictionary containing as many entries as different subsets of files available. Each value of the dictionary is a dictionary with a label (sequential number starting at zero) and the list of FileInfo instances within subset.
373,922
def build(self, message): context = None if message.message_type in [Types.CALL_REQ, Types.CALL_RES]: self.verify_message(message) context = self.build_context(message) if message.flags == common.FlagsType.fragment: self.message_buffer[message.id] = context num = 0 for i, arg in enumerate(context.argstreams): if arg.state != StreamState.completed: num = i break self.close_argstream(context, num) return context elif message.message_type in [Types.CALL_REQ_CONTINUE, Types.CALL_RES_CONTINUE]: context = self.message_buffer.get(message.id) if context is None: raise FatalProtocolError( "missing call message after receiving continue message", message.id, ) dst = 0 for i, arg in enumerate(context.argstreams): if arg.state != StreamState.completed: dst = i break try: self.verify_message(message) except InvalidChecksumError as e: context.argstreams[dst].set_exception(e) raise src = 0 while src < len(message.args): context.argstreams[dst].write(message.args[src]) dst += 1 src += 1 if message.flags != FlagsType.fragment: assert (len(context.argstreams) == CallContinueMessage.max_args_num) self.message_buffer.pop(message.id, None) context.flags = FlagsType.none self.close_argstream(context, dst - 1) return None elif message.message_type == Types.ERROR: context = self.message_buffer.pop(message.id, None) if context is None: log.info(, message) return None else: error = TChannelError.from_code( message.code, description=message.description, tracing=context.tracing, ) context.set_exception(error) return error else: return message
buffer all the streaming messages based on the message id. Reconstruct all fragments together. :param message: incoming message :return: next complete message or None if streaming is not done
373,923
def tree_match(cls, field, string): s subscription key matches the string. {}:tree_{}:::'.join(pieces[0:i+1]) for i in range(len(pieces)) ) return sorted(map( lambda id: cls.get(id), map( debyte_string, ans ) ), key=lambda x:x.id)
Given a tree index, retrieves the ids atached to the given prefix, think of if as a mechanism for pattern suscription, where two models attached to the `a`, `a:b` respectively are found by the `a:b` string, because both model's subscription key matches the string.
373,924
def to_json(self): return { : self.user_id, : self.username, : self.roles, : self.auth_system }
Exports the object to a JSON friendly dict Returns: Dict representation of the object
373,925
def reserve_items(self, parent_item, *items): conn = redis.StrictRedis(connection_pool=self.pool) self._run_expiration(conn) script = conn.register_script() result = script(keys=[self._key_available(), self._key_workers(), self._key_reservations(parent_item)], args=([parent_item, self._get_worker_id(conn)] + list(items))) if result == -1: raise LostLease(parent_item) return result
Reserve a set of items until a parent item is returned. Prevent ``check_out_item()`` from returning any of ``items`` until ``parent_item`` is completed or times out. For each item, if it is not already checked out or reserved by some other parent item, it is associated with ``parent_item``, and the reservation will be released when ``parent_item`` completes or times out. Returns a list that is a subset of ``items`` for which we could get the reservation. Raises ``LostLease`` if this queue instance no longer owns ``parent_item``. If any of the items do not exist, they are silently ignored.
373,926
def to_json(self): endpoints = [] for endpoint in self.endpoints: list_fields = endpoint.fields resource_type = endpoint.Meta.resource_type table = endpoint.Meta.table data = endpoint.to_dict() data[] = resource_type.get_type_of_fields( list_fields, table, ) endpoints.append(data) data = { : self.title, : sorted(endpoints, key=lambda x: x[]), } return json.dumps(data)
Prepare data for the initial state of the admin-on-rest
373,927
def skip(cb, msg, attributes): if not all(a in msg for a in attributes): return True plot = get_cb_plot(cb) return (not getattr(plot, , False) or not hasattr(plot.current_frame, ))
Skips applying transforms if data is not geographic.
373,928
def logged_exception(self, e): if str(e) not in self._errors: self._errors.append(str(e)) self.set_error_state() self.buildstate.state.exception_type = str(e.__class__.__name__) self.buildstate.state.exception = str(e)
Record the exception, but don't log it; it's already been logged :param e: Exception to log.
373,929
def project_update(object_id, input_params={}, always_retry=True, **kwargs): return DXHTTPRequest( % object_id, input_params, always_retry=always_retry, **kwargs)
Invokes the /project-xxxx/update API method. For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Projects#API-method%3A-%2Fproject-xxxx%2Fupdate
373,930
def guest_create_network_interface(self, userid, os_version, guest_networks, active=False): if len(guest_networks) == 0: errmsg = ("API guest_create_network_interface: " "Network information is required but not provided") raise exception.SDKInvalidInputFormat(msg=errmsg) for network in guest_networks: vdev = nic_id = mac_addr = ip_addr = OSA = None if in network.keys(): vdev = network[] if in network.keys(): OSA = network[] if in network.keys(): nic_id = network[] if (( in network.keys()) and (network[] is not None)): mac_addr = network[] if not zvmutils.valid_mac_addr(mac_addr): errmsg = ("API guest_create_network_interface: " "Invalid mac address, format should be " "xx:xx:xx:xx:xx:xx, and x is a hexadecimal " "digit") raise exception.SDKInvalidInputFormat(msg=errmsg) if (( in network.keys()) and (network[] is not None)): ip_addr = network[] if not netaddr.valid_ipv4(ip_addr): errmsg = ("API guest_create_network_interface: " "Invalid management IP address, it should be " "the value between 0.0.0.0 and 255.255.255.255") raise exception.SDKInvalidInputFormat(msg=errmsg) if (( in network.keys()) and (network[] is not None)): if not isinstance(network[], list): raise exception.SDKInvalidInputTypes( , str(list), str(type(network[]))) for dns in network[]: if not netaddr.valid_ipv4(dns): errmsg = ("API guest_create_network_interface: " "Invalid dns IP address, it should be the " "value between 0.0.0.0 and 255.255.255.255") raise exception.SDKInvalidInputFormat(msg=errmsg) if (( in network.keys()) and (network[] is not None)): if not netaddr.valid_ipv4( network[]): errmsg = ("API guest_create_network_interface: " "Invalid gateway IP address, it should be " "the value between 0.0.0.0 and 255.255.255.255") raise exception.SDKInvalidInputFormat(msg=errmsg) if (( in network.keys()) and (network[] is not None)): if not zvmutils.valid_cidr(network[]): errmsg = ("API guest_create_network_interface: " "Invalid CIDR, format should be a.b.c.d/n, and " "a.b.c.d is IP address, n is 
the value " "between 0-32") raise exception.SDKInvalidInputFormat(msg=errmsg) try: if OSA is None: used_vdev = self._networkops.create_nic(userid, vdev=vdev, nic_id=nic_id, mac_addr=mac_addr, active=active) else: used_vdev = self._networkops.dedicate_OSA(userid, OSA, vdev=vdev, active=active) network[] = used_vdev except exception.SDKBaseException: LOG.error(() % userid) raise try: self._networkops.network_configuration(userid, os_version, guest_networks, active=active) except exception.SDKBaseException: LOG.error(() % userid) raise return guest_networks
Create network interface(s) for the guest inux system. It will create the nic for the guest, add NICDEF record into the user direct. It will also construct network interface configuration files and punch the files to the guest. These files will take effect when initializing and configure guest. :param str userid: the user id of the guest :param str os_version: operating system version of the guest :param list guest_networks: a list of network info for the guest. It has one dictionary that contain some of the below keys for each network, the format is: {'ip_addr': (str) IP address or None, 'dns_addr': (list) dns addresses or None, 'gateway_addr': (str) gateway address or None, 'cidr': (str) cidr format, 'nic_vdev': (str)nic VDEV, 1- to 4- hexadecimal digits or None, 'nic_id': (str) nic identifier or None, 'mac_addr': (str) mac address or None, it is only be used when changing the guest's user direct. Format should be xx:xx:xx:xx:xx:xx, and x is a hexadecimal digit 'osa_device': (str) OSA address or None} Example for guest_networks: [{'ip_addr': '192.168.95.10', 'dns_addr': ['9.0.2.1', '9.0.3.1'], 'gateway_addr': '192.168.95.1', 'cidr': "192.168.95.0/24", 'nic_vdev': '1000', 'mac_addr': '02:00:00:12:34:56'}, {'ip_addr': '192.168.96.10', 'dns_addr': ['9.0.2.1', '9.0.3.1'], 'gateway_addr': '192.168.96.1', 'cidr': "192.168.96.0/24", 'nic_vdev': '1003}] :param bool active: whether add a nic on active guest system :returns: guest_networks list, including nic_vdev for each network :rtype: list
373,931
def extract(self, msg): def normal(key): v = msg.get(key) if v is None: return v normalizer = self.normalizers.get(key, lambda x: x) return normalizer(v) def odict(keys): return collections.OrderedDict((k, normal(k)) for k in keys) def match(m): return (msg.get(k) in v for k, v in m.items()) if m else () accept = all(match(self.accept)) reject = any(match(self.reject)) if reject or not accept: keys = () elif self.keys_by_type is None: keys = [k for k in msg.keys() if k not in self.omit] else: keys = self.keys_by_type.get(msg.get()) return odict(keys)
Yield an ordered dictionary if msg['type'] is in keys_by_type.
373,932
def get_collection(self, lang=None, task=None): if lang: id = "{}{}".format(Downloader.LANG_PREFIX, lang) elif task: id = "{}{}".format(Downloader.TASK_PREFIX, task) else: raise ValueError("You should pass either the task or the lang") try: return self.info(id) except ValueError as e: if lang: raise LanguageNotSupported("Language {} is not supported".format(id)) if task: raise TaskNotSupported("Task {} is not supported".format(id))
Return the collection that represents a specific language or task. Args: lang (string): Language code. task (string): Task name.
373,933
def delete_variable(self, key): key = str(key) if self.is_locked(key): raise RuntimeError("Global variable is locked") with self.__global_lock: if key in self.__global_variable_dictionary: access_key = self.lock_variable(key, block=True) del self.__global_variable_dictionary[key] self.unlock_variable(key, access_key) del self.__variable_locks[key] del self.__variable_references[key] else: raise AttributeError("Global variable %s does not exist!" % str(key)) logger.debug("Global variable %s was deleted!" % str(key))
Deletes a global variable :param key: the key of the global variable to be deleted :raises exceptions.AttributeError: if the global variable does not exist
373,934
def _cmp_by_router_id(local_asn, path1, path2): def get_asn(path_source): if path_source is None: return local_asn else: return path_source.remote_as def get_router_id(path, local_bgp_id): path_source = path.source if path_source is None: return local_bgp_id else: originator_id = path.get_pattr(BGP_ATTR_TYPE_ORIGINATOR_ID) if originator_id: return originator_id.value return path_source.protocol.recv_open_msg.bgp_identifier path_source1 = path1.source path_source2 = path2.source if path_source1 is None and path_source2 is None: return None asn1 = get_asn(path_source1) asn2 = get_asn(path_source2) is_ebgp1 = asn1 != local_asn is_ebgp2 = asn2 != local_asn if is_ebgp1 and is_ebgp2: return None if ((is_ebgp1 is True and is_ebgp2 is False) or (is_ebgp1 is False and is_ebgp2 is True)): raise ValueError( ) if path_source1 is not None: local_bgp_id = path_source1.protocol.sent_open_msg.bgp_identifier else: local_bgp_id = path_source2.protocol.sent_open_msg.bgp_identifier router_id1 = get_router_id(path1, local_bgp_id) router_id2 = get_router_id(path2, local_bgp_id) if router_id1 == router_id2: return None from ryu.services.protocols.bgp.utils.bgp import from_inet_ptoi if from_inet_ptoi(router_id1) < from_inet_ptoi(router_id2): return path1 else: return path2
Select the route received from the peer with the lowest BGP router ID. If both paths are eBGP paths, then we do not do any tie breaking, i.e we do not pick best-path based on this criteria. RFC: http://tools.ietf.org/html/rfc5004 We pick best path between two iBGP paths as usual.
373,935
def discord_to_users(self, memberlist): _memberlist = self.get_discord_user(x.id for x in memberlist) return list(set(x.owner() for x in _memberlist))
expects a list of discord.py user objects returns a list of TrainerDex.py user objects
373,936
def get_iam_policy(self, client=None): client = self._require_client(client) query_params = {} if self.user_project is not None: query_params["userProject"] = self.user_project info = client._connection.api_request( method="GET", path="%s/iam" % (self.path,), query_params=query_params, _target_object=None, ) return Policy.from_api_repr(info)
Retrieve the IAM policy for the bucket. See https://cloud.google.com/storage/docs/json_api/v1/buckets/getIamPolicy If :attr:`user_project` is set, bills the API request to that project. :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current bucket. :rtype: :class:`google.api_core.iam.Policy` :returns: the policy instance, based on the resource returned from the ``getIamPolicy`` API request.
373,937
def write(filepath, data, **kwargs): if filepath.lower().endswith(): return _write_csv(filepath, data, kwargs) elif filepath.lower().endswith(): return _write_json(filepath, data, kwargs) elif filepath.lower().endswith(): return _write_jsonl(filepath, data, kwargs) elif filepath.lower().endswith(): return _write_pickle(filepath, data, kwargs) elif (filepath.lower().endswith() or filepath.lower().endswith()): raise NotImplementedError( ) elif (filepath.lower().endswith() or filepath.lower().endswith()): raise NotImplementedError( ) else: raise NotImplementedError({}\.format(filepath))
Write a file. Supported formats: * CSV * JSON, JSONL * pickle Parameters ---------- filepath : str Path to the file that should be read. This methods action depends mainly on the file extension. data : dict or list Content that should be written kwargs : dict Any keywords for the specific file format. Returns ------- data : str or bytes
373,938
def xmatch_kdtree(kdtree, extra, extdecl, xmatchdistdeg, closestonly=True): external ext_cosdecl = np.cos(np.radians(extdecl)) ext_sindecl = np.sin(np.radians(extdecl)) ext_cosra = np.cos(np.radians(extra)) ext_sinra = np.sin(np.radians(extra)) ext_xyz = np.column_stack((ext_cosra*ext_cosdecl, ext_sinra*ext_cosdecl, ext_sindecl)) ext_xyzdist = 2.0 * np.sin(np.radians(xmatchdistdeg)/2.0) our_kdt = kdtree ext_kdt = sps.cKDTree(ext_xyz) extkd_matchinds = our_kdt.query_ball_tree(ext_kdt, ext_xyzdist) ext_matchinds = [] kdt_matchinds = [] for extind, mind in enumerate(extkd_matchinds): if len(mind) > 0: kdt_matchinds.append(extind) if closestonly: ext_matchinds.append(mind[0]) else: ext_matchinds.append(mind) return kdt_matchinds, ext_matchinds
This cross-matches between `kdtree` and (`extra`, `extdecl`) arrays. Returns the indices of the kdtree and the indices of extra, extdecl that xmatch successfully. Parameters ---------- kdtree : scipy.spatial.CKDTree This is a kdtree object generated by the `make_kdtree` function. extra,extdecl : array-like These are np.arrays of 'external' coordinates in decimal degrees that will be cross-matched against the objects in `kdtree`. xmatchdistdeg : float The match radius to use for the cross-match in decimal degrees. closestonly : bool If closestonly is True, then this function returns only the closest matching indices in (extra, extdecl) for each object in kdtree if there are any matches. Otherwise, it returns a list of indices in (extra, extdecl) for all matches within xmatchdistdeg between kdtree and (extra, extdecl). Returns ------- tuple of lists Returns a tuple of the form:: (list of `kdtree` indices matching to external objects, list of all `extra`/`extdecl` indices that match to each element in `kdtree` within the specified cross-match distance)
373,939
def redirect_n_times(n):
    """302-redirect the client *n* times before landing on the main view.

    The ``absolute`` query parameter ("true"/"false") selects absolute vs.
    relative redirect URLs.
    """
    assert n > 0
    use_absolute = request.args.get("absolute", "false").lower() == "true"

    # final hop goes straight to the index view
    if n == 1:
        return redirect(url_for("view_get", _external=use_absolute))

    kind = "absolute" if use_absolute else "relative"
    return _redirect(kind, n, use_absolute)
302 Redirects n times. --- tags: - Redirects parameters: - in: path name: n type: int produces: - text/html responses: 302: description: A redirection.
373,940
# Check whether Windows credentials are valid by running `hostname` on the
# remote host via winexe; returns True when the command exits with code 0.
# NOTE(review): the username default and the credential portion of the format
# strings (presumably "{0}%{1}") were lost in this copy — "username=," is a
# syntax error as-is; restore before use.
# NOTE(review): the loop returns on its first iteration, so `retries` and
# `retry_delay` are effectively unused — looks like a bug in the original.
def validate_windows_cred_winexe(host, username=, password=None, retries=10, retry_delay=1):
    # command actually executed (would contain the password)
    cmd = "winexe -U //{2} \"hostname\"".format( username, password, host )
    # password-free variant used only for logging
    logging_cmd = "winexe -U //{1} \"hostname\"".format( username, host )
    for i in range(retries):
        ret_code = win_cmd( cmd, logging_command=logging_cmd )
        return ret_code == 0
Check if the windows credentials are valid
373,941
# Add a user bookmark to the current annotations at `time` (tuple of
# start/end in seconds), prompting the user for a name.
# NOTE(review): several string literals were stripped from this copy (the
# error message and the QInputDialog prompt), leaving invalid syntax in the
# getText call; restore them before use.
def add_bookmark(self, time):
    if self.annot is None:
        # no annotation file loaded -- show an error dialog and bail out
        msg = lg.debug(msg)
        error_dialog = QErrorMessage()
        error_dialog.setWindowTitle()
        error_dialog.showMessage(msg)
        error_dialog.exec()
        return
    # prompt the user for the bookmark name, then refresh the display
    answer = QInputDialog.getText(self, , s nameAdded Bookmark at ' + str(time))
    self.update_annotations()
Run this function when user adds a new bookmark. Parameters ---------- time : tuple of float start and end of the new bookmark, in s
373,942
# pyangbind-generated setter for the YANG rpc node
# /brocade_mpls_rpc/show_mpls_ldp_path. Wraps the value in a YANGDynClass
# and stores it on the private attribute.
# NOTE(review): the YANG extension/namespace string literals were stripped
# from this copy (empty u'' keys and values), so it will not run as-is.
def _set_show_mpls_ldp_path(self, v, load=False):
    # unwrap typed YANG wrappers back to their base python type
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v,base=show_mpls_ldp_path.show_mpls_ldp_path, is_leaf=True, yang_name="show-mpls-ldp-path", rest_name="show-mpls-ldp-path", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u: {u: u, u: u}}, namespace=, defining_module=, yang_type=, is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            : ,
            : "rpc",
            : ,
        })
    self.__show_mpls_ldp_path = t
    # notify the data model of the change, if the hook exists
    if hasattr(self, ):
        self._set()
Setter method for show_mpls_ldp_path, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_path (rpc) If this variable is read-only (config: false) in the source YANG file, then _set_show_mpls_ldp_path is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_show_mpls_ldp_path() directly.
373,943
def make_orthographic_map(central_longitude=0, central_latitude=0,
                          figsize=(8, 8), add_land=True, land_color='tan',
                          add_ocean=False, ocean_color='lightblue',
                          grid_lines=True, lat_grid=None, lon_grid=None):
    """Create and return an orthographic map axis using cartopy.

    Parameters
    ----------
    central_longitude, central_latitude : float
        Center of the orthographic projection (default 0, 0).
    figsize : tuple
        Figure size in inches (default (8, 8)).
    add_land : bool
        Whether to draw the land feature (default True).
    land_color : str
        Land fill color (default 'tan').
    add_ocean : bool
        Whether to draw the ocean feature (default False).
    ocean_color : str
        Ocean fill color (default 'lightblue').
    grid_lines : bool
        Whether to draw grid lines (default True).
    lat_grid, lon_grid : list of float or None
        Latitude/longitude grid line locations; None selects the defaults
        (30-degree spacing, denser near the poles for latitude).

    Returns
    -------
    matplotlib axis with the cartopy orthographic projection, or None when
    cartopy is not installed.
    """
    # avoid mutable default arguments: resolve the grid defaults here
    if lat_grid is None:
        lat_grid = [-80., -60., -30., 0., 30., 60., 80.]
    if lon_grid is None:
        lon_grid = [-180., -150., -120., -90., -60., -30., 0.,
                    30., 60., 90., 120., 150., 180.]

    if not has_cartopy:
        # TODO(review): original message string was lost; confirm wording
        print('cartopy is required to make maps, but it is not installed')
        return

    fig = plt.figure(figsize=figsize)
    map_projection = ccrs.Orthographic(
        central_longitude=central_longitude,
        central_latitude=central_latitude)
    ax = plt.axes(projection=map_projection)
    ax.set_global()
    if add_ocean:
        ax.add_feature(cartopy.feature.OCEAN, zorder=0, facecolor=ocean_color)
    if add_land:
        # TODO(review): original edgecolor literal was lost; 'black' assumed
        ax.add_feature(cartopy.feature.LAND, zorder=0,
                       facecolor=land_color, edgecolor='black')
    if grid_lines:
        # TODO(review): original color/linestyle literals were lost
        ax.gridlines(xlocs=lon_grid, ylocs=lat_grid, linewidth=1,
                     color='gray', linestyle='--')
    return ax
Function creates and returns an orthographic map projection using cartopy Example ------- >>> map_axis = make_orthographic_map(central_longitude=200,central_latitude=30) Optional Parameters ----------- central_longitude : central longitude of projection (default is 0) central_latitude : central latitude of projection (default is 0) figsize : size of the figure (default is 8x8) add_land : chose whether land is plotted on map (default is true) land_color : specify land color (default is 'tan') add_ocean : chose whether land is plotted on map (default is False, change to True to plot) ocean_color : specify ocean color (default is 'lightblue') grid_lines : chose whether gird lines are plotted on map (default is true) lat_grid : specify the latitude grid (default is 30 degree spacing) lon_grid : specify the longitude grid (default is 30 degree spacing)
373,944
def process_track(self, track, frame_size=400, hop_size=160, sr=None,
                  start=0, end=float('inf'), utterance=None, corpus=None):
    """Process the track in **offline** mode, in one go.

    Args:
        track (Track): The track to process.
        frame_size (int): Number of samples per frame.
        hop_size (int): Number of samples between two frames.
        sr (int): Sampling rate to use; ``None`` keeps the track's native rate.
        start (float): Start point within the track, in seconds.
        end (float): End point within the track, in seconds
            (``float('inf')`` means until the end of the track).
        utterance (Utterance): Associated utterance, if available.
        corpus (Corpus): Corpus this track belongs to, if available.

    Returns:
        np.ndarray: The processed features.

    Raises:
        ValueError: If the selected region of the track contains no samples.
    """
    frame_settings = units.FrameSettings(frame_size, hop_size)

    # 'inf' sentinel restored: a finite `end` limits the read duration
    if end != float('inf'):
        samples = track.read_samples(sr=sr, offset=start, duration=end - start)
    else:
        samples = track.read_samples(sr=sr, offset=start)

    if sr is None:
        sr = track.sampling_rate

    if samples.size <= 0:
        # TODO(review): original message string was lost; confirm wording
        raise ValueError('Track has no samples: {}'.format(track.idx))

    # zero-pad the tail so the last frame is complete
    num_frames = frame_settings.num_frames(samples.size)
    num_pad_samples = (num_frames - 1) * hop_size + frame_size
    if num_pad_samples > samples.size:
        samples = np.pad(samples, (0, num_pad_samples - samples.size),
                         mode='constant', constant_values=0)

    sampling_rate = sr or utterance.sampling_rate
    frames = librosa.util.frame(samples, frame_length=frame_size,
                                hop_length=hop_size).T
    return self.process_frames(frames, sampling_rate, 0, last=True,
                               utterance=utterance, corpus=corpus)
Process the track in **offline** mode, in one go. Args: track (Track): The track to process. frame_size (int): The number of samples per frame. hop_size (int): The number of samples between two frames. sr (int): Use the given sampling rate. If ``None``, uses the native sampling rate from the underlying data. start (float): The point within the track in seconds, to start processing from. end (float): The point within the track in seconds, to end processing. utterance (Utterance): The utterance that is associated with this track, if available. corpus (Corpus): The corpus this track is part of, if available. Returns: np.ndarray: The processed features.
373,945
def delete_file(f):
    """Remove the file referenced by the given JB_File from disk.

    :param f: the file to delete
    :type f: :class:`JB_File`
    :returns: None
    :raises: :class:`OSError` if the file cannot be removed
    """
    fullpath = f.get_fullpath()
    log.info("Deleting file %s", fullpath)
    os.remove(fullpath)
Delete the given file :param f: the file to delete :type f: :class:`JB_File` :returns: None :rtype: None :raises: :class:`OSError`
373,946
# Fetch one page (up to 100 entries) of names registered in `namespace_id`
# from a Blockstack node. Returns a list of valid name strings on success,
# or a dict with an error description and HTTP-style status on failure.
# NOTE(review): many string literals (assert messages, JSON schema keys,
# error messages, response-dict keys) were stripped from this copy, leaving
# invalid syntax; restore them before use.
def get_names_in_namespace_page(namespace_id, offset, count, proxy=None, hostport=None):
    assert proxy or hostport,
    if proxy is None:
        proxy = connect_hostport(hostport)
    # the server enforces a maximum page size
    assert count <= 100, .format(count)
    names_schema = {
        : ,
        : {
            : {
                : ,
                : {
                    : ,
                    : True
                },
            },
        },
        : [
            ,
        ],
    }
    schema = json_response_schema( names_schema )
    resp = {}
    try:
        resp = proxy.get_names_in_namespace(namespace_id, offset, count)
        resp = json_validate(schema, resp)
        if json_is_error(resp):
            return resp
        # drop any syntactically-invalid names the server returned
        valid_names = []
        for n in resp[]:
            if not is_name_valid(str(n)):
                log.error(.format(str(n)))
            else:
                valid_names.append(n)
        return valid_names
    except ValidationError as e:
        # malformed server response
        if BLOCKSTACK_DEBUG:
            log.exception(e)
        resp = {: , : 502}
        return resp
    except socket.timeout:
        log.error("Connection timed out")
        resp = {: , : 503}
        return resp
    except socket.error as se:
        log.error("Connection error {}".format(se.errno))
        resp = {: , : 502}
        return resp
    except Exception as ee:
        if BLOCKSTACK_DEBUG:
            log.exception(ee)
        log.error("Caught exception while connecting to Blockstack node: {}".format(ee))
        resp = {: , : 500}
        return resp
Get a page of names in a namespace Returns the list of names on success Returns {'error': ...} on error
373,947
# Unpack a STANDARD_INFORMATION attribute from its binary representation.
# Chooses between the full NTFS 3+ layout (with class/owner/security ids,
# quota and USN) and the shorter pre-NTFS-3 layout based on stream size.
# See base class for the interface contract.
def _from_binary_stdinfo(cls, binary_stream):
    if len(binary_stream) == cls._REPR.size:
        # full layout: four FILETIME timestamps, flags, versions and the
        # NTFS 3 extension fields
        t_created, t_changed, t_mft_changed, t_accessed, flags, m_ver, ver, \
        c_id, o_id, s_id, quota_charged, usn = cls._REPR.unpack(binary_stream)
        nw_obj = cls(
            (
                Timestamps((convert_filetime(t_created), convert_filetime(t_changed),
                            convert_filetime(t_mft_changed), convert_filetime(t_accessed))
                           ),
                FileInfoFlags(flags),
                m_ver, ver, c_id, o_id, s_id, quota_charged, usn))
    else:
        # shorter layout: the NTFS 3 extension fields are absent, fill None
        t_created, t_changed, t_mft_changed, t_accessed, flags, m_ver, ver, \
        c_id = cls._REPR_NO_NFTS_3_EXTENSION.unpack(binary_stream)
        nw_obj = cls(
            (
                Timestamps((convert_filetime(t_created), convert_filetime(t_changed),
                            convert_filetime(t_mft_changed), convert_filetime(t_accessed))
                           ),
                FileInfoFlags(flags),
                m_ver, ver, c_id, None, None, None, None))
    _MOD_LOGGER.debug("Attempted to unpack STANDARD_INFORMATION from \"%s\"\nResult: %s", binary_stream.tobytes(), nw_obj)
    return nw_obj
See base class.
373,948
def getJob(self, jobID):
    """Return a GPJob wrapper giving the results or status of a job.

    :param jobID: identifier of the geoprocessing job to look up
    """
    job_url = "%s/jobs/%s" % (self._url, jobID)
    return GPJob(url=job_url,
                 securityHandler=self._securityHandler,
                 proxy_port=self._proxy_port,
                 proxy_url=self._proxy_url)
returns the results or status of a job
373,949
def find_n75(contig_lengths_dict, genome_length_dict):
    """Calculate the N75 for each strain.

    N75 is the contig length at which the cumulative length of contigs
    (taken largest-first) first reaches 3/4 of the total genome size.

    :param contig_lengths_dict: strain name -> reverse-sorted contig lengths
    :param genome_length_dict: strain name -> total genome length
    :return: strain name -> N75 value
    """
    n75_dict = dict()
    for strain, lengths in contig_lengths_dict.items():
        threshold = genome_length_dict[strain] * 0.75
        running_total = 0
        for length in lengths:
            running_total += length
            # first contig pushing the cumulative sum past 75% is the N75
            if running_total >= threshold:
                n75_dict[strain] = length
                break
    return n75_dict
Calculate the N75 for each strain. N75 is defined as the largest contig such that at least 3/4 of the total genome size is contained in contigs equal to or larger than this contig :param contig_lengths_dict: dictionary of strain name: reverse-sorted list of all contig lengths :param genome_length_dict: dictionary of strain name: total genome length :return: n75_dict: dictionary of strain name: N75
373,950
def http_error_default(self, url, fp, errcode, errmsg, headers):
    """Default error handling -- wrap the response instead of raising."""
    full_url = "http:" + url
    return addinfourl(fp, headers, full_url, errcode)
Default error handling -- don't raise an exception.
373,951
# Helper for query-matching initialization: returns the (state, squared
# distance) pair best matching `query` at oracle position k.
# NOTE(review): the default value of `method` and the string literal it is
# compared against were stripped from this copy ("method=" is a syntax error
# as-is); restore them before use.
def _query_init(k, oracle, query, method=):
    if method == :
        # compare the query against every state latent in position k
        a = np.subtract(query, [oracle.f_array[t] for t in oracle.latent[oracle.data[k]]])
        dvec = (a * a).sum(axis=1)  # squared euclidean distance per candidate
        _d = dvec.argmin()
        return oracle.latent[oracle.data[k]][_d], dvec[_d]
    else:
        # compare against the single feature vector at position k
        a = np.subtract(query, oracle.f_array[k])
        dvec = (a * a).sum()
        return k, dvec
A helper function for query-matching function initialization.
373,952
# Swagger-generated wrapper: remove an API key from one or more groups.
# NOTE(review): the kwargs key used below was stripped from this copy
# ("kwargs[] = True" / "kwargs.get()" are invalid as-is); in generated
# clients it selects synchronous vs. asynchronous execution.
def remove_api_key_from_groups(self, api_key, body, **kwargs):
    kwargs[] = True
    if kwargs.get():
        # asynchronous: return the request thread
        return self.remove_api_key_from_groups_with_http_info(api_key, body, **kwargs)
    else:
        # synchronous: block and return the response data
        (data) = self.remove_api_key_from_groups_with_http_info(api_key, body, **kwargs)
        return data
Remove API key from groups. # noqa: E501 An endpoint for removing API key from groups. **Example usage:** `curl -X DELETE https://api.us-east-1.mbedcloud.com/v3/api-keys/{apikey-id}/groups -d '[0162056a9a1586f30242590700000000,0117056a9a1586f30242590700000000]' -H 'content-type: application/json' -H 'Authorization: Bearer API_KEY'` # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass asynchronous=True >>> thread = api.remove_api_key_from_groups(api_key, body, asynchronous=True) >>> result = thread.get() :param asynchronous bool :param str api_key: The ID of the API key to be removed from the group. (required) :param list[str] body: A list of IDs of the groups to be updated. (required) :return: UpdatedResponse If the method is called asynchronously, returns the request thread.
373,953
def log(**data):
    """RPC method for logging events.

    Builds a history entry from the RPC ``params`` payload and stores it.
    Returns None.
    """
    params = data["params"]
    history.create({
        "module": params["module"],
        "event": params["event"],
        "timestamp": params["timestamp"],
        "arguments": params["arguments"],
    })
RPC method for logging events Makes entry with new account creating Return None
373,954
def layers(self):
    """Construct Keras input layers for this transformer.

    Returns
    -------
    dict
        Mapping from field key to a ``keras.layers.Input`` configured with
        that field's shape and dtype.
    """
    from keras.layers import Input
    return {
        key: Input(name=key,
                   shape=self.fields[key].shape,
                   dtype=self.fields[key].dtype)
        for key in self.fields
    }
Construct Keras input layers for the given transformer Returns ------- layers : {field: keras.layers.Input} A dictionary of keras input layers, keyed by the corresponding field keys.
373,955
def _init_log(level=logging.DEBUG): log = logging.getLogger(__file__) log.setLevel(level) handler = logging.StreamHandler(sys.stdout) handler.setLevel(level) formatter = logging.Formatter(, ) handler.setFormatter(formatter) log.addHandler(handler) return log
Initialise the logging object. Args: level (int): Logging level. Returns: Logger: Python logging object.
373,956
def remove_feature_flag_accounts(self, feature, account_id):
    """Remove a feature flag from an Account.

    The flag must be defined directly on the Account; after removal the
    object inherits flags from a higher account, if any exist.
    """
    path = {"account_id": account_id, "feature": feature}
    data = {}
    params = {}
    self.logger.debug(
        "DELETE /api/v1/accounts/{account_id}/features/flags/{feature} with query params: {params} and form data: {data}".format(
            params=params, data=data, **path))
    return self.generic_request(
        "DELETE",
        "/api/v1/accounts/{account_id}/features/flags/{feature}".format(**path),
        data=data, params=params, single_item=True)
Remove feature flag. Remove feature flag for a given Account, Course, or User. (Note that the flag must be defined on the Account, Course, or User directly.) The object will then inherit the feature flags from a higher account, if any exist. If this flag was 'on' or 'off', then lower-level account flags that were masked by this one will apply again.
373,957
# Parse one or more qface files/directories (directories recursively) into a
# System, optionally caching the result in a shelve keyed by `identifier`.
# NOTE(review): several string literals were stripped from this copy (the
# debug message, the default identifier, the shelve filename), leaving
# "identifier = if not identifier else identifier" as invalid syntax.
def parse(input, identifier: str = None, use_cache=False, clear_cache=True, pattern="*.qface", profile=EProfile.FULL):
    # normalise a single path into a list of inputs
    inputs = input if isinstance(input, (list, tuple)) else [input]
    logger.debug(.format(inputs))
    identifier = if not identifier else identifier
    system = System()
    cache = None
    if use_cache:
        cache = shelve.open()
        if identifier in cache and clear_cache:
            del cache[identifier]
        if identifier in cache:
            system = cache[identifier]
    for input in inputs:
        path = Path.getcwd() / str(input)
        if path.isfile():
            FileSystem.parse_document(path, system)
        else:
            # walk the directory tree for matching documents
            for document in path.walkfiles(pattern):
                FileSystem.parse_document(document, system)
    if use_cache:
        cache[identifier] = system
    return system
Input can be either a file or directory or a list of files or directory. A directory will be parsed recursively. The function returns the resulting system. Stores the result of the run in the domain cache named after the identifier. :param path: directory to parse :param identifier: identifies the parse run. Used to name the cache :param clear_cache: clears the domain cache (defaults to true)
373,958
# Return truthy when this version is the draft version of a layer, i.e. it
# is the latest version and the latest version differs from the published one.
# NOTE(review): the attribute names passed to getattr were stripped from this
# copy ("getattr(self, , None)" is invalid as-is); restore before use.
def is_draft_version(self):
    pub_ver = getattr(self, , None)
    latest_ver = getattr(self, , None)
    this_ver = getattr(self, , None)
    return this_ver and latest_ver and (this_ver == latest_ver) and (latest_ver != pub_ver)
Return if this version is the draft version of a layer
373,959
# Display an updated frame from a framebuffer via the controller.
# `reset` is accepted for interface compatibility; the body does not read it.
def display_image(self, reset=1):
    try:
        fb = self.server.controller.get_frame(self.frame)
    except KeyError:
        # frame not known yet -- create it on demand
        fb = self.server.controller.init_frame(self.frame)
    if not fb.height:
        # height not recorded yet: derive it from the buffer size and width
        width = fb.width
        height = int(len(fb.buffer) / width)
        fb.height = height
        # only display if there is actual pixel data
        if (len(fb.buffer) > 0) and (height > 0):
            self.server.controller.display(self.frame, width, height, True)
    else:
        self.server.controller.display(self.frame, fb.width, fb.height, False)
Utility routine used to display an updated frame from a framebuffer.
373,960
def get_gain(data, attr, class_attr, method=DEFAULT_DISCRETE_METRIC,
             only_sub=0, prefer_fewer_values=False, entropy_func=None):
    """Calculate the information gain (reduction in entropy) from splitting
    `data` on attribute `attr`.

    Parameters:
        data: iterable of record mappings.
        attr: attribute to split on.
        class_attr: the class attribute used for entropy calculation.
        method: entropy metric passed through to the entropy function.
        only_sub: if truthy, return only the weighted subset entropy.
        prefer_fewer_values: if True, return a (gain, 1/num_values) tuple so
            that ties in gain are broken toward attributes with fewer
            distinct values.
        entropy_func: alternative entropy function; defaults to `entropy`.
    """
    entropy_func = entropy_func or entropy

    # frequency of each distinct value of `attr` in the data
    val_freq = defaultdict(float)
    for record in data:
        val_freq[record.get(attr)] += 1.0

    # hoisted out of the loop: the original recomputed this sum for every
    # distinct value, making the pass quadratic in the number of values
    total_count = sum(val_freq.values())

    # weighted entropy over the subsets induced by each attribute value
    subset_entropy = 0.0
    for val in val_freq.keys():
        val_prob = val_freq[val] / total_count
        data_subset = [record for record in data if record.get(attr) == val]
        subset_entropy += val_prob * entropy_func(
            data_subset, class_attr, method=method)

    if only_sub:
        return subset_entropy

    main_entropy = entropy_func(data, class_attr, method=method)
    gain = main_entropy - subset_entropy
    if prefer_fewer_values:
        return (gain, 1. / len(val_freq))
    return gain
Calculates the information gain (reduction in entropy) that would result by splitting the data on the chosen attribute (attr). Parameters: prefer_fewer_values := Weights the gain by the count of the attribute's unique values. If multiple attributes have the same gain, but one has slightly fewer attributes, this will cause the one with fewer attributes to be preferred.
373,961
# Check availability of a layer by attempting a thumbnail update, recording
# success, message and response time as a Check row. Python 2 syntax
# (comma-style except clauses).
# NOTE(review): several string literals were stripped from this copy (the
# initial `message` value, the LOGGER.debug format strings, and the
# response-time format), leaving invalid syntax as-is.
def check_available(self):
    success = True
    start_time = datetime.datetime.utcnow()
    message = LOGGER.debug( % self.id)
    # temporarily disconnect the post_save signal so update_thumbnail()
    # does not recursively trigger another check
    signals.post_save.disconnect(layer_post_save, sender=Layer)
    try:
        self.update_thumbnail()
    except ValueError, err:
        # unknown url schemes are not treated as failures
        if str(err).startswith("unknown url type:"):
            LOGGER.debug( % str(err))
    except Exception, err:
        message = str(err)
        success = False
    signals.post_save.connect(layer_post_save, sender=Layer)
    end_time = datetime.datetime.utcnow()
    delta = end_time - start_time
    response_time = % (delta.seconds, delta.microseconds)
    check = Check(
        content_object=self,
        success=success,
        response_time=response_time,
        message=message
    )
    check.save()
    LOGGER.debug( % (response_time, success))
    return success, message
Check for availability of a layer and provide run metrics.
373,962
# Handle Run/Debug-style shell commands that take a filename plus optional
# extra arguments, and forward them to the backend. The Ev3Remote* variants
# are mapped to plain Run/Debug with EV3 remote connection details attached.
def _handle_rundebug_from_shell(cmd_line):
    command, args = parse_shell_command(cmd_line)
    if len(args) >= 1:
        get_workbench().get_editor_notebook().save_all_named_editors()
        origcommand=command
        # remote variants behave like their local counterparts ...
        if command == "Ev3RemoteRun":
            command="Run"
        if command == "Ev3RemoteDebug":
            command="Debug"
        cmd = ToplevelCommand(command=command,
                              filename=args[0],
                              args=args[1:])
        # ... but carry the EV3 connection details in the environment
        if origcommand == "Ev3RemoteRun" or origcommand == "Ev3RemoteDebug":
            cmd.environment={ "EV3MODE" : "remote", "EV3IP": get_workbench().get_option("ev3.ip") }
        if os.path.isabs(cmd.filename):
            cmd.full_filename = cmd.filename
        else:
            runner=get_runner()
            cmd.full_filename = os.path.join(runner.get_cwd(), cmd.filename)
        # for Run/Debug also ship the source text, honoring PEP 263 encodings
        if command in ["Run", "run", "Debug", "debug"]:
            with tokenize.open(cmd.full_filename) as fp:
                cmd.source = fp.read()
        get_runner().send_command(cmd)
    else:
        print_error_in_backend("Command takes at least one argument".format(command))
Handles all commands that take a filename and 0 or more extra arguments. Passes the command to backend. (Debugger plugin may also use this method)
373,963
# Delete the row backing this instance from RethinkDB; raises when the
# instance is new (no primary key stored yet). Returns True on success.
def delete(self):
    if self._new:
        # NOTE(review): the exact continuation whitespace inside this literal
        # could not be recovered from this copy
        raise Exception("This is a new object, %s not in data, \
            indicating this entry isn't stored." % self.primaryKey)
    # look up the row by primary key and delete with the configured durability
    r.table(self.table).get(self._data[self.primaryKey]) \
        .delete(durability=self.durability).run(self._conn)
    return True
Deletes the current instance. This assumes that we know what we're doing, and have a primary key in our data already. If this is a new instance, then we'll let the user know with an Exception
373,964
# Load one or more scenarios described by dicts in `scenario_list` into this
# device, or into a specific tile when a tile address is given.
# NOTE(review): the dict keys passed to scenario.get() were stripped from
# this copy ("scenario.get()" / "scenario.get(, {})" are invalid as-is);
# per the docstring the keys were 'name', 'tile' and 'args'.
def load_metascenario(self, scenario_list):
    for scenario in scenario_list:
        name = scenario.get()
        if name is None:
            raise DataError("Scenario in scenario list is missing a name parameter", scenario=scenario)
        tile_address = scenario.get()
        args = scenario.get(, {})
        # default target is the device itself; override with a tile if given
        dest = self
        if tile_address is not None:
            dest = self._tiles.get(tile_address)
            if dest is None:
                raise DataError("Attempted to load a scenario into a tile address that does not exist", address=tile_address, valid_addresses=list(self._tiles))
        dest.load_scenario(name, **args)
Load one or more scenarios from a list. Each entry in scenario_list should be a dict containing at least a name key and an optional tile key and args key. If tile is present and its value is not None, the scenario specified will be loaded into the given tile only. Otherwise it will be loaded into the entire device. If the args key is specified is will be passed as keyword arguments to load_scenario. Args: scenario_list (list): A list of dicts for each scenario that should be loaded.
373,965
# pyangbind-generated setter for the YANG container
# /isis_state/router_isis_config/is_address_family_v4/redist_bgp
# (IS-IS redistribution config for BGP routes).
# NOTE(review): the extension/namespace string literals were stripped from
# this copy (empty u'' keys and values), so it will not run as-is.
def _set_redist_bgp(self, v, load=False):
    # unwrap typed YANG wrappers back to their base python type
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v,base=redist_bgp.redist_bgp, is_container=, presence=False, yang_name="redist-bgp", rest_name="redist-bgp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u: {u: u, u: None}}, namespace=, defining_module=, yang_type=, is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            : ,
            : "container",
            : ,
        })
    self.__redist_bgp = t
    # notify the data model of the change, if the hook exists
    if hasattr(self, ):
        self._set()
Setter method for redist_bgp, mapped from YANG variable /isis_state/router_isis_config/is_address_family_v4/redist_bgp (container) If this variable is read-only (config: false) in the source YANG file, then _set_redist_bgp is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_redist_bgp() directly. YANG Description: IS-IS redistribution config for BGP routes
373,966
def unorm(v1):
    """Normalize a double precision 3-vector and return its magnitude.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/unorm_c.html

    :param v1: Vector to be normalized (3-element array of floats).
    :return: (unit vector of v1, magnitude of v1) tuple.
    """
    vec_in = stypes.toDoubleVector(v1)
    vec_out = stypes.emptyDoubleVector(3)
    magnitude = ctypes.c_double()
    libspice.unorm_c(vec_in, vec_out, ctypes.byref(magnitude))
    return stypes.cVectorToPython(vec_out), magnitude.value
Normalize a double precision 3-vector and return its magnitude. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/unorm_c.html :param v1: Vector to be normalized. :type v1: 3-Element Array of floats :return: Unit vector of v1, Magnitude of v1. :rtype: tuple
373,967
def record_move_fields(rec, tag, field_positions_local, field_position_local=None):
    """Move fields of `tag` to the position given by `field_position_local`.

    :param rec: record structure as returned by create_record()
    :param tag: tag of the fields to be moved
    :param field_positions_local: positions of the fields to move
    :param field_position_local: insert before this local position; appends
        when unspecified
    :return: the resulting field_position_local if the operation succeeded
    """
    # delete-then-re-add implements the move
    moved_fields = record_delete_fields(
        rec, tag, field_positions_local=field_positions_local)
    return record_add_fields(
        rec, tag, moved_fields, field_position_local=field_position_local)
Move some fields to the position specified by 'field_position_local'. :param rec: a record structure as returned by create_record() :param tag: the tag of the fields to be moved :param field_positions_local: the positions of the fields to move :param field_position_local: insert the field before that field_position_local. If unspecified, appends the fields :return: the field_position_local is the operation was successful
373,968
def nonparabolicity(self, **kwargs):
    """Returns the Kane band nonparabolicity parameter for the Gamma-valley.

    Keyword arguments are forwarded to the band-gap and effective-mass
    helpers; the temperature ``T`` (in K) defaults to 300.
    """
    Eg = self.Eg_Gamma(**kwargs)
    meff = self.meff_e_Gamma(**kwargs)
    # TODO(review): the kwargs key literal was lost in this copy; 'T'
    # (temperature) is inferred from the variable name -- confirm.
    T = kwargs.get('T', 300.)
    # Kane nonparabolicity: alpha = kT/Eg * (1 - m*/m0)^2
    return k*T/Eg * (1 - meff)**2
Returns the Kane band nonparabolicity parameter for the Gamma-valley.
373,969
# Insert a GCE instance template described by `body` and block until the
# resulting operation completes. Must be called with keyword arguments.
# NOTE(review): part of the error-message literal was stripped from this copy
# (the missing field name between the adjacent strings, presumably 'name').
def insert_instance_template(self, body, request_id=None, project_id=None):
    response = self.get_conn().instanceTemplates().insert(
        project=project_id,
        body=body,
        requestId=request_id
    ).execute(num_retries=self.num_retries)
    try:
        # the async operation's name is needed to poll for completion
        operation_name = response["name"]
    except KeyError:
        raise AirflowException(
            "Wrong response returned - it should contain "
            " field".format(response))
    # insert is asynchronous -- wait for the operation to finish
    self._wait_for_operation_to_complete(project_id=project_id,
                                         operation_name=operation_name)
Inserts instance template using body specified Must be called with keyword arguments rather than positional. :param body: Instance template representation as object according to https://cloud.google.com/compute/docs/reference/rest/v1/instanceTemplates :type body: dict :param request_id: Optional, unique request_id that you might add to achieve full idempotence (for example when client call times out repeating the request with the same request id will not create a new instance template again) It should be in UUID format as defined in RFC 4122 :type request_id: str :param project_id: Optional, Google Cloud Platform project ID where the Compute Engine Instance exists. If set to None or missing, the default project_id from the GCP connection is used. :type project_id: str :return: None
373,970
# Skull-strip the raw anatomy `dset` by aligning a template brain to it and
# applying the template's mask, working in a temp directory and moving the
# result back to the original working directory.
# NOTE(review): many string literals were stripped from this copy (suffix and
# prefix defaults, AFNI command/option strings, the 3dcalc expression),
# leaving invalid syntax such as "suffix ="; restore before use.
def skullstrip_template(dset,template,prefix=None,suffix=None,dilate=0):
    if suffix==None:
        suffix =
    if prefix==None:
        prefix = nl.suffix(dset,suffix)
    if not os.path.exists(prefix):
        with nl.notify( % dset):
            dset = os.path.abspath(dset)
            template = os.path.abspath(template)
            tmp_dir = tempfile.mkdtemp()
            cwd = os.getcwd()
            # do all intermediate work inside the temp directory
            with nl.run_in(tmp_dir):
                nl.affine_align(template,dset,skull_strip=None,cost=,opts=[,])
                nl.run([,,,,,,,nl.suffix(template,),,dset,,nl.suffix(template,)],products=nl.suffix(template,))
                info = nl.dset_info(nl.suffix(template,))
                max_value = info.subbricks[0][]
                # apply the aligned template mask to the anatomy
                nl.calc([dset,nl.suffix(template,)],%max_value,prefix)
                shutil.move(prefix,cwd)
            shutil.rmtree(tmp_dir)
Takes the raw anatomy ``dset``, aligns it to a template brain, and applies a templated skullstrip. Should produce fairly reliable skullstrips as long as there is a decent amount of normal brain and the overall shape of the brain is normal-ish
373,971
def is_cell_separator(self, cursor=None, block=None):
    """Return True if cursor (or text block) is on a block separator."""
    assert cursor is not None or block is not None
    if cursor is not None:
        # inspect the whole block the cursor currently sits on
        probe = QTextCursor(cursor)
        probe.select(QTextCursor.BlockUnderCursor)
        text = to_text_string(probe.selectedText())
    else:
        text = to_text_string(block.text())
    if self.cell_separators is None:
        return False
    return text.lstrip().startswith(self.cell_separators)
Return True if cursor (or text block) is on a block separator
373,972
# Trigger workflow `signal` on record `record_id` of `model` via the legacy
# Odoo workflow engine; raises DeprecationWarning on Odoo >= 11 where
# workflows were removed.
# NOTE(review): the JSON-RPC endpoint path and the request-dict keys were
# stripped from this copy ("self.json(, {...})" is invalid as-is).
def exec_workflow(self, model, record_id, signal):
    if tools.v(self.version)[0] >= 11:
        raise DeprecationWarning(
            u"Workflows have been removed in Odoo >= 11.0")
    self._check_logged_user()
    # positional args for the legacy exec_workflow RPC call
    args_to_send = [self.env.db, self.env.uid, self._password,
                    model, signal, record_id]
    data = self.json(
        , {: , : ,
           : args_to_send})
    return data.get()
Execute the workflow `signal` on the instance having the ID `record_id` of `model`. *Python 2:* :raise: :class:`odoorpc.error.RPCError` :raise: :class:`odoorpc.error.InternalError` (if not logged) :raise: `urllib2.URLError` (connection error) *Python 3:* :raise: :class:`odoorpc.error.RPCError` :raise: :class:`odoorpc.error.InternalError` (if not logged) :raise: `urllib.error.URLError` (connection error)
373,973
def set_row_min_height(self, y: int, min_height: int):
    """Register *min_height* as the minimum height for blocks in row *y*.

    :raises IndexError: if *y* is negative.
    """
    if y >= 0:
        self._min_heights[y] = min_height
    else:
        raise IndexError()
Sets a minimum height for blocks in the row with coordinate y.
373,974
def loadTopicPageFromFile(self, fname):
    """Load the topic page definition from an existing JSON file.

    :param fname: path of the JSON file to load
    :raises AssertionError: if *fname* does not exist
    """
    assert os.path.exists(fname)
    # context manager guarantees the handle is closed even if json.load
    # raises (the original leaked the open file object)
    with open(fname, "r", encoding="utf-8") as f:
        self.topicPage = json.load(f)
load topic page from an existing file
373,975
def to_xy_arrays(self, dtype=np.float32):
    """Convert this object to an iterable of ``(M,2)`` arrays of points.

    Inverse of
    :func:`imgaug.augmentables.lines.LineStringsOnImage.from_xy_array`.

    Parameters
    ----------
    dtype : numpy.dtype, optional
        Desired output datatype of the arrays.

    Returns
    -------
    list of ndarray
        One ``(M,2)`` coordinate array per line string.
    """
    from .. import dtypes as iadt
    arrays = []
    for line_string in self.line_strings:
        # copy first so the dtype restoration never mutates the original
        coords_copy = np.copy(line_string.coords)
        arrays.append(iadt.restore_dtypes_(coords_copy, dtype))
    return arrays
Convert this object to an iterable of ``(M,2)`` arrays of points. This is the inverse of :func:`imgaug.augmentables.lines.LineStringsOnImage.from_xy_array`. Parameters ---------- dtype : numpy.dtype, optional Desired output datatype of the ndarray. Returns ------- list of ndarray The arrays of point coordinates, each given as ``(M,2)``.
373,976
# Build a generic auditing Event recording the changes for this save (or the
# initial data on create); stamps the current user when one is available.
# NOTE(review): the default value of `code` was stripped from this copy
# ("code=" is a syntax error as-is); restore before use.
def create_audit_event(self, code=):
    event = self._meta.event_model(
        code=code,
        model=self.__class__.__name__,
    )
    if current_user:
        event.created_by = current_user.get_id()
    # copy relational context and diff data onto the event
    self.copy_foreign_keys(event)
    self.populate_audit_fields(event)
    return event
Creates a generic auditing Event logging the changes between saves and the initial data in creates. Kwargs: code (str): The code to set the new Event to. Returns: Event: A new event with relevant info inserted into it
373,977
def convert_bytes(n):
    """Convert a byte count to a human-readable string ('K', 'M', etc.).

    Values below 1024 come back as ``"<n>B"``; larger values are scaled to
    the largest fitting binary unit with one decimal place.
    """
    # NOTE(review): the unit symbols and output format literal were lost in
    # this copy; reconstructed from the docstring in the conventional
    # bytes2human form -- confirm against callers' expectations.
    symbols = ('K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
    prefix = {}
    for i, s in enumerate(symbols):
        # each successive symbol is another factor of 1024 (1 << 10)
        prefix[s] = 1 << (i + 1) * 10
    for s in reversed(symbols):
        if n >= prefix[s]:
            value = float(n) / prefix[s]
            return '%.1f%s' % (value, s)
    return "%sB" % n
Convert a size number to 'K', 'M', .etc
373,978
def dimension_range(lower, upper, hard_range, soft_range, padding=None, log=False):
    """Compute the range along a dimension by combining the data range with
    the Dimension soft_range and hard range.
    """
    # pad the raw data range first
    lower, upper = range_pad(lower, upper, padding, log)
    # fold in the soft range bounds
    lower = max_range([(lower, None), (soft_range[0], None)])[0]
    upper = max_range([(None, upper), (None, soft_range[1])])[1]
    # finite hard-range bounds always win
    dmin, dmax = hard_range
    if dmin is not None and isfinite(dmin):
        lower = dmin
    if dmax is not None and isfinite(dmax):
        upper = dmax
    return lower, upper
Computes the range along a dimension by combining the data range with the Dimension soft_range and range.
373,979
# Delete the network files corresponding to the given selections from the
# current path, skipping a special entry and normalising file extensions.
# NOTE(review): several string literals were stripped from this copy (the
# skipped name, the appended extension, and the f-string bodies of the
# error augmentation), leaving invalid syntax such as "if name ==" as-is.
def delete_files(self, selections) -> None:
    try:
        currentpath = self.currentpath
        for selection in selections:
            name = str(selection)
            if name == :
                continue
            if not name.endswith():
                name +=
            path = os.path.join(currentpath, name)
            os.remove(path)
    except BaseException:
        # wrap any failure with additional context for the user
        objecttools.augment_excmessage(
            f
            f)
Delete the network files corresponding to the given selections (e.g. a |list| of |str| objects or a |Selections| object).
373,980
# Find a TileBusProxyPlugin named `plugin_name` provided by `component`;
# raises DataError when no registered component provides it.
# NOTE(review): the first positional argument and the product_name keyword
# value of load_extensions were stripped from this copy, leaving invalid
# syntax; restore before use.
def find_proxy_plugin(component, plugin_name):
    reg = ComponentRegistry()
    # gather all proxy-plugin classes registered for this component
    plugins = reg.load_extensions(, comp_filter=component, class_filter=TileBusProxyPlugin, product_name=)
    for _name, plugin in plugins:
        if plugin.__name__ == plugin_name:
            return plugin
    raise DataError("Could not find proxy plugin module in registered components or installed distributions", component=component, name=plugin_name)
Attempt to find a proxy plugin provided by a specific component Args: component (string): The name of the component that provides the plugin plugin_name (string): The name of the plugin to load Returns: TileBuxProxyPlugin: The plugin, if found, otherwise raises DataError
373,981
def makemigrations(application, merge=False, dry_run=False, empty=False,
                   extra_applications=None):
    """Generate Django migrations for one or more applications.

    :param application: primary app label to generate migrations for
    :param merge: pass Django's --merge flag
    :param dry_run: pass Django's --dry-run flag
    :param empty: pass Django's --empty flag
    :param extra_applications: additional app label (str) or labels (list)
    """
    from django.core.management import call_command
    apps = [application]
    if extra_applications:
        if isinstance(extra_applications, text_type):
            apps += [extra_applications]
        elif isinstance(extra_applications, list):
            apps += extra_applications
    for app in apps:
        # command name restored ('makemigrations' -- lost literal in this
        # copy, unambiguous from the function's purpose)
        call_command('makemigrations', app,
                     merge=merge, dry_run=dry_run, empty=empty)
Generate migrations
373,982
def get_prev_sibling_tags(mention):
    """Return the HTML tags of the Mention's previous siblings.

    Previous siblings are nodes at the same level of the HTML tree that are
    declared before the given mention. For a candidate, only its first
    Mention is considered.

    :param mention: The Mention to evaluate
    :rtype: list of strings
    """
    span = _to_span(mention)
    tags = []
    node = _get_node(span.sentence)
    sibling = node.getprevious()
    # walk backwards through the siblings, keeping document order in `tags`
    while sibling is not None:
        tags.insert(0, str(sibling.tag))
        node = sibling
        sibling = node.getprevious()
    return tags
Return the HTML tag of the Mention's previous siblings. Previous siblings are Mentions which are at the same level in the HTML tree as the given mention, but are declared before the given mention. If a candidate is passed in, only the previous siblings of its first Mention are considered in the calculation. :param mention: The Mention to evaluate :rtype: list of strings
373,983
# Put the save icon onto the release push-button.
# NOTE(review): the icon resource name passed to get_icon was stripped from
# this copy ("get_icon(, asicon=True)" is invalid as-is); restore before use.
def setup_icons(self, ):
    floppy_icon = get_icon(, asicon=True)
    self.release_pb.setIcon(floppy_icon)
Set all icons on buttons :returns: None :rtype: None :raises: None
373,984
def stop(self):
    """Stop Modis and log its client out of Discord.

    Flips the toggle button back to "Start Modis", marks the state off,
    schedules the Discord logout on the client's own event loop, and
    resets the status bar.
    """
    self.button_toggle_text.set("Start Modis")
    self.state = "off"
    logger.info("Stopping Discord Modis")
    from ._client import client
    # schedule logout on the client's loop from this (GUI) thread
    logout_coro = client.logout()
    asyncio.run_coroutine_threadsafe(logout_coro, client.loop)
    self.status_bar.set_status(0)
Stop Modis and log it out of Discord.
373,985
def check_settings_for_differences(old, new, as_bool=False, as_tri=False):
    """Return the env-dict keys that differ between `old` and `new`.

    With ``as_bool`` return True/False for "any difference"; with ``as_tri``
    return the (added, changed, deleted) key sets separately; otherwise
    return one combined set of differing keys. (Python 2 dict API.)
    """
    assert not as_bool or not as_tri
    old = old or {}
    new = new or {}

    shared = set(new.iterkeys()).intersection(old.iterkeys())
    changes = set(k for k in shared if new[k] != old[k])
    if as_bool and changes:
        return True

    added_keys = set(new.iterkeys()).difference(old.iterkeys())
    if as_bool and added_keys:
        return True
    if not as_tri:
        changes.update(added_keys)

    deled_keys = set(old.iterkeys()).difference(new.iterkeys())
    if as_bool:
        # no changes or additions were found above, so deletions decide
        return bool(deled_keys)
    if not as_tri:
        changes.update(deled_keys)

    if as_tri:
        return added_keys, changes, deled_keys
    return changes
Returns a subset of the env dictionary keys that differ, either being added, deleted or changed between old and new.
373,986
def register_iq_response_future(self, from_, id_, fut):
    """Register future `fut` for an IQ stanza from `from_` with id `id_`.

    A ``result`` stanza fulfils the future with the stanza; an ``error``
    stanza is converted to an exception and set on the future. The future
    may also receive stream-level exceptions (e.g. :class:`ConnectionError`
    on stream shutdown, or :class:`.errors.ErroneousStanza` for unparsable
    responses).
    """
    listener = StanzaErrorAwareListener(callbacks.FutureListener(fut))
    self._iq_response_map.add_listener((from_, id_), listener)
    self._logger.debug("iq response future registered: from=%r, id=%r",
                       from_, id_)
Register a future `fut` for an IQ stanza with type ``result`` or ``error`` from the :class:`~aioxmpp.JID` `from_` with the id `id_`. If the type of the IQ stanza is ``result``, the stanza is set as result to the future. If the type of the IQ stanza is ``error``, the stanzas error field is converted to an exception and set as the exception of the future. The future might also receive different exceptions: * :class:`.errors.ErroneousStanza`, if the response stanza received could not be parsed. Note that this exception is not emitted if the ``from`` address of the stanza is unset, because the code cannot determine whether a sender deliberately used an erroneous address to make parsing fail or no sender address was used. In the former case, an attacker could use that to inject a stanza which would be taken as a stanza from the peer server. Thus, the future will never be fulfilled in these cases. Also note that this exception does not derive from :class:`.errors.XMPPError`, as it cannot provide the same attributes. Instead, it dervies from :class:`.errors.StanzaError`, from which :class:`.errors.XMPPError` also derives; to catch all possible stanza errors, catching :class:`.errors.StanzaError` is sufficient and future-proof. * :class:`ConnectionError` if the stream is :meth:`stop`\\ -ped (only if SM is not enabled) or :meth:`close`\\ -ed. * Any :class:`Exception` which may be raised from :meth:`~.protocol.XMLStream.send_xso`, which are generally also :class:`ConnectionError` or at least :class:`OSError` subclasses.
373,987
def share(self, plotters, keys=None, draw=None, auto_update=False):
    """Share the formatoptions named by `keys` from this plotter with
    the given `plotters`, then force an update on all of them.

    NOTE(review): several ``rcParams[]`` subscripts below lost their
    string keys in extraction — recover the original key names from the
    upstream source before relying on this code.
    """
    auto_update = auto_update or not self.no_auto_update
    # Accept a single plotter as well as an iterable of plotters.
    if isinstance(plotters, Plotter):
        plotters = [plotters]
    keys = self._set_sharing_keys(keys)
    for plotter in plotters:
        for key in keys:
            # Use the already-shared formatoption if one exists,
            # otherwise this plotter's own.
            fmto = self._shared.get(key, getattr(self, key))
            if not getattr(plotter, key) == fmto:
                # Record the sharing link in both directions.
                plotter._shared[key] = getattr(self, key)
                fmto.shared.add(getattr(plotter, key))
    if self._initialized:
        self.update(force=keys, auto_update=auto_update, draw=draw)
    for plotter in plotters:
        if not plotter._initialized:
            continue
        # Temporarily clear the pending registered updates so that the
        # forced update below does not pick them up, then restore them.
        old_registered = plotter._registered_updates.copy()
        plotter._registered_updates.clear()
        try:
            plotter.update(force=keys, auto_update=auto_update, draw=draw)
        except:
            raise
        finally:
            plotter._registered_updates.clear()
            plotter._registered_updates.update(old_registered)
    if draw is None:
        draw = rcParams[]
    if draw:
        self.draw()
        if rcParams[]:
            self.show()
Share the formatoptions of this plotter with others This method shares the formatoptions of this :class:`Plotter` instance with others to make sure that, if the formatoption of this changes, those of the others change as well Parameters ---------- plotters: list of :class:`Plotter` instances or a :class:`Plotter` The plotters to share the formatoptions with keys: string or iterable of strings The formatoptions to share, or group names of formatoptions to share all formatoptions of that group (see the :attr:`fmt_groups` property). If None, all formatoptions of this plotter are unshared. %(InteractiveBase.start_update.parameters.draw)s %(InteractiveBase.update.parameters.auto_update)s See Also -------- unshare, unshare_me
373,988
def _internal_declare_key_flags(flag_names, flag_values=FLAGS, key_flag_values=None):
    """Declare the already-registered flags in *flag_names* as key flags
    for the calling module.

    *flag_values* is the FlagValues registry the flags were defined in;
    *key_flag_values* (default: same as *flag_values*) is the registry
    that tracks key flags per module.
    """
    key_flag_values = key_flag_values or flag_values
    calling_module = _helpers.GetCallingModule()
    for name in flag_names:
        key_flag_values._RegisterKeyFlagForModule(
            calling_module, flag_values.GetFlag(name))
Declares a flag as key for the calling module. Internal function. User code should call DECLARE_key_flag or ADOPT_module_key_flags instead. Args: flag_names: A list of strings that are names of already-registered Flag objects. flag_values: A FlagValues object that the flags listed in flag_names have registered with (the value of the flag_values argument from the DEFINE_* calls that defined those flags). This should almost never need to be overridden. key_flag_values: A FlagValues object that (among possibly many other things) keeps track of the key flags for each module. Default None means "same as flag_values". This should almost never need to be overridden. Raises: UnrecognizedFlagError: when we refer to a flag that was not defined yet.
373,989
def _storage_get_key_names(bucket, pattern):
    """Return the names of all storage keys in *bucket* matching *pattern*."""
    matching_keys = _storage_get_keys(bucket, pattern)
    return [key.metadata.name for key in matching_keys]
Get names of all storage keys in a specified bucket that match a pattern.
373,990
def _create_matrix(self, document, dictionary): sentences = document.sentences words_count = len(dictionary) sentences_count = len(sentences) if words_count < sentences_count: message = ( "Number of words (%d) is lower than number of sentences (%d). " "LSA algorithm may not work properly." ) warn(message % (words_count, sentences_count)) matrix = numpy.zeros((words_count, sentences_count)) for col, sentence in enumerate(sentences): for word in map(self.stem_word, sentence.words): if word in dictionary: row = dictionary[word] matrix[row, col] += 1 return matrix
Creates a matrix of shape |unique words|×|sentences| where each cell contains the number of occurrences of words (rows) in sentences (cols).
373,991
def prepare_params(self):
    """Prepare the parameters passed to the templatetag: resolve the
    fragment name, expire time, optional version and the vary-on values.
    """
    if self.options.resolve_fragment:
        # Resolve the fragment name as a template variable.
        self.fragment_name = self.node.fragment_name.resolve(self.context)
    else:
        self.fragment_name = str(self.node.fragment_name)
    # NOTE(review): the next line is garbled by extraction — it was
    # originally a loop over quote characters that validates the literal
    # fragment name and raises with the message "Number of quotes around
    # the fragment name is incoherent".  Recover it from upstream source.
    for char in \"Number of quotes around the fragment name is incoherent')
    self.expire_time = self.get_expire_time()
    if self.options.versioning:
        # Versions are stored as bytes for consistent cache keys.
        self.version = force_bytes(self.get_version())
    # Resolve every vary-on entry against the current template context.
    self.vary_on = [template.Variable(var).resolve(self.context)
                    for var in self.node.vary_on]
Prepare the parameters passed to the templatetag
373,992
def request(self, path, data=None, headers=None, method=None):
    """Perform an HTTP request to the Go server and return the response.

    Text *data* is encoded to bytes first; the presence of data turns
    the request into a POST.  The session cookie from the response is
    stored for subsequent requests.
    """
    if isinstance(data, str):
        data = data.encode()
    prepared = self._request(path, data=data, headers=headers, method=method)
    response = urlopen(prepared)
    self._set_session_cookie(response)
    return response
Performs an HTTP request to the Go server Args: path (str): The full path on the Go server to request. This includes any query string attributes. data (str, dict, bool, optional): If any data is present this request will become a POST request. headers (dict, optional): Headers to set for this particular request Raises: HTTPError: when the HTTP request fails. Returns: file-like object: The response from a :func:`urllib2.urlopen` call
373,993
def post_event_discounts(self, id, **data):
    """POST /events/:id/discounts/ — create a new discount for event *id*
    and return the result.
    """
    url = "/events/{0}/discounts/".format(id)
    return self.post(url, data=data)
POST /events/:id/discounts/ Creates a new discount; returns the result as a :format:`discount` as the key ``discount``.
373,994
def release(self, conn):
    """Release a previously acquired connection back into the pool.

    Fix: the lock was released with a plain acquire/release pair, so an
    exception from ``ConnectionWrapper`` or ``Queue.put`` would leave
    the pool lock held forever.  The release now happens in ``finally``.
    """
    self._pool_lock.acquire()
    try:
        self._pool.put(ConnectionWrapper(self._pool, conn))
        self._current_acquired -= 1
    finally:
        # Always release, even if wrapping or enqueueing fails.
        self._pool_lock.release()
Release a previously acquired connection. The connection is put back into the pool.
373,995
def getenv(option, default=undefined, cast=undefined):
    """Return the value for *option*, or *default* if defined.

    NOTE(review): the body is truncated in this extract — the code that
    reads the environment (and the ``if`` matching the dangling ``else``
    below) is missing; ``value`` is never bound here.  Recover the full
    body from the upstream source before using this function.
    """
    else:
        # Presumably applies *cast* only when one was supplied — confirm
        # against the missing branch above.
        value = cast(value)
    return value
Return the value for option or default if defined.
373,996
def render(self, name, value, attrs=None):
    """Render the widget as three HTML text inputs (year/month/day).

    NOTE(review): multiple string literals below were stripped during
    extraction (the empty initializers, the ``date_parts[]`` keys, the
    ``title=``/``onClick=`` values, and the joiner in ``u.join``) —
    recover the original strings from upstream before relying on this.
    """
    year, month, day = , , 
    if value:
        # Parse an existing W3C-formatted date value into its parts.
        match = W3C_DATE_RE.match(value)
        if match:
            date_parts = match.groupdict()
            year = date_parts[]
            month = date_parts[]
            day = date_parts[]
    # One text input per date component; sizes fit 4-digit year and
    # 2-digit month/day.
    year_html = self.create_textinput(name, self.year_field, year,
        size=4, title=, onClick=)
    month_html = self.create_textinput(name, self.month_field, month,
        size=2, title=, onClick=)
    day_html = self.create_textinput(name, self.day_field, day,
        size=2, title=, onClick=)
    output = [year_html, month_html, day_html]
    return mark_safe(u.join(output))
Render the widget as HTML inputs for display on a form. :param name: form field base name :param value: date value :param attrs: - unused :returns: HTML text with three inputs for year/month/day
373,997
def get_stderr(self, tail=None):
    """Return the output collected so far on standard error.

    :param tail: number of lines to return (default: all collected).

    NOTE(review): the first argument of ``_py2_and_3_joiner`` was
    stripped during extraction — it was presumably the line separator.
    Also, ``[:tail]`` returns the *first* ``tail`` lines, while the
    documented intent is the most-recent lines — confirm upstream.
    """
    if self.finished():
        # Process has exited; make sure reader threads have flushed.
        self.join_threads()
    # Drain everything queued by the reader thread into the local list.
    while not self.stderr_q.empty():
        self.stderr_l.append(self.stderr_q.get_nowait())
    if tail is None:
        tail = len(self.stderr_l)
    return _py2_and_3_joiner(, self.stderr_l[:tail])
Returns current total output written to standard error. :param tail: Return this number of most-recent lines. :return: copy of stderr stream
373,998
def vprintf(self, alevel, format, *args):
    """Write ``format % args`` to stdout if the configured verbosity is
    set and at least *alevel*; otherwise do nothing.
    """
    verbosity = self._verbosity
    if not verbosity or verbosity < alevel:
        return
    sys.stdout.write(format % args)
A verbosity-aware printf.
373,999
def fit_transform(self, X, y=None):
    """Fit the OneHotEncoder to X, then transform X.

    Equivalent to ``self.fit(X).transform(X)`` but more efficient.
    When ``categorical_features`` is ``"auto"``, the categorical columns
    are first selected from X using ``self.threshold``.  *y* is ignored.
    """
    features = self.categorical_features
    if features == "auto":
        features = auto_select_categorical_features(
            X, threshold=self.threshold)
        self.categorical_features = features
    return _transform_selected(
        X, self._fit_transform, features, copy=True)
Fit OneHotEncoder to X, then transform X. Equivalent to self.fit(X).transform(X), but more convenient and more efficient. See fit for the parameters, transform for the return value. Parameters ---------- X : array-like or sparse matrix, shape=(n_samples, n_features) Dense array or sparse matrix. y: array-like {n_samples,} (Optional, ignored) Feature labels