def enable_backups(self, table_name, model):
    """Calls UpdateContinuousBackups on the table according to model.Meta["continuous_backups"]

    :param table_name: The name of the table to enable Continuous Backups on
    :param model: The model to get Continuous Backups settings from
    """
    self._tables.pop(table_name, None)
    request = {
        "TableName": table_name,
        "PointInTimeRecoverySpecification": {"PointInTimeRecoveryEnabled": True}
    }
    try:
        self.dynamodb_client.update_continuous_backups(**request)
    except botocore.exceptions.ClientError as error:
        raise BloopException("Unexpected error while setting Continuous Backups.") from error

def _producer_wrapper(f, port, addr='tcp://127.0.0.1'):
    """A shim that sets up a socket and starts the producer callable.

    Parameters
    ----------
    f : callable
        Callable that takes a single argument, a handle
        for a ZeroMQ PUSH socket. Must be picklable.
    port : int
        The port on which the socket should connect.
    addr : str, optional
        Address to which the socket should connect. Defaults
        to localhost ('tcp://127.0.0.1').
    """
    try:
        context = zmq.Context()
        socket = context.socket(zmq.PUSH)
        socket.connect(':'.join([addr, str(port)]))
        f(socket)
    finally:
        # Works around a Python 3.x bug.
        context.destroy()

def rigid_transform_from_ros(from_frame, to_frame,
                             service_name='rigid_transforms/rigid_transform_listener',
                             namespace=None):
    """Gets transform from ROS as a rigid transform.

    Requires the ROS rigid_transform_publisher service to be running. Assuming
    autolab_core is installed as a catkin package, this can be done with:
    roslaunch autolab_core rigid_transforms.launch

    Parameters
    ----------
    from_frame : :obj:`str`
    to_frame : :obj:`str`
    service_name : string, optional
        RigidTransformListener service to interface with. If the
        RigidTransformListener services are started through
        rigid_transforms.launch it will be called rigid_transform_listener.
    namespace : string, optional
        Namespace to prepend to transform_listener_service. If None, the
        current namespace is prepended.

    Raises
    ------
    rospy.ServiceException
        If the service call to rigid_transform_listener fails.
    """
    if namespace is None:
        service_name = rospy.get_namespace() + service_name
    else:
        service_name = namespace + service_name

    rospy.wait_for_service(service_name, timeout=10)
    listener = rospy.ServiceProxy(service_name, RigidTransformListener)

    ret = listener(from_frame, to_frame)
    quat = np.asarray([ret.w_rot, ret.x_rot, ret.y_rot, ret.z_rot])
    trans = np.asarray([ret.x_trans, ret.y_trans, ret.z_trans])
    rot = RigidTransform.rotation_from_quaternion(quat)
    return RigidTransform(rotation=rot, translation=trans,
                          from_frame=from_frame, to_frame=to_frame)

def copy(self, **replacements):
    """Returns a clone of this M2Coordinate with the given replacements kwargs overlaid."""
    cls = type(self)
    kwargs = {'org': self.org, 'name': self.name, 'ext': self.ext,
              'classifier': self.classifier, 'rev': self.rev}
    kwargs.update(replacements)
    return cls(**kwargs)

def _SignedVarintEncoder():
    """Return an encoder for a basic signed varint value (does not include tag)."""
    def EncodeSignedVarint(write, value):
        if value < 0:
            value += (1 << 64)
        bits = value & 0x7f
        value >>= 7
        while value:
            write(six.int2byte(0x80 | bits))
            bits = value & 0x7f
            value >>= 7
        return write(six.int2byte(bits))

    return EncodeSignedVarint

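# Usage sketch (not from the original source): the returned encoder pushes
# single bytes through a `write` callable, so a bytearray's `extend` works.
# Encoding -1 yields the 64-bit two's complement varint: nine 0xFF bytes
# followed by 0x01.
buf = bytearray()
encode = _SignedVarintEncoder()
encode(buf.extend, -1)
assert bytes(buf) == b'\xff' * 9 + b'\x01'
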
def generate_graphs(result, topic, aspect, for_doc=False):
    """Generate graphs from a result file.

    Parameters
    ----------
    result : str
        path to result file
    topic : str
        benchmark topic; for example "Open file" or "Save file"
    aspect : str
        performance indicator; can be "ram" (RAM memory usage) or "time"
        (elapsed time)
    for_doc : bool
        whether the source code is used inside the documentation
    """
    with open(result, 'r') as f:
        lines = f.readlines()

    platform = 'x86' if '32 bit' in lines[2] else 'x64'

    idx = [i for i, line in enumerate(lines) if line.startswith('==')]
    table_spans = {'open': [idx[1] + 1, idx[2]],
                   'save': [idx[4] + 1, idx[5]],
                   'get': [idx[7] + 1, idx[8]],
                   'convert': [idx[10] + 1, idx[11]],
                   'merge': [idx[13] + 1, idx[14]]}

    start, stop = table_spans[topic.lower()]

    items = [l[:50].strip(' \t\r\n\0*') for l in lines[start:stop]]
    time = np.array([int(l[50:61].strip(' \t\r\n\0*')) for l in lines[start:stop]])
    ram = np.array([int(l[61:].strip(' \t\r\n\0*')) for l in lines[start:stop]])

    array = ram if aspect == 'ram' else time

    y_pos = list(range(len(items)))

    fig, ax = plt.subplots()
    fig.set_size_inches(15, 3.8 / 12 * len(items) + 1.2)

    asam_pos = [i for i, c in enumerate(items) if c.startswith('asam')]
    mdfreader_pos = [i for i, c in enumerate(items) if c.startswith('mdfreader')]

    ax.barh(asam_pos, array[asam_pos], color='green', ecolor='green')
    ax.barh(mdfreader_pos, array[mdfreader_pos], color='blue', ecolor='black')
    ax.set_yticks(y_pos)
    ax.set_yticklabels(items)
    ax.invert_yaxis()
    ax.set_xlabel('Time [ms]' if aspect == 'time' else 'RAM [MB]')
    if topic == 'Get':
        ax.set_title('Get all channels (36424 calls) - {}'
                     .format('time' if aspect == 'time' else 'ram usage'))
    else:
        ax.set_title('{} test file - {}'
                     .format(topic, 'time' if aspect == 'time' else 'ram usage'))
    ax.xaxis.grid()

    fig.subplots_adjust(bottom=0.72 / fig.get_figheight(),
                        top=1 - 0.48 / fig.get_figheight(),
                        left=0.4, right=0.9)

    if aspect == 'time':
        if topic == 'Get':
            name = '{}_get_all_channels.png'.format(platform)
        else:
            name = '{}_{}.png'.format(platform, topic.lower())
    else:
        if topic == 'Get':
            name = '{}_get_all_channels_ram_usage.png'.format(platform)
        else:
            name = '{}_{}_ram_usage.png'.format(platform, topic.lower())

    if for_doc:
        plt.show()
    else:
        plt.savefig(name, dpi=300)

def parse_item(self, response):
    """
    Get basic information about a page, so that it can be passed to the
    `pa11y` tool for further testing.

    @url https://www.google.com/
    @returns items 1 1
    @returns requests 0 0
    @scrapes url request_headers accessed_at page_title
    """
    # if we got redirected to a login page, then login
    if URLObject(response.url).path == LOGIN_HTML_PATH:
        reqs = self.handle_unexpected_redirect_to_login_page(response)
        for req in reqs:
            yield req

    title = response.xpath("//title/text()").extract_first()
    if title:
        title = title.strip()

    # `response.request.headers` is a dictionary where the key is the
    # header name, and the value is a *list*, containing one item,
    # which is the header value. We need to get rid of this list, and just
    # have key-value pairs. (This list probably exists in case the same
    # header is sent multiple times, but that's not happening in this case,
    # and the list construct is getting in the way.)
    #
    # We also need to convert bytes to ASCII. In practice, headers can
    # only contain ASCII characters: see
    # http://stackoverflow.com/questions/5423223/how-to-send-non-english-unicode-string-using-http-header
    request_headers = {key.decode('ascii'): value[0].decode('ascii')
                       for key, value in response.request.headers.items()}

    item = A11yItem(
        url=response.url,
        request_headers=request_headers,
        accessed_at=datetime.utcnow(),
        page_title=title,
    )
    yield item

def draw_interface(objects, callback, callback_text):
    """
    Draws an ncurses interface based on the given object list. Every object
    should have a "string" key, which is what is displayed on the screen;
    callback is called with the selected object.
    Rest of the code is modified from:
    https://stackoverflow.com/a/30834868
    """
    screen = curses.initscr()
    height, width = screen.getmaxyx()
    curses.noecho()
    curses.cbreak()
    curses.start_color()
    screen.keypad(1)
    curses.init_pair(1, curses.COLOR_BLACK, curses.COLOR_CYAN)
    highlightText = curses.color_pair(1)
    normalText = curses.A_NORMAL
    screen.border(0)
    curses.curs_set(0)
    max_row = height - 15  # max number of rows
    box = curses.newwin(max_row + 2, int(width - 2), 1, 1)
    box.box()
    fmt = PartialFormatter()

    row_num = len(objects)

    pages = int(ceil(row_num / max_row))
    position = 1
    page = 1
    for i in range(1, max_row + 1):
        if row_num == 0:
            box.addstr(1, 1, "There aren't strings", highlightText)
        else:
            if i == position:
                box.addstr(i, 2, str(i) + " - " + objects[i - 1]['string'], highlightText)
            else:
                box.addstr(i, 2, str(i) + " - " + objects[i - 1]['string'], normalText)
            if i == row_num:
                break

    screen.refresh()
    box.refresh()

    x = screen.getch()
    while x != 27:
        if x == curses.KEY_DOWN:
            if page == 1:
                if position < i:
                    position = position + 1
                else:
                    if pages > 1:
                        page = page + 1
                        position = 1 + (max_row * (page - 1))
            elif page == pages:
                if position < row_num:
                    position = position + 1
            else:
                if position < max_row + (max_row * (page - 1)):
                    position = position + 1
                else:
                    page = page + 1
                    position = 1 + (max_row * (page - 1))
        if x == curses.KEY_UP:
            if page == 1:
                if position > 1:
                    position = position - 1
            else:
                if position > (1 + (max_row * (page - 1))):
                    position = position - 1
                else:
                    page = page - 1
                    position = max_row + (max_row * (page - 1))

        screen.erase()
        if x == ord("\n") and row_num != 0:
            screen.erase()
            screen.border(0)
            service = objects[position - 1]
            text = fmt.format(callback_text, **service)
            screen.addstr(max_row + 4, 3, text)
            text = callback(service)
            count = 0
            for line in text:
                screen.addstr(max_row + 5 + count, 3, line)
                count += 1

        box.erase()
        screen.border(0)
        box.border(0)

        for i in range(1 + (max_row * (page - 1)), max_row + 1 + (max_row * (page - 1))):
            if row_num == 0:
                box.addstr(1, 1, "There aren't strings", highlightText)
            else:
                if (i + (max_row * (page - 1)) == position + (max_row * (page - 1))):
                    box.addstr(i - (max_row * (page - 1)), 2,
                               str(i) + " - " + objects[i - 1]['string'], highlightText)
                else:
                    box.addstr(i - (max_row * (page - 1)), 2,
                               str(i) + " - " + objects[i - 1]['string'], normalText)
                if i == row_num:
                    break

        screen.refresh()
        box.refresh()
        x = screen.getch()

    curses.endwin()
    exit()

def _ensure_batch_is_sufficiently_small(
        self,
        batch_instances: Iterable[Instance],
        excess: Deque[Instance]) -> List[List[Instance]]:
    """
    If self._maximum_samples_per_batch is specified, then split the batch
    into smaller sub-batches if it exceeds the maximum size.

    Parameters
    ----------
    batch_instances : ``Iterable[Instance]``
        A candidate batch.
    excess : ``Deque[Instance]``
        Instances that were not sufficient to form an entire batch
        previously. They will be used as part of the first sub-batch. This
        will be populated with instances from the end of batch_instances
        that do not consist of more than self._maximum_samples_per_batch
        samples or self._batch_size instances. It is the caller's
        responsibility to place these in a batch too, which may, of course,
        be done in part with subsequent calls to this method.

        WARNING: Mutated in place!
    """
    if self._maximum_samples_per_batch is None:
        assert not excess
        return [list(batch_instances)]

    key, limit = self._maximum_samples_per_batch

    batches: List[List[Instance]] = []
    batch: List[Instance] = []
    padding_length = -1

    excess.extend(batch_instances)
    while excess:
        instance = excess.popleft()

        if self.vocab is not None:
            # we index here to ensure that shape information is available,
            # as in some cases (with self._maximum_samples_per_batch)
            # we need access to shaping information before batches are constructed
            instance.index_fields(self.vocab)

        field_lengths = instance.get_padding_lengths()
        for _, lengths in field_lengths.items():
            try:
                padding_length = max(padding_length, lengths[key])
            except KeyError:
                pass

        proposed_batch_size = len(batch) + 1

        # Adding the current instance would exceed the batch size or sample size.
        if proposed_batch_size >= self._batch_size or padding_length * proposed_batch_size > limit:
            # Output the already existing batch
            batches.append(batch)

            # Put the current instance back, reset state.
            excess.appendleft(instance)
            batch = []
            padding_length = -1
        else:
            batch.append(instance)

    # Keep the current batch as excess.
    excess.extend(batch)

    return batches

def runlist_list(**kwargs):
    """
    Show uploaded runlists.
    """
    ctx = Context(**kwargs)
    ctx.execute_action('runlist:list', **{
        'storage': ctx.repo.create_secure_service('storage'),
    })

def CheckVersion(problems, latest_version=None):
    """
    Check if there is a newer version of transitfeed available.

    Args:
      problems: if a new version is available, a NewVersionAvailable problem
        will be added
      latest_version: if specified, override the latest version read from the
        project page
    """
    if not latest_version:
        timeout = 20
        socket.setdefaulttimeout(timeout)
        request = urllib2.Request(LATEST_RELEASE_VERSION_URL)

        try:
            response = urllib2.urlopen(request)
            content = response.read()
            m = re.search(r'version=(\d+\.\d+\.\d+)', content)
            if m:
                latest_version = m.group(1)
        except urllib2.HTTPError as e:
            description = ('During the new-version check, we failed to reach '
                           'transitfeed server: Reason: %s [%s].' % (e.reason, e.code))
            problems.OtherProblem(
                description=description, type=errors.TYPE_NOTICE)
            return
        except urllib2.URLError as e:
            description = ('During the new-version check, we failed to reach '
                           'transitfeed server. Reason: %s.' % e.reason)
            problems.OtherProblem(
                description=description, type=errors.TYPE_NOTICE)
            return

    if not latest_version:
        description = ('During the new-version check, we had trouble parsing the '
                       'contents of %s.' % LATEST_RELEASE_VERSION_URL)
        problems.OtherProblem(
            description=description, type=errors.TYPE_NOTICE)
        return

    newest_version = _MaxVersion([latest_version, __version__])
    if __version__ != newest_version:
        problems.NewVersionAvailable(newest_version)

def normalize_extension(extension):
    """Normalise a file name extension."""
    extension = decode_path(extension)
    if extension is None:
        return
    if extension.startswith('.'):
        extension = extension[1:]
    if '.' in extension:
        _, extension = os.path.splitext(extension)
    extension = slugify(extension, sep='')
    if extension is None:
        return
    if len(extension):
        return extension

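# Usage sketch (not from the original source; assumes `slugify` lowercases
# and strips unsafe characters, as in the normality-style helpers used above):
# normalize_extension('.TXT')        -> 'txt'
# normalize_extension('my.file.pdf') -> 'pdf'   (splitext keeps the last part)
# normalize_extension('')            -> None
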
def ok(self, text=u"OK", err=False):
    """Set OK (success) finalizer to a spinner."""
    # Do not display spin text for ok state
    self._text = None

    _text = to_text(text) if text else u"OK"
    err = err or not self.write_to_stdout
    self._freeze(_text, err=err)

def proxy_alias(alias_name, node_type):
    """Get a Proxy from the given name to the given node type."""
    proxy = type(
        alias_name,
        (lazy_object_proxy.Proxy,),
        {
            "__class__": object.__dict__["__class__"],
            "__instancecheck__": _instancecheck,
        },
    )
    return proxy(lambda: node_type)

def is_vert_aligned(c):
    """Return true if all the components of c are vertically aligned.

    Vertical alignment means that the bounding boxes of each Mention of c
    shares a similar x-axis value in the visual rendering of the document.

    :param c: The candidate to evaluate
    :rtype: boolean
    """
    return all(
        [
            _to_span(c[i]).sentence.is_visual()
            and bbox_vert_aligned(
                bbox_from_span(_to_span(c[i])), bbox_from_span(_to_span(c[0]))
            )
            for i in range(len(c))
        ]
    )

def get_distance(
    self, l_motor: float, r_motor: float, tm_diff: float
) -> typing.Tuple[float, float]:
    """
    Given motor values and the amount of time elapsed since this was last
    called, retrieves the x,y,angle that the robot has moved. Pass these
    values to :meth:`PhysicsInterface.distance_drive`.

    To update your encoders, use the ``l_position`` and ``r_position``
    attributes of this object.

    :param l_motor: Left motor value (-1 to 1); -1 is forward
    :param r_motor: Right motor value (-1 to 1); 1 is forward
    :param tm_diff: Elapsed time since last call to this function

    :returns: x travel, y travel, angle turned (radians)

    .. note:: If you are using more than 2 motors, it is assumed that
              all motors on each side are set to the same speed. Only
              pass in one of the values from each side
    """
    # This isn't quite right, the right way is to use matrix math. However,
    # this is Good Enough for now...
    x = 0
    y = 0
    angle = 0

    # split the time difference into timestep_ms steps
    total_time = int(tm_diff * 100000)
    steps = total_time // self._timestep
    remainder = total_time % self._timestep
    step = self._timestep / 100000.0
    if remainder:
        last_step = remainder / 100000.0
        steps += 1
    else:
        last_step = step

    while steps != 0:
        if steps == 1:
            tm_diff = last_step
        else:
            tm_diff = step

        steps -= 1

        l = self._lmotor.compute(-l_motor, tm_diff)
        r = self._rmotor.compute(r_motor, tm_diff)

        # Tank drive motion equations
        velocity = (l + r) * 0.5

        # Thanks to Tyler Veness for fixing the rotation equation, via conservation
        # of angular momentum equations
        # -> omega = b * m * (l - r) / J
        rotation = self._bm * (l - r) / self._inertia

        distance = velocity * tm_diff
        turn = rotation * tm_diff

        x += distance * math.cos(angle)
        y += distance * math.sin(angle)

        angle += turn

    return x, y, angle

def transfer(self, receiver_address, amount, from_account):
    """
    Transfer tokens from one account to the receiver address.

    :param receiver_address: Address of the transfer receiver, str
    :param amount: Amount of tokens, int
    :param from_account: Sender account, Account
    :return: bool
    """
    tx_hash = self.send_transaction(
        'transfer',
        (receiver_address, amount),
        transact={'from': from_account.address,
                  'passphrase': from_account.password}
    )
    return self.get_tx_receipt(tx_hash).status == 1

def load_core_file(core_fp):
    """
    For a core OTU data file, returns the Genus-species identifier for each
    data entry.

    :type core_fp: str
    :param core_fp: A file containing core OTU data.
    :rtype: str
    :return: Returns a genus-species identifier based on the identified
             taxonomical level.
    """
    core = {}
    with open(core_fp) as in_f:
        for line in in_f.read().splitlines():
            if not line.startswith("#"):
                otu_id, tax = line.split("\t")
                core[otu_id] = oc.otu_name(ast.literal_eval(tax))
    return core

def get_random_proxy(self):
    """Return a random proxy."""
    idx = randint(0, len(self._list) - 1)
    return self._list[idx]

def close(self):
    """
    Close the internal epoll file descriptor if it isn't closed.

    :raises OSError:
        If the underlying ``close(2)`` fails. The error message matches
        those found in the manual page.
    """
    with self._close_lock:
        epfd = self._epfd
        if epfd >= 0:
            self._epfd = -1
            close(epfd)

def write_bus_data(self, file):
    """Writes bus data as CSV."""
    writer = self._get_writer(file)
    writer.writerow(BUS_ATTRS)
    for bus in self.case.buses:
        writer.writerow([getattr(bus, attr) for attr in BUS_ATTRS])

def register_blueprint(self, blueprint):
    '''
    Register the given blueprint on the current app.

    This method is provided for use inside a plugin's module-level
    :func:`register_plugin` functions.

    :param blueprint: blueprint object with plugin endpoints
    :type blueprint: flask.Blueprint
    '''
    if blueprint not in self._blueprint_known:
        self.app.register_blueprint(blueprint)
        self._blueprint_known.add(blueprint)

def register(self, app, options):
    """Register the blueprint to the mach9 app."""
    url_prefix = options.get('url_prefix', self.url_prefix)

    # Routes
    for future in self.routes:
        # attach the blueprint name to the handler so that it can be
        # prefixed properly in the router
        future.handler.__blueprintname__ = self.name
        # Prepend the blueprint URI prefix if available
        uri = url_prefix + future.uri if url_prefix else future.uri
        app.route(
            uri=uri[1:] if uri.startswith('//') else uri,
            methods=future.methods,
            host=future.host or self.host,
            strict_slashes=future.strict_slashes,
            stream=future.stream
        )(future.handler)

    # Middleware
    for future in self.middlewares:
        if future.args or future.kwargs:
            app.middleware(*future.args, **future.kwargs)(future.middleware)
        else:
            app.middleware(future.middleware)

    # Exceptions
    for future in self.exceptions:
        app.exception(*future.args, **future.kwargs)(future.handler)

    # Static Files
    for future in self.statics:
        # Prepend the blueprint URI prefix if available
        uri = url_prefix + future.uri if url_prefix else future.uri
        app.static(uri, future.file_or_directory, *future.args, **future.kwargs)

    # Event listeners
    for event, listeners in self.listeners.items():
        for listener in listeners:
            app.listener(event)(listener)

def update_identity(self, identity, identity_id):
    """UpdateIdentity.

    :param :class:`<Identity> <azure.devops.v5_0.identity.models.Identity>` identity:
    :param str identity_id:
    """
    route_values = {}
    if identity_id is not None:
        route_values['identityId'] = self._serialize.url('identity_id', identity_id, 'str')
    content = self._serialize.body(identity, 'Identity')
    self._send(http_method='PUT',
               location_id='28010c54-d0c0-4c89-a5b0-1c9e188b9fb7',
               version='5.0',
               route_values=route_values,
               content=content)

def formatdate(timeval=None, localtime=False, usegmt=False):
    """Returns a date string as specified by RFC 2822, e.g.:

    Fri, 09 Nov 2001 01:08:47 -0000

    Optional timeval if given is a floating point time value as accepted by
    gmtime() and localtime(), otherwise the current time is used.

    Optional localtime is a flag that when True, interprets timeval, and
    returns a date relative to the local timezone instead of UTC, properly
    taking daylight savings time into account.

    Optional argument usegmt means that the timezone is written out as
    an ascii string, not numeric one (so "GMT" instead of "+0000"). This
    is needed for HTTP, and is only used when localtime==False.
    """
    # Note: we cannot use strftime() because that honors the locale and RFC
    # 2822 requires that day and month names be the English abbreviations.
    if timeval is None:
        timeval = time.time()
    if localtime:
        now = time.localtime(timeval)
        # Calculate timezone offset, based on whether the local zone has
        # daylight savings time, and whether DST is in effect.
        if time.daylight and now[-1]:
            offset = time.altzone
        else:
            offset = time.timezone
        hours, minutes = divmod(abs(offset), 3600)
        # Remember offset is in seconds west of UTC, but the timezone is in
        # minutes east of UTC, so the signs differ.
        if offset > 0:
            sign = '-'
        else:
            sign = '+'
        zone = '%s%02d%02d' % (sign, hours, minutes // 60)
    else:
        now = time.gmtime(timeval)
        # Timezone offset is always -0000
        if usegmt:
            zone = 'GMT'
        else:
            zone = '-0000'
    return _format_timetuple_and_zone(now, zone)

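# Usage sketch (not from the original source): this mirrors the stdlib
# email.utils.formatdate, so the same calls apply here.
# formatdate(0, usegmt=True)  -> 'Thu, 01 Jan 1970 00:00:00 GMT'   (HTTP style)
# formatdate(0)               -> 'Thu, 01 Jan 1970 00:00:00 -0000' (RFC 2822 default)
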
def p_DefaultValue_string(p):
    """DefaultValue : STRING"""
    p[0] = model.Value(type=model.Value.STRING, value=p[1])

def get_error(self, block=False, timeout=None):
    """Removes and returns an error from self._errors

    Args:
        block(bool): if True, block until an error is available;
            else return None when self._errors is empty
        timeout(int): it blocks at most timeout seconds

    Returns:
        error if self._errors is not empty, else None
    """
    try:
        error = self._errors.get(block=block, timeout=timeout)
        return error
    except Exception:
        return None

def _find_set_members(set):
    '''
    Return a list of members for a set
    '''
    cmd = '{0} list {1}'.format(_ipset_cmd(), set)
    out = __salt__['cmd.run_all'](cmd, python_shell=False)

    if out['retcode'] > 0:
        # Set doesn't exist, return False
        return False

    _tmp = out['stdout'].split('\n')
    members = []
    startMembers = False
    for i in _tmp:
        if startMembers:
            members.append(i)
        if 'Members:' in i:
            startMembers = True
    return members

def rps_at(self, t):
    '''Return rps for second t'''
    if 0 <= t <= self.duration:
        return self.minrps + \
            float(self.maxrps - self.minrps) * t / self.duration
    else:
        return 0

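# Worked example (not from the original source): the ramp is plain linear
# interpolation between minrps and maxrps over the duration.
minrps, maxrps, duration = 0, 100, 10  # hypothetical schedule
t = 5
rps = minrps + float(maxrps - minrps) * t / duration  # 50.0 at the midpoint
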
def put_archive(self, path, data):
    """
    Insert a file or folder in this container using a tar archive as
    source.

    Args:
        path (str): Path inside the container where the file(s) will be
            extracted. Must exist.
        data (bytes): tar data to be extracted

    Returns:
        (bool): True if the call succeeds.

    Raises:
        :py:class:`~docker.errors.APIError`
            If an error occurs.
    """
    return self.client.api.put_archive(self.id, path, data)

def nvrtcGetPTX(self, prog):
    """
    Returns the compiled PTX for the NVRTC program object.
    """
    size = c_size_t()
    code = self._lib.nvrtcGetPTXSize(prog, byref(size))
    self._throw_on_error(code)

    buf = create_string_buffer(size.value)
    code = self._lib.nvrtcGetPTX(prog, buf)
    self._throw_on_error(code)

    return buf.value.decode('utf-8')

def outdict(self, ndigits=3):
    """Return dictionary structure rounded to a given precision."""
    output = self.__dict__.copy()
    for item in output:
        output[item] = round(output[item], ndigits)
    return output

def AutorizarAnticipo(self):
    "Authorize an advance for an electronic primary grain settlement (Liquidación Primaria Electrónica de Granos)"

    # extract and adapt the fields for the advance
    anticipo = {"liquidacion": self.liquidacion}
    liq = anticipo["liquidacion"]
    liq["campaniaPpal"] = self.liquidacion["campaniaPPal"]
    liq["codLocProcedencia"] = self.liquidacion["codLocalidadProcedencia"]
    liq["descPuertoLocalidad"] = self.liquidacion["desPuertoLocalidad"]

    if self.opcionales:
        liq['opcionales'] = self.opcionales

    if self.retenciones:
        anticipo['retenciones'] = self.retenciones

    if self.deducciones:
        anticipo['deducciones'] = self.deducciones

    # call the webservice:
    ret = self.client.lpgAutorizarAnticipo(
        auth={
            'token': self.Token,
            'sign': self.Sign,
            'cuit': self.Cuit,
        },
        anticipo=anticipo,
    )

    # analyze the response
    ret = ret['liqReturn']
    self.__analizar_errores(ret)
    self.AnalizarLiquidacion(ret.get('autorizacion'), self.liquidacion)
    return True

def logtrace(logger, msg, *args, **kwargs):
    '''
    If esgfpid.defaults.LOG_TRACE_TO_DEBUG, messages are treated like debug
    messages (with an added [trace]). Otherwise, they are ignored.
    '''
    if esgfpid.defaults.LOG_TRACE_TO_DEBUG:
        logdebug(logger, '[trace] %s' % msg, *args, **kwargs)

def _sendDDEcommand(self, cmd, timeout=None):
    """Send command to the DDE client"""
    reply = self.conversation.Request(cmd, timeout)
    if self.pyver > 2:
        reply = reply.decode('ascii').rstrip()
    return reply

def listIterators(self, login, tableName):
    """
    Parameters:
     - login
     - tableName
    """
    self.send_listIterators(login, tableName)
    return self.recv_listIterators()

def write(self, nb, fp, **kwargs):
    """Write a notebook to a file-like object"""
    return fp.write(self.writes(nb, **kwargs))

def unpack_fixed8(src):
    """Get a FIXED8 value."""
    dec_part = unpack_ui8(src)
    int_part = unpack_ui8(src)
    return int_part + dec_part / 256

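# Usage sketch (not from the original source): FIXED8 stores the fractional
# byte first, then the integer byte. `unpack_ui8` is assumed to read one
# unsigned byte from a file-like object, e.g.:
import io
import struct

def unpack_ui8(src):  # assumed helper, inferred from the call sites above
    return struct.unpack("B", src.read(1))[0]

src = io.BytesIO(b"\x80\x01")
dec_part_demo = unpack_ui8(src)        # 128
int_part_demo = unpack_ui8(src)        # 1
value = int_part_demo + dec_part_demo / 256  # 1.5
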
def _start_of_century(self):
    """
    Reset the date to the first day of the century.

    :rtype: Date
    """
    year = self.year - 1 - (self.year - 1) % YEARS_PER_CENTURY + 1

    return self.set(year, 1, 1)

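# Worked example (not from the original source): with YEARS_PER_CENTURY = 100,
# centuries run 1901-2000, 2001-2100, ... so both 1999 and 2000 map to 1901.
YEARS_PER_CENTURY = 100
for year in (1999, 2000, 2001):
    start = year - 1 - (year - 1) % YEARS_PER_CENTURY + 1
    print(year, '->', start)  # 1999 -> 1901, 2000 -> 1901, 2001 -> 2001
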
def just(*args):
    '''Works as an infinite loop that yields the given argument(s) over and over.'''
    assert len(args) >= 1, 'generators.just needs at least one arg'
    if len(args) == 1:  # if only one arg is given
        try:
            # try to cycle in a set for iteration speedup
            return cycle(set(args))
        except TypeError:
            # revert to cycling args as a tuple if the arg is unhashable
            return cycle(args)
    else:
        return cycle({args})

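# Usage sketch (not from the original source): a single hashable argument is
# cycled via a one-element set for speed; the stream never ends.
from itertools import islice
ticker = just('tick')
print(list(islice(ticker, 3)))  # ['tick', 'tick', 'tick']
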
def create_key(file_):
    """
    Create a key and save it into ``file_``.

    Note that ``file_`` must be opened in binary mode.
    """
    pkey = crypto.PKey()
    pkey.generate_key(crypto.TYPE_RSA, 2048)
    file_.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
    file_.flush()

def decode_source(source_bytes):
    """Decode bytes representing source code and return the string.

    Universal newline support is used in the decoding.
    """
    # source_bytes_readline = io.BytesIO(source_bytes).readline
    # encoding, _ = detect_encoding(source_bytes_readline)
    newline_decoder = io.IncrementalNewlineDecoder(None, True)
    return newline_decoder.decode(source_to_unicode(source_bytes))

def unhook_wnd_proc(self):
    """Restore previous Window message handler"""
    if not self.__local_wnd_proc_wrapped:
        return
    SetWindowLong(self.__local_win_handle,
                  GWL_WNDPROC,
                  self.__old_wnd_proc)

    # Allow the ctypes wrapper to be garbage collected
    self.__local_wnd_proc_wrapped = None

def percentile(self, p):
    """
    Computes the value at the given percentile ``p``, where ``p`` is in
    [0, 100].
    """
    if not (0 <= p <= 100):
        raise ValueError("p must be between 0 and 100, inclusive.")

    p = float(p) / 100.
    p *= self.n
    c_i = None
    t = 0

    if p == 0:
        return self.C.min_item()[1].mean

    for i, key in enumerate(self.C.keys()):
        c_i_plus_one = self.C[key]
        if i == 0:
            k = c_i_plus_one.count / 2
        else:
            k = (c_i_plus_one.count + c_i.count) / 2.
        if p < t + k:
            z1 = p - t
            z2 = t + k - p
            return (c_i.mean * z2 + c_i_plus_one.mean * z1) / (z1 + z2)
        c_i = c_i_plus_one
        t += k
    return self.C.max_item()[1].mean

def _unpack(c, tmp, package, version, git_url=None):
    """
    Download + unpack given package into temp dir ``tmp``.

    Return ``(real_version, source)`` where ``real_version`` is the "actual"
    version downloaded (e.g. if a Git master was indicated, it will be the SHA
    of master HEAD) and ``source`` is the source directory (relative to
    unpacked source) to import into ``<project>/vendor``.
    """
    real_version = version[:]
    source = None
    if git_url:
        pass
        # git clone into tempdir
        # git checkout <version>
        # set target to checkout
        # if version does not look SHA-ish:
        #   in the checkout, obtain SHA from that branch
        #   set real_version to that value
    else:
        cwd = os.getcwd()
        print("Moving into temp dir %s" % tmp)
        os.chdir(tmp)
        try:
            # Nab from index. Skip wheels; we want to unpack an sdist.
            flags = "--download=. --build=build --no-use-wheel"
            cmd = "pip install %s %s==%s" % (flags, package, version)
            c.run(cmd)
            # Identify basename
            # TODO: glob is bad here because pip install --download gets all
            # dependencies too! ugh. Figure out best approach for that.
            globs = []
            globexpr = ""
            for extension, opener in (
                ("zip", "unzip"),
                ("tgz", "tar xzvf"),
                ("tar.gz", "tar xzvf"),
            ):
                globexpr = "*.{0}".format(extension)
                globs = glob(globexpr)
                if globs:
                    break
            archive = os.path.basename(globs[0])
            source, _, _ = archive.rpartition(".{0}".format(extension))
            c.run("{0} {1}".format(opener, globexpr))
        finally:
            os.chdir(cwd)
    return real_version, source

def get_dataframe_from_variable(nc, data_var):
    """ Returns a Pandas DataFrame of the data.
        This always returns positive down depths
    """
    time_var = nc.get_variables_by_attributes(standard_name='time')[0]

    depth_vars = nc.get_variables_by_attributes(axis=lambda v: v is not None and v.lower() == 'z')
    depth_vars += nc.get_variables_by_attributes(
        standard_name=lambda v: v in ['height', 'depth', 'surface_altitude'],
        positive=lambda x: x is not None)

    # Find the correct depth variable
    depth_var = None
    for d in depth_vars:
        try:
            if d._name in data_var.coordinates.split(" ") or d._name in data_var.dimensions:
                depth_var = d
                break
        except AttributeError:
            continue

    times = netCDF4.num2date(time_var[:], units=time_var.units,
                             calendar=getattr(time_var, 'calendar', 'standard'))
    original_times_size = times.size

    if depth_var is None and hasattr(data_var, 'sensor_depth'):
        depth_type = get_type(data_var.sensor_depth)
        depths = np.asarray([data_var.sensor_depth] * len(times)).flatten()
        values = data_var[:].flatten()
    elif depth_var is None:
        depths = np.asarray([np.nan] * len(times)).flatten()
        depth_type = get_type(depths)
        values = data_var[:].flatten()
    else:
        depths = depth_var[:]
        depth_type = get_type(depths)
        if len(data_var.shape) > 1:
            times = np.repeat(times, depths.size)
            depths = np.tile(depths, original_times_size)
            values = data_var[:, :].flatten()
        else:
            values = data_var[:].flatten()

    if getattr(depth_var, 'positive', 'down').lower() == 'up':
        logger.warning("Converting depths to positive down before returning the DataFrame")
        depths = depths * -1

    # https://github.com/numpy/numpy/issues/4595
    # We can't call astype on a MaskedConstant
    if (isinstance(depths, np.ma.core.MaskedConstant) or
            (hasattr(depths, 'mask') and depths.mask.all())):
        depths = np.asarray([np.nan] * len(times)).flatten()

    df = pd.DataFrame({
        'time': times,
        'value': values.astype(data_var.dtype),
        'unit': data_var.units if hasattr(data_var, 'units') else np.nan,
        'depth': depths.astype(depth_type)
    })

    df.set_index([pd.DatetimeIndex(df['time']), pd.Float64Index(df['depth'])], inplace=True)

    return df

def copy(self):
    """
    Make a copy of the SegmentList.

    :return: A copy of the SegmentList instance.
    :rtype: angr.analyses.cfg_fast.SegmentList
    """
    n = SegmentList()
    n._list = [a.copy() for a in self._list]
    n._bytes_occupied = self._bytes_occupied
    return n

def update(self, slug):
    '''
    Update the page.
    '''
    post_data = self.get_post_data()
    post_data['user_name'] = self.userinfo.user_name

    pageinfo = MWiki.get_by_uid(slug)

    cnt_old = tornado.escape.xhtml_unescape(pageinfo.cnt_md).strip()
    cnt_new = post_data['cnt_md'].strip()
    if cnt_old != cnt_new:
        MWikiHist.create_wiki_history(MWiki.get_by_uid(slug))

    MWiki.update(slug, post_data)
    tornado.ioloop.IOLoop.instance().add_callback(self.cele_gen_whoosh)

    self.redirect('/page/{0}'.format(post_data['slug']))

def handle_response(response):
    """
    Handle a response from the newton API
    """
    response = json.loads(response.read())

    # Was the expression valid?
    if 'error' in response:
        raise ValueError(response['error'])
    else:
        # Some of the strings returned can be parsed to integers or floats
        try:
            return json.loads(response['result'])
        except (TypeError, json.decoder.JSONDecodeError):
            # If the result is NaN, return the actual NaN float
            if response['result'] == 'NaN':
                return float('nan')
            else:
                return response['result']

def enable_directory_service(self, check_peer=False):
    """Enable the directory service.

    :param check_peer: If True, enables server authenticity
                       enforcement. If False, enables directory
                       service integration.
    :type check_peer: bool, optional

    :returns: A dictionary describing the status of the directory service.
    :rtype: ResponseDict
    """
    if check_peer:
        return self.set_directory_service(check_peer=True)
    return self.set_directory_service(enabled=True)

def _get_on_name(self, func):
    """Return `eventname` when the function name is `on_<eventname>()`."""
    r = re.match("^on_(.+)$", func.__name__)
    if r:
        event = r.group(1)
    else:
        raise ValueError("The function name should be "
                         "`on_<eventname>`().")
    return event

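# Usage sketch (not from the original source): the event name is whatever
# follows the `on_` prefix in the handler's __name__.
import re

def on_connect():  # hypothetical handler
    pass

match = re.match("^on_(.+)$", on_connect.__name__)
print(match.group(1))  # 'connect'
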
def update_thesis_information(self):
    """502 degree info - move subfields."""
    fields_502 = record_get_field_instances(self.record, '502')
    for field in fields_502:
        new_subs = []
        for key, value in field[0]:
            if key == 'b':
                new_subs.append(('a', value))
            elif key == 'c':
                new_subs.append(('b', value))
            elif key == 'd':
                new_subs.append(('c', value))
            else:
                new_subs.append((key, value))
        record_delete_field(self.record, tag="502",
                            field_position_global=field[4])
        record_add_field(self.record, "502", subfields=new_subs)

def get(self, name):
    """Returns a Notification by name."""
    if not self.loaded:
        raise RegistryNotLoaded(self)
    if not self._registry.get(name):
        raise NotificationNotRegistered(
            f"Notification not registered. Got '{name}'."
        )
    return self._registry.get(name)

def ungrist(value):
    """ Returns the value without grist.
        If value is a sequence, does it for every value and returns the result as a sequence.
    """
    assert is_iterable_typed(value, basestring) or isinstance(value, basestring)

    def ungrist_one(value):
        stripped = __re_grist_content.match(value)
        if not stripped:
            raise BaseException("in ungrist: '%s' is not of the form <.*>" % value)
        return stripped.group(1)

    if isinstance(value, str):
        return ungrist_one(value)
    else:
        return [ungrist_one(v) for v in value]

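# Usage sketch (not from the original source): "grist" is Boost.Build's
# angle-bracket prefix notation; assuming __re_grist_content matches
# r'^<(.*)>$', stripping it works like this:
import re
stripped = re.match(r"^<(.*)>$", "<toolset>")
print(stripped.group(1))  # 'toolset'
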
def _relative_attention_inner(x, y, z, transpose):
    """Relative position-aware dot-product attention inner calculation.

    This batches matrix multiply calculations to avoid unnecessary broadcasting.

    Args:
      x: Tensor with shape [batch_size, heads, length or 1, length or depth].
      y: Tensor with shape [batch_size, heads, length or 1, depth].
      z: Tensor with shape [length or 1, length, depth].
      transpose: Whether to transpose inner matrices of y and z. Should be true
          if last dimension of x is depth, not length.

    Returns:
      A Tensor with shape [batch_size, heads, length, length or depth].
    """
    batch_size = tf.shape(x)[0]
    heads = x.get_shape().as_list()[1]
    length = tf.shape(x)[2]

    # xy_matmul is [batch_size, heads, length or 1, length or depth]
    xy_matmul = tf.matmul(x, y, transpose_b=transpose)
    # x_t is [length or 1, batch_size, heads, length or depth]
    x_t = tf.transpose(x, [2, 0, 1, 3])
    # x_t_r is [length or 1, batch_size * heads, length or depth]
    x_t_r = tf.reshape(x_t, [length, heads * batch_size, -1])
    # x_tz_matmul is [length or 1, batch_size * heads, length or depth]
    x_tz_matmul = tf.matmul(x_t_r, z, transpose_b=transpose)
    # x_tz_matmul_r is [length or 1, batch_size, heads, length or depth]
    x_tz_matmul_r = tf.reshape(x_tz_matmul, [length, batch_size, heads, -1])
    # x_tz_matmul_r_t is [batch_size, heads, length or 1, length or depth]
    x_tz_matmul_r_t = tf.transpose(x_tz_matmul_r, [1, 2, 0, 3])
    return xy_matmul + x_tz_matmul_r_t

def _download_initial_config(self):
    """Loads the initial config."""
    _initial_config = self._download_running_config()  # this is a bit slow!
    self._last_working_config = _initial_config
    self._config_history.append(_initial_config)
    self._config_history.append(_initial_config)

def receive_request(self, transaction):
    """
    Handle requests coming from the udp socket.

    :param transaction: the transaction created to manage the request
    """
    with transaction:
        transaction.separate_timer = self._start_separate_timer(transaction)

        self._blockLayer.receive_request(transaction)

        if transaction.block_transfer:
            self._stop_separate_timer(transaction.separate_timer)
            self._messageLayer.send_response(transaction)
            self.send_datagram(transaction.response)
            return

        self._observeLayer.receive_request(transaction)

        self._requestLayer.receive_request(transaction)

        if transaction.resource is not None and transaction.resource.changed:
            self.notify(transaction.resource)
            transaction.resource.changed = False
        elif transaction.resource is not None and transaction.resource.deleted:
            self.notify(transaction.resource)
            transaction.resource.deleted = False

        self._observeLayer.send_response(transaction)

        self._blockLayer.send_response(transaction)

        self._stop_separate_timer(transaction.separate_timer)

        self._messageLayer.send_response(transaction)

        if transaction.response is not None:
            if transaction.response.type == defines.Types["CON"]:
                self._start_retransmission(transaction, transaction.response)
            self.send_datagram(transaction.response)

def get_or_create_namespace(self, url: str) -> Union[Namespace, Dict]:
    """Insert the namespace file at the given location to the cache.

    If not cacheable, returns the dict of the values of this namespace.

    :raises: pybel.resources.exc.ResourceError
    """
    result = self.get_namespace_by_url(url)
    if result is not None:
        return result

    t = time.time()
    bel_resource = get_bel_resource(url)

    _clean_bel_namespace_values(bel_resource)

    values = bel_resource['Values']

    if not_resource_cachable(bel_resource):
        log.debug('not caching namespace: %s (%d terms in %.2f seconds)',
                  url, len(values), time.time() - t)
        log.debug('loaded uncached namespace: %s (%d)', url, len(values))
        return values

    namespace_insert_values = _get_namespace_insert_values(bel_resource)

    namespace = Namespace(
        url=url,
        **namespace_insert_values
    )
    namespace.entries = [
        NamespaceEntry(name=name, encoding=encoding)
        for name, encoding in values.items()
    ]

    log.info('inserted namespace: %s (%d terms in %.2f seconds)',
             url, len(values), time.time() - t)

    self.session.add(namespace)
    self.session.commit()

    return namespace

def quotation_markers(self, value):
    """
    Setter for **self.__quotation_markers** attribute.

    :param value: Attribute value.
    :type value: tuple or list
    """
    if value is not None:
        assert type(value) in (tuple, list), \
            "'{0}' attribute: '{1}' type is not 'tuple' or 'list'!".format(
                "quotation_markers", value)
        for element in value:
            assert type(element) is unicode, \
                "'{0}' attribute: '{1}' type is not 'unicode'!".format(
                    "quotation_markers", element)
            assert len(element) == 1, \
                "'{0}' attribute: '{1}' has multiple characters!".format(
                    "quotation_markers", element)
            assert not re.search(r"\w", element), \
                "'{0}' attribute: '{1}' is an alphanumeric character!".format(
                    "quotation_markers", element)
    self.__quotation_markers = value

def set_meta_rdf(self, rdf, fmt='n3'):
    """Set the metadata for this Thing in RDF fmt

    Advanced users who want to manipulate the RDF for this Thing directly
    without the [ThingMeta](ThingMeta.m.html#IoticAgent.IOT.ThingMeta.ThingMeta)
    helper object

    Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException)
    containing the error if the infrastructure detects a problem

    Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException)
    if there is a communications problem between you and the infrastructure

    `fmt` (optional) (string) The format of RDF you have sent.
    Valid formats are: "xml", "n3", "turtle"
    """
    evt = self._client._request_entity_meta_set(self.__lid, rdf, fmt=fmt)
    self._client._wait_and_except_if_failed(evt)

def object_ref(self):
    """Return the reference of the changed object."""
    return ImmutableDict(type=self.type,
                         category_id=self.category_id,
                         event_id=self.event_id,
                         session_id=self.session_id,
                         contrib_id=self.contrib_id,
                         subcontrib_id=self.subcontrib_id)

def indices2one_hot(indices, nb_classes):
    """
    Convert an iterable of indices to a one-hot encoded list.

    You might also be interested in sklearn.preprocessing.OneHotEncoder

    Parameters
    ----------
    indices : iterable
        iterable of indices
    nb_classes : int
        Number of classes

    Returns
    -------
    one_hot : list

    Examples
    --------
    >>> indices2one_hot([0, 1, 1], 3)
    [[1, 0, 0], [0, 1, 0], [0, 1, 0]]
    >>> indices2one_hot([0, 1, 1], 2)
    [[1, 0], [0, 1], [0, 1]]
    """
    if nb_classes < 1:
        raise ValueError('nb_classes={}, but positive number expected'
                         .format(nb_classes))

    one_hot = []
    for index in indices:
        one_hot.append([0] * nb_classes)
        one_hot[-1][index] = 1
    return one_hot

def _on_apply_button_clicked(self, *args):
    """Apply button clicked: apply the configuration"""
    refresh_required = self.core_config_model.apply_preliminary_config()
    refresh_required |= self.gui_config_model.apply_preliminary_config()

    if not self.gui_config_model.config.get_config_value("SESSION_RESTORE_ENABLED"):
        import rafcon.gui.backup.session as backup_session
        logger.info("Removing current session")
        backup_session.reset_session()

    if refresh_required:
        from rafcon.gui.singleton import main_window_controller
        main_window_controller.get_controller('menu_bar_controller').on_refresh_all_activate(None, None)
    self._popup_message()

def d3logpdf_dlink3(self, link_f, y, Y_metadata=None):
    """
    Third order derivative of the log-likelihood function at y given link(f), w.r.t link(f)

    .. math::
        \\frac{d^{3} \\ln p(y_{i}|\\lambda(f_{i}))}{d^{3}\\lambda(f)} = -\\beta^{3}\\frac{d^{2}\\Psi(\\alpha_{i})}{d\\alpha_{i}}\\\\
        \\alpha_{i} = \\beta y_{i}

    :param link_f: latent variables link(f)
    :type link_f: Nx1 array
    :param y: data
    :type y: Nx1 array
    :param Y_metadata: Y_metadata which is not used in gamma distribution
    :returns: third derivative of likelihood evaluated at points f
    :rtype: Nx1 array
    """
    d3lik_dlink3 = -special.polygamma(2, self.beta * link_f) * (self.beta ** 3)
    return d3lik_dlink3

def fit(self, choosers, alternatives, current_choice):
    """
    Fit and save model parameters based on given data.

    Parameters
    ----------
    choosers : pandas.DataFrame
        Table describing the agents making choices, e.g. households.
    alternatives : pandas.DataFrame
        Table describing the things from which agents are choosing,
        e.g. buildings.
    current_choice : pandas.Series or any
        A Series describing the `alternatives` currently chosen
        by the `choosers`. Should have an index matching `choosers`
        and values matching the index of `alternatives`.

        If a non-Series is given it should be a column in `choosers`.

    Returns
    -------
    log_likelihoods : dict
        Dict of log-likelihood values describing the quality of the
        model fit. Will have keys 'null', 'convergence', and 'ratio'.
    """
    logger.debug('start: fit LCM model {}'.format(self.name))

    if not isinstance(current_choice, pd.Series):
        current_choice = choosers[current_choice]

    choosers, alternatives = self.apply_fit_filters(choosers, alternatives)

    if self.estimation_sample_size:
        choosers = choosers.loc[np.random.choice(
            choosers.index,
            min(self.estimation_sample_size, len(choosers)),
            replace=False)]

    current_choice = current_choice.loc[choosers.index]

    _, merged, chosen = interaction.mnl_interaction_dataset(
        choosers, alternatives, self.sample_size, current_choice)

    model_design = dmatrix(
        self.str_model_expression, data=merged, return_type='dataframe')

    if len(merged) != model_design.as_matrix().shape[0]:
        raise ModelEvaluationError(
            'Estimated data does not have the same length as input. '
            'This suggests there are null values in one or more of '
            'the input columns.')

    self.log_likelihoods, self.fit_parameters = mnl.mnl_estimate(
        model_design.as_matrix(), chosen, self.sample_size)
    self.fit_parameters.index = model_design.columns

    logger.debug('finish: fit LCM model {}'.format(self.name))

    return self.log_likelihoods

def validate_json(data, validator):
    """Validate data against a given JSON schema (see https://json-schema.org).

    data: JSON-serializable data to validate.
    validator (jsonschema.DraftXValidator): The validator.
    RETURNS (list): A list of error messages, if available.
    """
    errors = []
    for err in sorted(validator.iter_errors(data), key=lambda e: e.path):
        if err.path:
            err_path = "[{}]".format(" -> ".join([str(p) for p in err.path]))
        else:
            err_path = ""
        msg = err.message + " " + err_path
        if err.context:  # Error has suberrors, e.g. if schema uses anyOf
            suberrs = [" - {}".format(suberr.message) for suberr in err.context]
            msg += ":\n{}".format("".join(suberrs))
        errors.append(msg)
    return errors

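# Usage sketch (not from the original source): pair the function with any
# jsonschema validator instance, e.g. a Draft7Validator.
from jsonschema import Draft7Validator

schema = {
    "type": "object",
    "properties": {"name": {"type": "string"}},
    "required": ["name"],
}
validator = Draft7Validator(schema)
print(validate_json({"name": 123}, validator))
# e.g. ["123 is not of type 'string' [name]"]
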
def match(self, s): """ Matching the pattern to the input string, returns True/False and saves the matched string in the internal list """ if self.re.match(s): self.list.append(s) return True else: return False
Match the pattern against the input string; return True/False and
save the matched string in the internal list
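A minimal sketch of a host class this method could live on; the PatternCollector name and its attributes are assumptions, chosen only to match the `self.re` and `self.list` the method expects.

import re

class PatternCollector:
    # Hypothetical container: `re` is a compiled pattern, `list` holds matches.
    def __init__(self, pattern):
        self.re = re.compile(pattern)
        self.list = []

PatternCollector.match = match  # attach the method defined above

collector = PatternCollector(r"\d+")
print(collector.match("42"))     # True
print(collector.match("forty"))  # False
print(collector.list)            # ['42']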
def minimize(bed_file): """ strip a BED file down to its three necessary columns: chrom start end """ if not bed_file: return bed_file else: sorted_bed = bt.BedTool(bed_file).cut(range(3)).sort() if not sorted_bed.fn.endswith(".bed"): return sorted_bed.moveto(sorted_bed.fn + ".bed") else: return sorted_bed
strip a BED file down to its three necessary columns: chrom start end
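An illustrative call, assuming pybedtools is installed (imported as `bt` in the function body) and that regions.bed is an existing BED file.

# Assumption: pybedtools is available and regions.bed exists on disk.
three_col = minimize("regions.bed")
print(three_col.fn)  # path to the sorted, three-column .bed file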
def _GetAccessToken(self): """Gets oauth2 access token for Gitkit API using service account. Returns: string, oauth2 access token. """ d = { 'assertion': self._GenerateAssertion(), 'grant_type': 'urn:ietf:params:oauth:grant-type:jwt-bearer', } try: body = parse.urlencode(d) except AttributeError: body = urllib.urlencode(d) req = urllib_request.Request(RpcHelper.TOKEN_ENDPOINT) req.add_header('Content-type', 'application/x-www-form-urlencoded') binary_body = body.encode('utf-8') raw_response = urllib_request.urlopen(req, binary_body) return simplejson.loads(raw_response.read())['access_token']
Gets oauth2 access token for Gitkit API using service account. Returns: string, oauth2 access token.
def available_state(self, state: State) -> Tuple[State, ...]:
    """
    Return the states reachable from a given state.
    """
    result = []
    for gene in self.genes:
        result.extend(self.available_state_for_gene(gene, state))
    if len(result) > 1 and state in result:
        result.remove(state)
    return tuple(result)
Return the states reachable from a given state.
def validate(self, document, schema=None, update=False, normalize=True): """ Normalizes and validates a mapping against a validation-schema of defined rules. :param document: The document to normalize. :type document: any :term:`mapping` :param schema: The validation schema. Defaults to :obj:`None`. If not provided here, the schema must have been provided at class instantiation. :type schema: any :term:`mapping` :param update: If ``True``, required fields won't be checked. :type update: :class:`bool` :param normalize: If ``True``, normalize the document before validation. :type normalize: :class:`bool` :return: ``True`` if validation succeeds, otherwise ``False``. Check the :func:`errors` property for a list of processing errors. :rtype: :class:`bool` """ self.update = update self._unrequired_by_excludes = set() self.__init_processing(document, schema) if normalize: self.__normalize_mapping(self.document, self.schema) for field in self.document: if self.ignore_none_values and self.document[field] is None: continue definitions = self.schema.get(field) if definitions is not None: self.__validate_definitions(definitions, field) else: self.__validate_unknown_fields(field) if not self.update: self.__validate_required_fields(self.document) self.error_handler.end(self) return not bool(self._errors)
Normalizes and validates a mapping against a validation-schema of defined rules. :param document: The document to normalize. :type document: any :term:`mapping` :param schema: The validation schema. Defaults to :obj:`None`. If not provided here, the schema must have been provided at class instantiation. :type schema: any :term:`mapping` :param update: If ``True``, required fields won't be checked. :type update: :class:`bool` :param normalize: If ``True``, normalize the document before validation. :type normalize: :class:`bool` :return: ``True`` if validation succeeds, otherwise ``False``. Check the :func:`errors` property for a list of processing errors. :rtype: :class:`bool`
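A usage sketch with the cerberus package, on whose Validator this method lives; the schema and documents are illustrative.

from cerberus import Validator

schema = {"name": {"type": "string", "required": True},
          "age": {"type": "integer", "min": 0}}
v = Validator(schema)

print(v.validate({"name": "Ada", "age": 36}))  # True
print(v.validate({"age": -1}))                 # False
print(v.errors)  # {'age': ['min value is 0'], 'name': ['required field']}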
def transition_matrix_reversible_pisym(C, return_statdist=False, **kwargs):
    r"""
    Estimates reversible transition matrix as follows:

    .. math::

        p_{ij} = c_{ij} / c_i \quad \text{where} \quad c_i = \sum_j c_{ij}

        \pi_j = \sum_i \pi_i p_{ij}

        x_{ij} = \pi_i p_{ij} + \pi_j p_{ji}

        p^{\mathrm{rev}}_{ij} = x_{ij} / x_i \quad \text{where} \quad x_i = \sum_j x_{ij}

    In words: takes the nonreversible transition matrix estimate, uses its
    stationary distribution to compute an equilibrium correlation matrix,
    symmetrizes that correlation matrix and then normalizes to the reversible
    transition matrix estimate.

    Parameters
    ----------
    C: ndarray, shape (n,n)
        count matrix
    return_statdist : bool, optional
        If True, also return the stationary distribution of the
        nonreversible estimate (which the reversible estimate shares).

    Returns
    -------
    T: Estimated transition matrix

    """
    # nonreversible estimate
    T_nonrev = transition_matrix_non_reversible(C)
    from msmtools.analysis import stationary_distribution
    pi = stationary_distribution(T_nonrev)
    # correlation matrix
    X = scipy.sparse.diags(pi).dot(T_nonrev)
    X = X.T + X
    # result
    pi_rev = np.array(X.sum(axis=1)).squeeze()
    T_rev = scipy.sparse.diags(1.0/pi_rev).dot(X)
    if return_statdist:
        #np.testing.assert_allclose(pi, stationary_distribution(T_rev))
        #np.testing.assert_allclose(T_rev.T.dot(pi), pi)
        return T_rev, pi
    return T_rev
r""" Estimates reversible transition matrix as follows: ..:math: p_{ij} = c_{ij} / c_i where c_i = sum_j c_{ij} \pi_j = \sum_j \pi_i p_{ij} x_{ij} = \pi_i p_{ij} + \pi_j p_{ji} p^{rev}_{ij} = x_{ij} / x_i where x_i = sum_j x_{ij} In words: takes the nonreversible transition matrix estimate, uses its stationary distribution to compute an equilibrium correlation matrix, symmetrizes that correlation matrix and then normalizes to the reversible transition matrix estimate. Parameters ---------- C: ndarray, shape (n,n) count matrix Returns ------- T: Estimated transition matrix
def slice_begin(self, tensor_shape, pnum): """Begin position for the tensor slice for the given processor. Args: tensor_shape: Shape. pnum: int <= self.size. Returns: list of integers with length tensor_shape.ndims. """ tensor_layout = self.tensor_layout(tensor_shape) coordinates = pnum_to_processor_coordinates(self.shape, pnum) ret = [] for dim_size, mesh_axis in zip( tensor_shape.to_integer_list, tensor_layout.tensor_axis_to_mesh_axis): if mesh_axis is None: ret.append(0) else: ret.append( dim_size // self.shape[mesh_axis].size * coordinates[mesh_axis]) return ret
Begin position for the tensor slice for the given processor. Args: tensor_shape: Shape. pnum: int <= self.size. Returns: list of integers with length tensor_shape.ndims.
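The index arithmetic can be sketched in pure Python; the 2x4 mesh, the [8, 12] tensor shape, and the helpers below are assumptions chosen for illustration, not the mesh-tensorflow API.

mesh_shape = [2, 4]                # assumed processor mesh
tensor_shape = [8, 12]             # assumed tensor shape
tensor_axis_to_mesh_axis = [0, 1]  # assumed layout

def processor_coordinates(pnum):
    # last mesh axis varies fastest (an assumption mirroring the helper)
    coords = []
    for size in reversed(mesh_shape):
        coords.append(pnum % size)
        pnum //= size
    return coords[::-1]

def begin(pnum):
    coords = processor_coordinates(pnum)
    return [dim // mesh_shape[axis] * coords[axis]
            for dim, axis in zip(tensor_shape, tensor_axis_to_mesh_axis)]

print(begin(0))  # [0, 0]
print(begin(5))  # [4, 3] -- processor (1, 1) owns the slice starting there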
def get_sorted_hdrgo2usrgos(self, hdrgos, flat_list=None, hdrgo_prt=True, hdrgo_sort=True):
    """Return GO IDs sorted using go2nt's namedtuple."""
    # Return user-specified sort or default sort of header and user GO IDs
    sorted_hdrgos_usrgos = []
    h2u_get = self.grprobj.hdrgo2usrgos.get
    # Sort GO group headers using GO info in go2nt
    hdr_go2nt = self._get_go2nt(hdrgos)
    if hdrgo_sort is True:
        hdr_go2nt = sorted(hdr_go2nt.items(), key=lambda t: self.hdrgo_sortby(t[1]))
    for hdrgo_id, hdrgo_nt in hdr_go2nt:
        if flat_list is not None:
            if hdrgo_prt or hdrgo_id in self.grprobj.usrgos:
                flat_list.append(hdrgo_nt)
        # Sort user GOs which are under the current GO header
        usrgos_unsorted = h2u_get(hdrgo_id)
        if usrgos_unsorted:
            usrgo2nt = self._get_go2nt(usrgos_unsorted)
            usrgont_sorted = sorted(usrgo2nt.items(), key=lambda t: self.usrgo_sortby(t[1]))
            usrgos_sorted, usrnts_sorted = zip(*usrgont_sorted)
            if flat_list is not None:
                flat_list.extend(usrnts_sorted)
            sorted_hdrgos_usrgos.append((hdrgo_id, usrgos_sorted))
        else:
            sorted_hdrgos_usrgos.append((hdrgo_id, []))
    return cx.OrderedDict(sorted_hdrgos_usrgos)
Return GO IDs sorted using go2nt's namedtuple.
def cmd_ping(ip, interface, count, timeout, wait, verbose): """The classic ping tool that send ICMP echo requests. \b # habu.ping 8.8.8.8 IP / ICMP 8.8.8.8 > 192.168.0.5 echo-reply 0 / Padding IP / ICMP 8.8.8.8 > 192.168.0.5 echo-reply 0 / Padding IP / ICMP 8.8.8.8 > 192.168.0.5 echo-reply 0 / Padding IP / ICMP 8.8.8.8 > 192.168.0.5 echo-reply 0 / Padding """ if interface: conf.iface = interface conf.verb = False conf.L3socket=L3RawSocket layer3 = IP() layer3.dst = ip layer3.tos = 0 layer3.id = 1 layer3.flags = 0 layer3.frag = 0 layer3.ttl = 64 layer3.proto = 1 # icmp layer4 = ICMP() layer4.type = 8 # echo-request layer4.code = 0 layer4.id = 0 layer4.seq = 0 pkt = layer3 / layer4 counter = 0 while True: ans = sr1(pkt, timeout=timeout) if ans: if verbose: ans.show() else: print(ans.summary()) del(ans) else: print('Timeout') counter += 1 if count != 0 and counter == count: break sleep(wait) return True
The classic ping tool that send ICMP echo requests. \b # habu.ping 8.8.8.8 IP / ICMP 8.8.8.8 > 192.168.0.5 echo-reply 0 / Padding IP / ICMP 8.8.8.8 > 192.168.0.5 echo-reply 0 / Padding IP / ICMP 8.8.8.8 > 192.168.0.5 echo-reply 0 / Padding IP / ICMP 8.8.8.8 > 192.168.0.5 echo-reply 0 / Padding
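The same exchange can be sketched directly with scapy (requires root privileges; the target address is illustrative):

from scapy.all import IP, ICMP, sr1

pkt = IP(dst="8.8.8.8", ttl=64) / ICMP(type=8, code=0)  # echo-request
ans = sr1(pkt, timeout=2, verbose=False)
print(ans.summary() if ans is not None else "Timeout")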
def _add_explicit_includes(lines, dependencies=None, extralinks=None): """Adds any relevant libraries that need to be explicitly included according to the fortpy configuration file. Libraries are appended to the specified collection of lines. Returns true if relevant libraries were added. """ from fortpy import config import sys from os import path includes = sys.modules["config"].includes linklibs = False if extralinks is not None and len(extralinks) > 0: for i, link in enumerate(extralinks): lines.append("LBD{0:d} = {1}".format(i, link)) lines.append("") if len(includes) > 0: lines.append("LIBS\t\t= \\") for library in includes: addlib = False if "modules" in library: #We need to loop over the modules specified for the library and see #if any of them are in our list of modules. for libmod in library["modules"]: if dependencies is None or libmod.lower() in dependencies: addlib = True break else: addlib = True if addlib: linklibs = True lines.append("\t\t{} \\".format(library["path"])) #These links specify explicit libraries to include in the final compilation. if extralinks is not None: for i in range(len(extralinks)): if path.isfile(extralinks[i]): lines.append("\t\t$(LBD{0:d}) \\".format(i)) return linklibs or (extralinks is not None and len(extralinks) > 0)
Adds any relevant libraries that need to be explicitly included according to the fortpy configuration file. Libraries are appended to the specified collection of lines. Returns true if relevant libraries were added.
def parse_error(self, tup_tree): """ Parse the tuple for an ERROR element: :: <!ELEMENT ERROR (INSTANCE*)> <!ATTLIST ERROR CODE CDATA #REQUIRED DESCRIPTION CDATA #IMPLIED> """ self.check_node(tup_tree, 'ERROR', ('CODE',), ('DESCRIPTION',), ('INSTANCE',)) # self.list_of_various() has the same effect as self.list_of_same() # when used with a single allowed child element, but is a little # faster. instance_list = self.list_of_various(tup_tree, ('INSTANCE',)) return (name(tup_tree), attrs(tup_tree), instance_list)
Parse the tuple for an ERROR element: :: <!ELEMENT ERROR (INSTANCE*)> <!ATTLIST ERROR CODE CDATA #REQUIRED DESCRIPTION CDATA #IMPLIED>
def extrude( self, input_entity, translation_axis=None, rotation_axis=None, point_on_axis=None, angle=None, num_layers=None, recombine=False, ): """Extrusion (translation + rotation) of any entity along a given translation_axis, around a given rotation_axis, about a given angle. If one of the entities is not provided, this method will produce only translation or rotation. """ self._EXTRUDE_ID += 1 if _is_string(input_entity): entity = Dummy(input_entity) elif isinstance(input_entity, PointBase): entity = Dummy("Point{{{}}}".format(input_entity.id)) elif isinstance(input_entity, SurfaceBase): entity = Dummy("Surface{{{}}}".format(input_entity.id)) elif hasattr(input_entity, "surface"): entity = Dummy("Surface{{{}}}".format(input_entity.surface.id)) else: assert isinstance(input_entity, LineBase), "Illegal extrude entity." entity = Dummy("Line{{{}}}".format(input_entity.id)) extrusion_string = "" # out[] = Extrude{0,1,0}{ Line{1}; }; name = "ex{}".format(self._EXTRUDE_ID) if translation_axis is not None: if rotation_axis is not None: extrusion_string += "{}[] = Extrude{{{{{}}}, {{{}}}, {{{}}}, {}}}{{{};".format( name, ",".join(repr(x) for x in translation_axis), ",".join(repr(x) for x in rotation_axis), ",".join(repr(x) for x in point_on_axis), angle, entity.id, ) else: # Only translation extrusion_string += "{}[] = Extrude {{{}}} {{{};".format( name, ",".join(repr(x) for x in translation_axis), entity.id ) else: assert ( rotation_axis is not None ), "Specify at least translation or rotation." # Only rotation extrusion_string += "{}[] = Extrude{{{{{}}}, {{{}}}, {}}}{{{};".format( name, ",".join(repr(x) for x in rotation_axis), ",".join(repr(x) for x in point_on_axis), angle, entity.id, ) if num_layers is not None: extrusion_string += " Layers{{{}}}; {}".format( num_layers, "Recombine;" if recombine else "" ) # close command extrusion_string += "};" self._GMSH_CODE.append(extrusion_string) # From <https://www.manpagez.com/info/gmsh/gmsh-2.4.0/gmsh_66.php>: # # > In this last extrusion command we retrieved the volume number # > programatically by saving the output of the command into a # > list. This list will contain the "top" of the extruded surface (in # > out[0]) as well as the newly created volume (in out[1]). # top = "{}[0]".format(name) extruded = "{}[1]".format(name) if isinstance(input_entity, LineBase): top = LineBase(top) # A surface extruded from a single line has always 4 edges extruded = SurfaceBase(extruded, 4) elif isinstance(input_entity, SurfaceBase): top = SurfaceBase(top, input_entity.num_edges) extruded = VolumeBase(extruded) elif isinstance(input_entity, PointBase): top = PointBase(top) extruded = LineBase(extruded) else: top = Dummy(top) extruded = Dummy(extruded) lat = [] # lateral surfaces can be deduced only if we start from a SurfaceBase if isinstance(input_entity, SurfaceBase): # out[0]` is the surface, out[1] the top, and everything after that # the sides, cf. # <https://gmsh.info/doc/texinfo/gmsh.html#Extrusions>. Each # lateral surface has 4 edges: the one from input_entity, the one # from top, and the two lines (or splines) connecting their extreme # points. lat = [ SurfaceBase("{}[{}]".format(name, i + 2), 4) for i in range(input_entity.num_edges) ] return top, extruded, lat
Extrusion (translation + rotation) of any entity along a given translation_axis, around a given rotation_axis, about a given angle. If one of the entities is not provided, this method will produce only translation or rotation.
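An illustrative call using the legacy pygmsh built_in API this method belongs to (newer pygmsh releases changed the interface); point coordinates and mesh size are arbitrary.

import pygmsh

# legacy pygmsh (<= 6.x) built_in API; an assumption for this sketch
geom = pygmsh.built_in.Geometry()
p0 = geom.add_point([0.0, 0.0, 0.0], lcar=0.1)
p1 = geom.add_point([1.0, 0.0, 0.0], lcar=0.1)
line = geom.add_line(p0, p1)

# translation only: sweep the line one unit along z
top, extruded, lat = geom.extrude(line, translation_axis=[0, 0, 1])
print(geom.get_code())  # the generated Gmsh code contains the Extrude command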
def setup_address(self, name, address=default, transact={}): """ Set up the name to point to the supplied address. The sender of the transaction must own the name, or its parent name. Example: If the caller owns ``parentname.eth`` with no subdomains and calls this method with ``sub.parentname.eth``, then ``sub`` will be created as part of this call. :param str name: ENS name to set up :param str address: name will point to this address, in checksum format. If ``None``, erase the record. If not specified, name will point to the owner's address. :param dict transact: the transaction configuration, like in :meth:`~web3.eth.Eth.sendTransaction` :raises InvalidName: if ``name`` has invalid syntax :raises UnauthorizedError: if ``'from'`` in `transact` does not own `name` """ owner = self.setup_owner(name, transact=transact) self._assert_control(owner, name) if is_none_or_zero_address(address): address = None elif address is default: address = owner elif is_binary_address(address): address = to_checksum_address(address) elif not is_checksum_address(address): raise ValueError("You must supply the address in checksum format") if self.address(name) == address: return None if address is None: address = EMPTY_ADDR_HEX transact['from'] = owner resolver = self._set_resolver(name, transact=transact) return resolver.functions.setAddr(raw_name_to_hash(name), address).transact(transact)
Set up the name to point to the supplied address. The sender of the transaction must own the name, or its parent name. Example: If the caller owns ``parentname.eth`` with no subdomains and calls this method with ``sub.parentname.eth``, then ``sub`` will be created as part of this call. :param str name: ENS name to set up :param str address: name will point to this address, in checksum format. If ``None``, erase the record. If not specified, name will point to the owner's address. :param dict transact: the transaction configuration, like in :meth:`~web3.eth.Eth.sendTransaction` :raises InvalidName: if ``name`` has invalid syntax :raises UnauthorizedError: if ``'from'`` in `transact` does not own `name`
def create_model(  # noqa: C901 (ignore complexity)
    model_name: str,
    *,
    __config__: Type[BaseConfig] = None,
    __base__: Type[BaseModel] = None,
    __module__: Optional[str] = None,
    __validators__: Dict[str, classmethod] = None,
    **field_definitions: Any,
) -> BaseModel:
    """
    Dynamically create a model.
    :param model_name: name of the created model
    :param __config__: config class to use for the new model
    :param __base__: base class for the new model to inherit from
    :param __validators__: a dict of method names and @validator class methods
    :param **field_definitions: fields of the model (or extra fields if a base is supplied) in the format
        `<name>=(<type>, <default value>)` or `<name>=<default value>`, e.g. `foobar=(str, ...)` or `foobar=123`
    """
    if __base__:
        if __config__ is not None:
            raise ConfigError('to avoid confusion __config__ and __base__ cannot be used together')
    else:
        __base__ = BaseModel

    fields = {}
    annotations = {}

    for f_name, f_def in field_definitions.items():
        if f_name.startswith('_'):
            warnings.warn(f'fields may not start with an underscore, ignoring "{f_name}"', RuntimeWarning)
        if isinstance(f_def, tuple):
            try:
                f_annotation, f_value = f_def
            except ValueError as e:
                raise ConfigError(
                    f'field definitions should either be a tuple of (<type>, <default>) or just a '
                    f'default value, unfortunately this means tuples as '
                    f'default values are not allowed'
                ) from e
        else:
            f_annotation, f_value = None, f_def

        if f_annotation:
            annotations[f_name] = f_annotation
        fields[f_name] = f_value

    namespace: 'DictStrAny' = {'__annotations__': annotations, '__module__': __module__}
    if __validators__:
        namespace.update(__validators__)
    namespace.update(fields)
    if __config__:
        namespace['Config'] = inherit_config(__config__, BaseConfig)

    return type(model_name, (__base__,), namespace)
Dynamically create a model.
:param model_name: name of the created model
:param __config__: config class to use for the new model
:param __base__: base class for the new model to inherit from
:param __validators__: a dict of method names and @validator class methods
:param **field_definitions: fields of the model (or extra fields if a base is supplied) in the format
    `<name>=(<type>, <default value>)` or `<name>=<default value>`, e.g. `foobar=(str, ...)` or `foobar=123`
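A usage sketch following the documented format; this mirrors pydantic's own example.

from pydantic import create_model

FooBarModel = create_model(
    'FooBarModel',
    foo=(str, ...),  # required str field
    bar=123,         # optional int field defaulting to 123
)

m = FooBarModel(foo='hello')
print(m.foo, m.bar)  # hello 123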
def send_response(self, transaction): """ Finalize to add the client to the list of observer. :type transaction: Transaction :param transaction: the transaction that owns the response :return: the transaction unmodified """ host, port = transaction.request.source key_token = hash(str(host) + str(port) + str(transaction.request.token)) if key_token in self._relations: if transaction.response.code == defines.Codes.CONTENT.number: if transaction.resource is not None and transaction.resource.observable: transaction.response.observe = transaction.resource.observe_count self._relations[key_token].allowed = True self._relations[key_token].transaction = transaction self._relations[key_token].timestamp = time.time() else: del self._relations[key_token] elif transaction.response.code >= defines.Codes.ERROR_LOWER_BOUND: del self._relations[key_token] return transaction
Finalize to add the client to the list of observer. :type transaction: Transaction :param transaction: the transaction that owns the response :return: the transaction unmodified
def get(self, alias: str):
    """
    Retrieve the cache identified by the given alias. Will always return
    the same instance.

    If the cache was not instantiated yet, it will be created lazily the
    first time this is called.

    :param alias: str cache alias
    :return: cache instance
    """
    try:
        return self._caches[alias]
    except KeyError:
        pass

    config = self.get_alias_config(alias)
    cache = _create_cache(**deepcopy(config))
    self._caches[alias] = cache
    return cache
Retrieve the cache identified by the given alias. Will always return
the same instance.

If the cache was not instantiated yet, it will be created lazily the
first time this is called.

:param alias: str cache alias
:return: cache instance
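An aiocache-flavoured sketch of the lazy, singleton-per-alias behaviour; the configuration dict and the "default" alias are assumptions.

from aiocache import caches

# assumed configuration; any aiocache backend would do here
caches.set_config({
    "default": {
        "cache": "aiocache.SimpleMemoryCache",
    }
})

cache = caches.get("default")          # created lazily on first access
assert cache is caches.get("default")  # the same instance every time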
def get_field_def( schema: GraphQLSchema, parent_type: GraphQLObjectType, field_name: str ) -> GraphQLField: """Get field definition. This method looks up the field on the given type definition. It has special casing for the two introspection fields, `__schema` and `__typename`. `__typename` is special because it can always be queried as a field, even in situations where no other fields are allowed, like on a Union. `__schema` could get automatically added to the query type, but that would require mutating type definitions, which would cause issues. """ if field_name == "__schema" and schema.query_type == parent_type: return SchemaMetaFieldDef elif field_name == "__type" and schema.query_type == parent_type: return TypeMetaFieldDef elif field_name == "__typename": return TypeNameMetaFieldDef return parent_type.fields.get(field_name)
Get field definition. This method looks up the field on the given type definition. It has special casing for the two introspection fields, `__schema` and `__typename`. `__typename` is special because it can always be queried as a field, even in situations where no other fields are allowed, like on a Union. `__schema` could get automatically added to the query type, but that would require mutating type definitions, which would cause issues.
def intersect_3d(p1, p2):
    """Find the closest point for a given set of lines in 3D.

    Parameters
    ----------
    p1 : (M, N) array_like
        Starting points
    p2 : (M, N) array_like
        End points.

    Returns
    -------
    x : (N,) ndarray
        Least-squares solution - the closest point of the intersections.

    Raises
    ------
    numpy.linalg.LinAlgError
        If computation does not converge.

    """
    v = p2 - p1
    normed_v = unit_vector(v)
    nx = normed_v[:, 0]
    ny = normed_v[:, 1]
    nz = normed_v[:, 2]
    xx = np.sum(nx**2 - 1)
    yy = np.sum(ny**2 - 1)
    zz = np.sum(nz**2 - 1)
    xy = np.sum(nx * ny)
    xz = np.sum(nx * nz)
    yz = np.sum(ny * nz)
    M = np.array([(xx, xy, xz), (xy, yy, yz), (xz, yz, zz)])
    x = np.sum(
        p1[:, 0] * (nx**2 - 1) + p1[:, 1] * (nx * ny) + p1[:, 2] * (nx * nz)
    )
    y = np.sum(
        p1[:, 0] * (nx * ny) + p1[:, 1] * (ny * ny - 1) + p1[:, 2] * (ny * nz)
    )
    z = np.sum(
        p1[:, 0] * (nx * nz) + p1[:, 1] * (ny * nz) + p1[:, 2] * (nz**2 - 1)
    )
    return np.linalg.lstsq(M, np.array((x, y, z)), rcond=None)[0]
Find the closest point for a given set of lines in 3D.

Parameters
----------
p1 : (M, N) array_like
    Starting points
p2 : (M, N) array_like
    End points.

Returns
-------
x : (N,) ndarray
    Least-squares solution - the closest point of the intersections.

Raises
------
numpy.linalg.LinAlgError
    If computation does not converge.
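A worked example, assuming the module-level unit_vector helper normalizes each row: two coplanar lines crossing at (1, 1, 0) recover that point.

import numpy as np

p1 = np.array([[0., 0., 0.],   # line 1 runs (0,0,0) -> (2,2,0)
               [2., 0., 0.]])  # line 2 runs (2,0,0) -> (0,2,0)
p2 = np.array([[2., 2., 0.],
               [0., 2., 0.]])

print(intersect_3d(p1, p2))    # approximately [1. 1. 0.]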
def pred_eq(self, n, val): """ Test if a node set with setint or setstr equal a certain value example:: R = [ __scope__:n ['a' #setint(n, 12) | 'b' #setint(n, 14)] C [#eq(n, 12) D] ] """ v1 = n.value v2 = val if hasattr(val, 'value'): v2 = val.value if isinstance(v1, int) and not isinstance(v2, int): return v1 == int(v2) return v1 == v2
Test if a node set with setint or setstr equal a certain value example:: R = [ __scope__:n ['a' #setint(n, 12) | 'b' #setint(n, 14)] C [#eq(n, 12) D] ]
def from_body(cls, body): """Create a tunnelling request from a given body of a KNX/IP frame.""" # TODO: Check length request = cls() request.channel = body[1] request.seq = body[2] request.cemi = body[4:] return request
Create a tunnelling request from a given body of a KNX/IP frame.
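A byte-layout sketch: body[1] is the channel, body[2] the sequence counter, and body[4:] the cEMI frame; the class name and the bytes below are illustrative assumptions.

cemi = bytes([0x29, 0x00, 0xBC, 0xE0])         # illustrative cEMI prefix
body = bytes([0x04, 0x01, 0x17, 0x00]) + cemi  # len, channel, seq, reserved

req = TunnellingRequest.from_body(body)        # assumed enclosing class name
print(req.channel, req.seq, req.cemi == cemi)  # 1 23 True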
def insert_code(filename, code, save=True, marker='# ATX CODE END'):
    """ Insert code above the marker line, or append code and marker to
    the end of the file when the marker is missing """
    content = ''
    found = False
    # read in text mode so the comparison with the marker string works
    for line in open(filename, 'r'):
        if not found and line.strip() == marker:
            found = True
            cnt = line.find(marker)
            content += line[:cnt] + code
        content += line
    if not found:
        if not content.endswith('\n'):
            content += '\n'
        content += code + marker + '\n'
    if save:
        with open(filename, 'w') as f:
            f.write(content)
    return content
Insert code above the marker line, or append code and marker to
the end of the file when the marker is missing
def validate(identifier): '''Validate a source given its identifier''' source = actions.validate_source(identifier) log.info('Source %s (%s) has been validated', source.slug, str(source.id))
Validate a source given its identifier
def get_repo(self, repo: str, branch: str, *, depth: Optional[int]=1, reference: Optional[Path]=None ) -> Repo: """ Returns a :class:`Repo <git.repo.base.Repo>` instance for the branch. See :meth:`run` for arguments descriptions. """ git_repo, _ = self.get_files(repo, branch, depth=depth, reference=reference) return git_repo
Returns a :class:`Repo <git.repo.base.Repo>` instance for the branch. See :meth:`run` for arguments descriptions.
def sleep(seconds=0): """Yield control to another eligible coroutine until at least *seconds* have elapsed. *seconds* may be specified as an integer, or a float if fractional seconds are desired. """ loop = evergreen.current.loop current = Fiber.current() assert loop.task is not current timer = loop.call_later(seconds, current.switch) try: loop.switch() finally: timer.cancel()
Yield control to another eligible coroutine until at least *seconds* have elapsed. *seconds* may be specified as an integer, or a float if fractional seconds are desired.
def stopped(name=None,
            containers=None,
            shutdown_timeout=None,
            unpause=False,
            error_on_absent=True,
            **kwargs):
    '''
    Ensure that a container (or containers) is stopped

    name
        Name or ID of the container

    containers
        Run this state on more than one container at a time. The following
        two examples accomplish the same thing:

        .. code-block:: yaml

            stopped_containers:
              docker_container.stopped:
                - names:
                  - foo
                  - bar
                  - baz

        .. code-block:: yaml

            stopped_containers:
              docker_container.stopped:
                - containers:
                  - foo
                  - bar
                  - baz

        However, the second example will be a bit quicker since Salt will
        stop all specified containers in a single run, rather than
        executing the state separately on each image (as it would in the
        first example).

    shutdown_timeout
        Timeout for graceful shutdown of the container. If this timeout is
        exceeded, the container will be killed. If this value is not passed,
        then the container's configured ``stop_timeout`` will be observed. If
        ``stop_timeout`` was also unset on the container, then a timeout of 10
        seconds will be used.

    unpause : False
        Set to ``True`` to unpause any paused containers before stopping. If
        unset, then an error will be raised for any container that was paused.

    error_on_absent : True
        By default, this state will return an error if any of the specified
        containers are absent. Set this to ``False`` to suppress that error.
    '''
    ret = {'name': name,
           'changes': {},
           'result': False,
           'comment': ''}

    if not name and not containers:
        ret['comment'] = 'One of \'name\' and \'containers\' must be provided'
        return ret
    if containers is not None:
        if not isinstance(containers, list):
            ret['comment'] = 'containers must be a list'
            return ret
        targets = []
        for target in containers:
            if not isinstance(target, six.string_types):
                target = six.text_type(target)
            targets.append(target)
    elif name:
        if not isinstance(name, six.string_types):
            targets = [six.text_type(name)]
        else:
            targets = [name]

    containers = {}
    for target in targets:
        try:
            c_state = __salt__['docker.state'](target)
        except CommandExecutionError:
            containers.setdefault('absent', []).append(target)
        else:
            containers.setdefault(c_state, []).append(target)

    errors = []
    if error_on_absent and 'absent' in containers:
        errors.append(
            'The following container(s) are absent: {0}'.format(
                ', '.join(containers['absent'])
            )
        )

    if not unpause and 'paused' in containers:
        ret['result'] = False
        errors.append(
            'The following container(s) are paused: {0}'.format(
                ', '.join(containers['paused'])
            )
        )

    if errors:
        ret['result'] = False
        ret['comment'] = '. '.join(errors)
        return ret

    to_stop = containers.get('running', []) + containers.get('paused', [])

    if not to_stop:
        ret['result'] = True
        if len(targets) == 1:
            ret['comment'] = 'Container \'{0}\' is '.format(targets[0])
        else:
            ret['comment'] = 'All specified containers are '
        if 'absent' in containers:
            ret['comment'] += 'absent or '
        ret['comment'] += 'not running'
        return ret

    if __opts__['test']:
        ret['result'] = None
        ret['comment'] = (
            'The following container(s) will be stopped: {0}'
            .format(', '.join(to_stop))
        )
        return ret

    stop_errors = []
    for target in to_stop:
        stop_kwargs = {'unpause': unpause}
        if shutdown_timeout:
            stop_kwargs['timeout'] = shutdown_timeout
        changes = __salt__['docker.stop'](target, **stop_kwargs)
        if changes['result'] is True:
            ret['changes'][target] = changes
        else:
            if 'comment' in changes:
                stop_errors.append(changes['comment'])
            else:
                stop_errors.append(
                    'Failed to stop container \'{0}\''.format(target)
                )

    if stop_errors:
        ret['comment'] = '; '.join(stop_errors)
        return ret

    ret['result'] = True
    ret['comment'] = (
        'The following container(s) were stopped: {0}'
        .format(', '.join(to_stop))
    )
    return ret
Ensure that a container (or containers) is stopped name Name or ID of the container containers Run this state on more than one container at a time. The following two examples accomplish the same thing: .. code-block:: yaml stopped_containers: docker_container.stopped: - names: - foo - bar - baz .. code-block:: yaml stopped_containers: docker_container.stopped: - containers: - foo - bar - baz However, the second example will be a bit quicker since Salt will stop all specified containers in a single run, rather than executing the state separately on each image (as it would in the first example). shutdown_timeout Timeout for graceful shutdown of the container. If this timeout is exceeded, the container will be killed. If this value is not passed, then the container's configured ``stop_timeout`` will be observed. If ``stop_timeout`` was also unset on the container, then a timeout of 10 seconds will be used. unpause : False Set to ``True`` to unpause any paused containers before stopping. If unset, then an error will be raised for any container that was paused. error_on_absent : True By default, this state will return an error if any of the specified containers are absent. Set this to ``False`` to suppress that error.
def from_start_and_end(cls, start, end, sequence, phos_3_prime=False):
    """Creates a DNA duplex from a start and end point.

    Parameters
    ----------
    start: [float, float, float]
        Start of the build axis.
    end: [float, float, float]
        End of build axis.
    sequence: str
        Nucleotide sequence.
    phos_3_prime: bool, optional
        If False, the 5' and the 3' phosphates will be omitted."""
    strand1 = NucleicAcidStrand.from_start_and_end(
        start, end, sequence, phos_3_prime=phos_3_prime)
    duplex = cls(strand1)
    return duplex
Creates a DNA duplex from a start and end point.

Parameters
----------
start: [float, float, float]
    Start of the build axis.
end: [float, float, float]
    End of build axis.
sequence: str
    Nucleotide sequence.
phos_3_prime: bool, optional
    If False, the 5' and the 3' phosphates will be omitted.
def deserialize(self, buffer=bytes(), index=Index(), **options):
    """ De-serializes the `Field` from the byte *buffer* starting at
    the begin of the *buffer* or with the given *index* by unpacking the
    bytes to the :attr:`value` of the `Field` in accordance with the
    decoding *byte order* for the de-serialization and the decoding
    :attr:`byte_order` of the `Field`.

    The specific decoding :attr:`byte_order` of the `Field` overrules the
    decoding *byte order* for the de-serialization.

    Returns the :class:`Index` of the *buffer* after the `Field`.

    Optionally, the de-serialization of the referenced :attr:`~Pointer.data`
    object of a :class:`Pointer` field can be enabled.

    :param bytes buffer: byte stream.
    :param Index index: current read :class:`Index` within the *buffer*.
    :keyword byte_order: decoding byte order for the de-serialization.
    :type byte_order: :class:`Byteorder`, :class:`str`
    :keyword bool nested: if ``True`` a :class:`Pointer` field de-serializes
        its referenced :attr:`~Pointer.data` object as well (chained method
        call). Each :class:`Pointer` field uses for the de-serialization of
        its referenced :attr:`~Pointer.data` object its own
        :attr:`~Pointer.bytestream`.
    """
    self.index = index
    self._value = self.unpack(buffer, index, **options)
    return self.index_field(index)
De-serializes the `Field` from the byte *buffer* starting at
the begin of the *buffer* or with the given *index* by unpacking the
bytes to the :attr:`value` of the `Field` in accordance with the
decoding *byte order* for the de-serialization and the decoding
:attr:`byte_order` of the `Field`.

The specific decoding :attr:`byte_order` of the `Field` overrules the
decoding *byte order* for the de-serialization.

Returns the :class:`Index` of the *buffer* after the `Field`.

Optionally, the de-serialization of the referenced :attr:`~Pointer.data`
object of a :class:`Pointer` field can be enabled.

:param bytes buffer: byte stream.
:param Index index: current read :class:`Index` within the *buffer*.
:keyword byte_order: decoding byte order for the de-serialization.
:type byte_order: :class:`Byteorder`, :class:`str`
:keyword bool nested: if ``True`` a :class:`Pointer` field de-serializes
    its referenced :attr:`~Pointer.data` object as well (chained method
    call). Each :class:`Pointer` field uses for the de-serialization of
    its referenced :attr:`~Pointer.data` object its own
    :attr:`~Pointer.bytestream`.
def get_encoded_word(value):
    """ encoded-word = "=?" charset "?" encoding "?" encoded-text "?=" """
    ew = EncodedWord()
    if not value.startswith('=?'):
        raise errors.HeaderParseError(
            "expected encoded word but found {}".format(value))
    tok, *remainder = value[2:].split('?=', 1)
    if tok == value[2:]:
        raise errors.HeaderParseError(
            "expected encoded word but found {}".format(value))
    remstr = ''.join(remainder)
    if remstr[:2].isdigit():
        rest, *remainder = remstr.split('?=', 1)
        tok = tok + '?=' + rest
    if len(tok.split()) > 1:
        ew.defects.append(errors.InvalidHeaderDefect(
            "whitespace inside encoded word"))
    ew.cte = value
    value = ''.join(remainder)
    try:
        text, charset, lang, defects = _ew.decode('=?' + tok + '?=')
    except ValueError:
        raise errors.HeaderParseError(
            "encoded word format invalid: '{}'".format(ew.cte))
    ew.charset = charset
    ew.lang = lang
    ew.defects.extend(defects)
    while text:
        if text[0] in WSP:
            token, text = get_fws(text)
            ew.append(token)
            continue
        chars, *remainder = _wsp_splitter(text, 1)
        vtext = ValueTerminal(chars, 'vtext')
        _validate_xtext(vtext)
        ew.append(vtext)
        text = ''.join(remainder)
    return ew, value
encoded-word = "=?" charset "?" encoding "?" encoded-text "?="
def _find_errors_param(self): """ Searches for the parameter on the estimator that contains the array of errors that was used to determine the optimal alpha. If it cannot find the parameter then a YellowbrickValueError is raised. """ # NOTE: The order of the search is very important! if hasattr(self.estimator, 'mse_path_'): return self.estimator.mse_path_.mean(1) if hasattr(self.estimator, 'cv_values_'): return self.estimator.cv_values_.mean(0) raise YellowbrickValueError( "could not find errors param on {} estimator".format( self.estimator.__class__.__name__ ) )
Searches for the parameter on the estimator that contains the array of errors that was used to determine the optimal alpha. If it cannot find the parameter then a YellowbrickValueError is raised.
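A sketch of the two attributes the search order covers, using scikit-learn estimators; note that store_cv_values was the parameter name in older scikit-learn releases (renamed store_cv_results more recently).

import numpy as np
from sklearn.datasets import make_regression
from sklearn.linear_model import LassoCV, RidgeCV

X, y = make_regression(n_samples=100, n_features=5, random_state=0)

lasso = LassoCV(cv=3).fit(X, y)
print(lasso.mse_path_.mean(1).shape)   # mean CV error per alpha

# parameter name per older scikit-learn releases (an assumption here)
ridge = RidgeCV(store_cv_values=True).fit(X, y)
print(ridge.cv_values_.mean(0).shape)  # mean LOO error per alpha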
def filter_on_wire_representation(ava, acs, required=None, optional=None): """ :param ava: A dictionary with attributes and values :param acs: List of tuples (Attribute Converter name, Attribute Converter instance) :param required: A list of saml.Attributes :param optional: A list of saml.Attributes :return: Dictionary of expected/wanted attributes and values """ acsdic = dict([(ac.name_format, ac) for ac in acs]) if required is None: required = [] if optional is None: optional = [] res = {} for attr, val in ava.items(): done = False for req in required: try: _name = acsdic[req.name_format]._to[attr] if _name == req.name: res[attr] = val done = True except KeyError: pass if done: continue for opt in optional: try: _name = acsdic[opt.name_format]._to[attr] if _name == opt.name: res[attr] = val break except KeyError: pass return res
:param ava: A dictionary with attributes and values :param acs: List of tuples (Attribute Converter name, Attribute Converter instance) :param required: A list of saml.Attributes :param optional: A list of saml.Attributes :return: Dictionary of expected/wanted attributes and values
def load_backend(build_configuration, backend_package):
    """Installs the given backend package into the build configuration.

    :param build_configuration: the :class:``pants.build_graph.build_configuration.BuildConfiguration`` to
        install the backend plugin into.
    :param string backend_package: the package name containing the backend plugin register module that
        provides the plugin entrypoints.
    :raises: :class:``pants.base.exceptions.BuildConfigurationError`` if there is a problem loading the
        build configuration."""
    backend_module = backend_package + '.register'
    try:
        module = importlib.import_module(backend_module)
    except ImportError as e:
        traceback.print_exc()
        raise BackendConfigurationError('Failed to load the {backend} backend: {error}'
                                        .format(backend=backend_module, error=e))

    def invoke_entrypoint(name):
        entrypoint = getattr(module, name, lambda: None)
        try:
            return entrypoint()
        except TypeError as e:
            traceback.print_exc()
            raise BackendConfigurationError(
                'Entrypoint {entrypoint} in {backend} must be a zero-arg callable: {error}'
                .format(entrypoint=name, backend=backend_module, error=e))

    build_file_aliases = invoke_entrypoint('build_file_aliases')
    if build_file_aliases:
        build_configuration.register_aliases(build_file_aliases)

    subsystems = invoke_entrypoint('global_subsystems')
    if subsystems:
        build_configuration.register_optionables(subsystems)

    rules = invoke_entrypoint('rules')
    if rules:
        build_configuration.register_rules(rules)

    invoke_entrypoint('register_goals')
Installs the given backend package into the build configuration.

:param build_configuration: the :class:``pants.build_graph.build_configuration.BuildConfiguration`` to
    install the backend plugin into.
:param string backend_package: the package name containing the backend plugin register module that
    provides the plugin entrypoints.
:raises: :class:``pants.base.exceptions.BuildConfigurationError`` if there is a problem loading the
    build configuration.
def _new_convolution(self, use_bias): """Returns new convolution. Args: use_bias: Use bias in convolutions. If False, clean_dict removes bias entries from initializers, partitioners and regularizers passed to the constructor of the convolution. """ def clean_dict(input_dict): if input_dict and not use_bias: cleaned_dict = input_dict.copy() cleaned_dict.pop("b", None) return cleaned_dict return input_dict return self._conv_class( output_channels=4*self._output_channels, kernel_shape=self._kernel_shape, stride=self._stride, rate=self._rate, padding=self._padding, use_bias=use_bias, initializers=clean_dict(self._initializers), partitioners=clean_dict(self._partitioners), regularizers=clean_dict(self._regularizers), name="conv")
Returns new convolution. Args: use_bias: Use bias in convolutions. If False, clean_dict removes bias entries from initializers, partitioners and regularizers passed to the constructor of the convolution.
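The clean_dict behaviour in isolation; the dictionary values are illustrative.

def clean_dict(input_dict, use_bias):
    if input_dict and not use_bias:
        cleaned_dict = input_dict.copy()
        cleaned_dict.pop("b", None)
        return cleaned_dict
    return input_dict

initializers = {"w": "truncated_normal", "b": "zeros"}
print(clean_dict(initializers, use_bias=True))   # keeps 'b'
print(clean_dict(initializers, use_bias=False))  # {'w': 'truncated_normal'}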
def _generate_union_tag_vars_funcs(self, union):
    """Emits the getter methods for retrieving tag-specific state. The
    getters throw an error in the event an associated tag state variable
    is accessed without the correct tag state."""
    for field in union.all_fields:
        if not is_void_type(field.data_type):
            enum_field_name = fmt_enum_name(field.name, union)
            with self.block_func(
                    func=fmt_camel(field.name),
                    args=[],
                    return_type=fmt_type(field.data_type)):
                with self.block(
                        'if (![self is{}])'.format(
                            fmt_camel_upper(field.name)),
                        delim=('{', '}')):
                    error_msg = 'Invalid tag: required {}, but was %@.'.format(
                        enum_field_name)
                    throw_exc = (
                        '[NSException raise:@"IllegalStateException" '
                        'format:@"{}", [self tagName]];')
                    self.emit(throw_exc.format(error_msg))
                self.emit('return _{};'.format(fmt_var(field.name)))
            self.emit()
Emits the getter methods for retrieving tag-specific state. The
getters throw an error in the event an associated tag state variable
is accessed without the correct tag state.
def get_url(request, application, roles, label=None): """ Retrieve a link that will work for the current user. """ args = [] if label is not None: args.append(label) # don't use secret_token unless we have to if 'is_admin' in roles: # Administrators can access anything without secrets require_secret = False elif 'is_applicant' not in roles: # we never give secrets to anybody but the applicant require_secret = False elif not request.user.is_authenticated: # If applicant is not logged in, we redirect them to secret URL require_secret = True elif request.user != application.applicant: # If logged in as different person, we redirect them to secret # URL. This could happen if the application was open with a different # email address, and the applicant is logged in when accessing it. require_secret = True else: # otherwise redirect them to URL that requires correct login. require_secret = False # return required url if not require_secret: url = reverse( 'kg_application_detail', args=[application.pk, application.state] + args) else: url = reverse( 'kg_application_unauthenticated', args=[application.secret_token, application.state] + args) return url
Retrieve a link that will work for the current user.