code: string (lengths 75 to 104k)
docstring: string (lengths 1 to 46.9k)
def cli_main(): """Render mustache templates using json files""" import argparse import os def is_file_or_pipe(arg): if not os.path.exists(arg) or os.path.isdir(arg): parser.error('The file {0} does not exist!'.format(arg)) else: return arg def is_dir(arg): if not os.path.isdir(arg): parser.error('The directory {0} does not exist!'.format(arg)) else: return arg parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('-v', '--version', action='version', version=version) parser.add_argument('template', help='The mustache file', type=is_file_or_pipe) parser.add_argument('-d', '--data', dest='data', help='The json data file', type=is_file_or_pipe, default={}) parser.add_argument('-p', '--path', dest='partials_path', help='The directory where your partials reside', type=is_dir, default='.') parser.add_argument('-e', '--ext', dest='partials_ext', help='The extension for your mustache\ partials, \'mustache\' by default', default='mustache') parser.add_argument('-l', '--left-delimiter', dest='def_ldel', help='The default left delimiter, "{{" by default.', default='{{') parser.add_argument('-r', '--right-delimiter', dest='def_rdel', help='The default right delimiter, "}}" by default.', default='}}') args = vars(parser.parse_args()) try: sys.stdout.write(main(**args)) sys.stdout.flush() except SyntaxError as e: print('Chevron: syntax error') print(' ' + '\n '.join(e.args[0].split('\n'))) exit(1)
Render mustache templates using json files
def checkin_boardingpass(self, code, passenger_name, seat_class, etkt_bnr, seat='', gate='', boarding_time=None, is_cancel=False, qrcode_data=None, card_id=None): """ 飞机票接口 """ data = { 'code': code, 'passenger_name': passenger_name, 'class': seat_class, 'etkt_bnr': etkt_bnr, 'seat': seat, 'gate': gate, 'is_cancel': is_cancel } if boarding_time: data['boarding_time'] = boarding_time if qrcode_data: data['qrcode_data'] = qrcode_data if card_id: data['card_id'] = card_id return self._post( 'card/boardingpass/checkin', data=data )
Airplane ticket (boarding pass) interface
def _import_bin(filename): """Read a .bin file generated by the IRIS Instruments Syscal Pro System Parameters ---------- filename : string Path to input filename Returns ------- metadata : dict General information on the measurement df : :py:class:`pandas.DataFrame` dataframe containing all measurement data """ fid = open(filename, 'rb') def fget(fid, fmt, tsize): buffer = fid.read(tsize) result_raw = struct.unpack(fmt, buffer) if len(result_raw) == 1: return result_raw[0] else: return result_raw # determine overall size fid.seek(0, 2) total_size = fid.tell() # print('total size', total_size) # start from the beginning fid.seek(0) # read version buffer = fid.read(4) version = struct.unpack('I', buffer) # print('version', version) buffer = fid.read(1) typedesyscal = struct.unpack('c', buffer)[0] syscal_type = int.from_bytes(typedesyscal, 'big') # print('Syscal type: {}'.format(syscal_type)) # comment buffer = fid.read(1024) comment_raw = struct.iter_unpack('c', buffer) comment = ''.join([x[0].decode('utf-8') for x in comment_raw]) # print('comment', comment) metadata = { 'version': version, 'syscal_type': syscal_type, 'comment': comment, } measurements = [] # for each measurement counter = 0 while(fid.tell() < total_size): # print('COUNTER', counter) buffer = fid.read(2) array_type = struct.unpack('h', buffer) array_type # print(array_type) # not used moretmeasure = fget(fid, 'h', 2) moretmeasure # print('moretmeasure', moretmeasure) # measurement time [ms] mtime = fget(fid, 'f', 4) # print('measurement time', mtime) # delay before IP measurements start [ms] mdelay = fget(fid, 'f', 4) # print('mdelay', mdelay) TypeCpXyz = fget(fid, 'h', 2) # our file format documentation always assumes this value to be == 1 assert TypeCpXyz == 1 # print('TypeCpXyz', TypeCpXyz) # ignore fget(fid, 'h', 2) # positions: a b m n [m] xpos = fget(fid, '4f', 16) # print('xpos', xpos) ypos = fget(fid, '4f', 16) # print('ypos', ypos) zpos = fget(fid, '4f', 16) # print('zpos', zpos) # self-potential [mV] sp = fget(fid, 'f', 4) # print('sp', sp) # measured voltage at voltage electrodes m and n [mV] vp = fget(fid, 'f', 4) # print('vp', vp) Iab = fget(fid, 'f', 4) # print('iab', Iab) rho = fget(fid, 'f', 4) # print('rho', rho) m = fget(fid, 'f', 4) # print('m', m) # standard deviation q = fget(fid, 'f', 4) # print('q', q) # timing windows Tm = fget(fid, '20f', 20 * 4) Tm = np.array(Tm) # print('Tm', Tm) # chargeabilities Mx = fget(fid, '20f', 20 * 4) Mx = np.array(Mx) # print('Mx', Mx) # this is 4 bytes used to store information on the measured channel # Channel + NbChannel buffer = fid.read(1) buffer_bin = bin(ord(buffer))[2:].rjust(8, '0') # print(buffer_bin) channel = int(buffer_bin[4:], 2) channelnb = int(buffer_bin[0:4], 2) # print('ChannelNB:', channelnb) # print('Channel:', channel) # 4 binaries + unused buffer = fid.read(1) buffer_bin = bin(ord(buffer))[2:].rjust(8, '0') # print(buffer_bin) overload = bool(int(buffer_bin[4])) channel_valid = bool(int(buffer_bin[5])) channel_sync = bool(int(buffer_bin[6])) gap_filler = bool(int(buffer_bin[7])) # print(overload, channel_valid, channel_sync, gap_filler) measurement_num = fget(fid, 'H', 2) # print('measurement_num', measurement_num) filename = fget(fid, '12s', 12) # print('filename', filename) latitude = fget(fid, 'f', 4) # print('lat', latitude) longitude = fget(fid, 'f', 4) # print('long', longitude) # number of stacks NbCren = fget(fid, 'f', 4) # print('Stacks', NbCren) # RS check RsChk = fget(fid, 'f', 4) # print('RsChk', RsChk) # absolute applied voltage vab = 
fget(fid, 'f', 4) # print('Vab', vab) # tx battery voltage [V] batTX = fget(fid, 'f', 4) # print('batTX', batTX) # rx battery voltage [V] batRX = fget(fid, 'f', 4) # print('batRX', batRX) temperature = fget(fid, 'f', 4) # print('Temp.', temperature) # TODO: date and time not analyzed b = struct.unpack('2f', fid.read(2 * 4)) # print('b', b) b measurements.append({ 'version': version, 'mtime': mtime, 'x_a': xpos[0], 'x_b': xpos[1], 'x_m': xpos[2], 'x_n': xpos[3], 'y_a': ypos[0], 'y_b': ypos[1], 'y_m': ypos[2], 'y_n': ypos[3], 'z_a': zpos[0], 'z_b': zpos[1], 'z_m': zpos[2], 'z_n': zpos[3], 'mdelay': mdelay, 'vp': vp, 'q': q, 'overload': overload, 'channel_valid': channel_valid, 'channel_sync': channel_sync, 'gap_filler': gap_filler, 'NbStacks': NbCren, 'm': m, 'Tm': Tm, 'Mx': Mx, 'nr': measurement_num, 'vab': vab, 'channel': channel, 'sp': sp, 'Iab': Iab, 'rho': rho, 'latitude': latitude, 'longitude': longitude, 'channelnb': channelnb, 'RsCHk': RsChk, 'batTX': batTX, 'batRX': batRX, 'temperature': temperature, 'measurement_num': measurement_num, }) counter += 1 # create a dataframe with all primary data df = pd.DataFrame( measurements ).reset_index() return metadata, df
Read a .bin file generated by the IRIS Instruments Syscal Pro System Parameters ---------- filename : string Path to input filename Returns ------- metadata : dict General information on the measurement df : :py:class:`pandas.DataFrame` dataframe containing all measurement data
def refresh_db(root=None): ''' Force a repository refresh by calling ``zypper refresh --force``, return a dict:: {'<database name>': Bool} root operate on a different root directory. CLI Example: .. code-block:: bash salt '*' pkg.refresh_db ''' # Remove rtag file to keep multiple refreshes from happening in pkg states salt.utils.pkg.clear_rtag(__opts__) ret = {} out = __zypper__(root=root).refreshable.call('refresh', '--force') for line in out.splitlines(): if not line: continue if line.strip().startswith('Repository') and '\'' in line: try: key = line.split('\'')[1].strip() if 'is up to date' in line: ret[key] = False except IndexError: continue elif line.strip().startswith('Building') and '\'' in line: key = line.split('\'')[1].strip() if 'done' in line: ret[key] = True return ret
Force a repository refresh by calling ``zypper refresh --force``, return a dict:: {'<database name>': Bool} root operate on a different root directory. CLI Example: .. code-block:: bash salt '*' pkg.refresh_db
def create_db_schema(cls, cur, schema_name): """ Create Postgres schema script and execute it on cursor """ create_schema_script = "CREATE SCHEMA {0} ;\n".format(schema_name) cur.execute(create_schema_script)
Create Postgres schema script and execute it on cursor
def hello_user(api_client): """Use an authorized client to fetch and print profile information. Parameters api_client (UberRidesClient) An UberRidesClient with OAuth 2.0 credentials. """ try: response = api_client.get_user_profile() except (ClientError, ServerError) as error: fail_print(error) return else: profile = response.json first_name = profile.get('first_name') last_name = profile.get('last_name') email = profile.get('email') message = 'Hello, {} {}. Successfully granted access token to {}.' message = message.format(first_name, last_name, email) success_print(message) success_print(profile) success_print('---') response = api_client.get_home_address() address = response.json success_print(address) success_print('---') response = api_client.get_user_activity() history = response.json success_print(history)
Use an authorized client to fetch and print profile information. Parameters api_client (UberRidesClient) An UberRidesClient with OAuth 2.0 credentials.
def rc4_encrypt(key, data): """ Encrypts plaintext using RC4 with a 40-128 bit key :param key: The encryption key - a byte string 5-16 bytes long :param data: The plaintext - a byte string :raises: ValueError - when any of the parameters contain an invalid value TypeError - when any of the parameters are of the wrong type OSError - when an error is returned by the OS crypto library :return: A byte string of the ciphertext """ if len(key) < 5 or len(key) > 16: raise ValueError(pretty_message( ''' key must be 5 to 16 bytes (40 to 128 bits) long - is %s ''', len(key) )) return _encrypt(Security.kSecAttrKeyTypeRC4, key, data, None, None)
Encrypts plaintext using RC4 with a 40-128 bit key :param key: The encryption key - a byte string 5-16 bytes long :param data: The plaintext - a byte string :raises: ValueError - when any of the parameters contain an invalid value TypeError - when any of the parameters are of the wrong type OSError - when an error is returned by the OS crypto library :return: A byte string of the ciphertext
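A minimal usage sketch, assuming this function is exposed the way oscrypto's symmetric module exposes it (the import path and the companion rc4_decrypt are assumptions, not shown above):

# Hypothetical round-trip with a 128-bit key.
from oscrypto.symmetric import rc4_encrypt, rc4_decrypt

key = b'0123456789abcdef'        # 16 bytes = 128 bits, inside the allowed 5-16 byte range
plaintext = b'attack at dawn'

ciphertext = rc4_encrypt(key, plaintext)
assert rc4_decrypt(key, ciphertext) == plaintext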
def read_char(self, c: str) -> bool: """ Consume the c head byte, increment current index and return True else return False. It use peekchar and it's the same as '' in BNF. """ if self.read_eof(): return False self._stream.save_context() if c == self._stream.peek_char: self._stream.incpos() return self._stream.validate_context() return self._stream.restore_context()
Consume the c head byte, increment the current index and return True; otherwise return False. It uses peek_char and is the same as '' in BNF.
def get_absolute(self, points): """Given a set of points geo referenced to this instance, return the points as absolute values. """ # remember if we got a list is_list = isinstance(points, list) points = ensure_numeric(points, num.float) if len(points.shape) == 1: # One point has been passed msg = 'Single point must have two elements' if not len(points) == 2: raise ValueError(msg) msg = 'Input must be an N x 2 array or list of (x,y) values. ' msg += 'I got an %d x %d array' %points.shape if not points.shape[1] == 2: raise ValueError(msg) # Add geo ref to points if not self.is_absolute(): points = copy.copy(points) # Don't destroy input points[:,0] += self.xllcorner points[:,1] += self.yllcorner if is_list: points = points.tolist() return points
Given a set of points geo referenced to this instance, return the points as absolute values.
def FMErrorByNum( num ): """This function raises an error based on the specified error code.""" if not num in FMErrorNum.keys(): raise FMServerError, (num, FMErrorNum[-1]) elif num == 102: raise FMFieldError, (num, FMErrorNum[num]) else: raise FMServerError, (num, FMErrorNum[num])
This function raises an error based on the specified error code.
async def send_media_group(self, chat_id: typing.Union[base.Integer, base.String], media: typing.Union[types.MediaGroup, typing.List], disable_notification: typing.Union[base.Boolean, None] = None, reply_to_message_id: typing.Union[base.Integer, None] = None) -> typing.List[types.Message]: """ Use this method to send a group of photos or videos as an album. Source: https://core.telegram.org/bots/api#sendmediagroup :param chat_id: Unique identifier for the target chat or username of the target channel :type chat_id: :obj:`typing.Union[base.Integer, base.String]` :param media: A JSON-serialized array describing photos and videos to be sent :type media: :obj:`typing.Union[types.MediaGroup, typing.List]` :param disable_notification: Sends the message silently. Users will receive a notification with no sound :type disable_notification: :obj:`typing.Union[base.Boolean, None]` :param reply_to_message_id: If the message is a reply, ID of the original message :type reply_to_message_id: :obj:`typing.Union[base.Integer, None]` :return: On success, an array of the sent Messages is returned :rtype: typing.List[types.Message] """ # Convert list to MediaGroup if isinstance(media, list): media = types.MediaGroup(media) files = dict(media.get_files()) media = prepare_arg(media) payload = generate_payload(**locals(), exclude=['files']) result = await self.request(api.Methods.SEND_MEDIA_GROUP, payload, files) return [types.Message(**message) for message in result]
Use this method to send a group of photos or videos as an album. Source: https://core.telegram.org/bots/api#sendmediagroup :param chat_id: Unique identifier for the target chat or username of the target channel :type chat_id: :obj:`typing.Union[base.Integer, base.String]` :param media: A JSON-serialized array describing photos and videos to be sent :type media: :obj:`typing.Union[types.MediaGroup, typing.List]` :param disable_notification: Sends the message silently. Users will receive a notification with no sound :type disable_notification: :obj:`typing.Union[base.Boolean, None]` :param reply_to_message_id: If the message is a reply, ID of the original message :type reply_to_message_id: :obj:`typing.Union[base.Integer, None]` :return: On success, an array of the sent Messages is returned :rtype: typing.List[types.Message]
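A hedged usage sketch for this coroutine, assuming an aiogram 2.x-style Bot and MediaGroup; the token, chat id, and photo URLs are placeholders:

import asyncio
from aiogram import Bot, types

async def send_album(token: str, chat_id: int) -> None:
    bot = Bot(token=token)
    media = types.MediaGroup()
    media.attach_photo('https://example.com/a.jpg', caption='first photo')
    media.attach_photo('https://example.com/b.jpg')
    messages = await bot.send_media_group(chat_id, media=media)
    print(len(messages), 'messages sent as one album')

# asyncio.run(send_album('BOT_TOKEN', 123456789))  # placeholders, not real credentials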
def diff_config(jaide, second_host, mode): """ Perform a show | compare with some set commands. @param jaide: The jaide connection to the device. @type jaide: jaide.Jaide object @param second_host: The device IP or hostname of the second host to | compare with. @type second_host: str @param mode: How to compare the configuration, either in 'set' mode or | 'stanza' mode. @type mode: str @returns: The comparison between the two devices. @rtype str """ try: # create a list of all the lines that differ, and merge it. output = '\n'.join([diff for diff in jaide.diff_config(second_host, mode.lower())]) except errors.SSHError: output = color('Unable to connect to port %s on device: %s\n' % (str(jaide.port), second_host), 'red') except errors.AuthenticationError: # NCClient auth failure output = color('Authentication failed for device: %s' % second_host, 'red') except AuthenticationException: # Paramiko auth failure output = color('Authentication failed for device: %s' % second_host, 'red') except SSHException as e: output = color('Error connecting to device: %s\nError: %s' % (second_host, str(e)), 'red') except socket.timeout: output = color('Timeout exceeded connecting to device: %s' % second_host, 'red') except socket.gaierror: output = color('No route to host, or invalid hostname: %s' % second_host, 'red') except socket.error: output = color('The device refused the connection on port %s, or ' 'no route to host.' % jaide.port, 'red') if output.strip() == '': output = color("There were no config differences between %s and %s\n" % (jaide.host, second_host), 'yel') else: # color removals red, and additions green return color_diffs(output) return output
Perform a show | compare with some set commands. @param jaide: The jaide connection to the device. @type jaide: jaide.Jaide object @param second_host: The device IP or hostname of the second host to | compare with. @type second_host: str @param mode: How to compare the configuration, either in 'set' mode or | 'stanza' mode. @type mode: str @returns: The comparison between the two devices. @rtype str
def rerun(client, revision, roots, siblings, inputs, paths): """Recreate files generated by a sequence of ``run`` commands.""" graph = Graph(client) outputs = graph.build(paths=paths, revision=revision) # Check or extend siblings of outputs. outputs = siblings(graph, outputs) output_paths = {node.path for node in outputs} # Normalize and check all starting paths. roots = {graph.normalize_path(root) for root in roots} assert not roots & output_paths, '--from colides with output paths' # Generate workflow and check inputs. # NOTE The workflow creation is done before opening a new file. workflow = inputs( client, graph.ascwl( input_paths=roots, output_paths=output_paths, outputs=outputs, ) ) # Make sure all inputs are pulled from a storage. client.pull_paths_from_storage( *(path for _, path in workflow.iter_input_files(client.workflow_path)) ) # Store the generated workflow used for updating paths. import yaml output_file = client.workflow_path / '{0}.cwl'.format(uuid.uuid4().hex) with output_file.open('w') as f: f.write( yaml.dump( ascwl( workflow, filter=lambda _, x: x is not None, basedir=client.workflow_path, ), default_flow_style=False ) ) # Execute the workflow and relocate all output files. from ._cwl import execute # FIXME get new output paths for edited tools # output_paths = {path for _, path in workflow.iter_output_files()} execute( client, output_file, output_paths=output_paths, )
Recreate files generated by a sequence of ``run`` commands.
def load_keypair(keypair_file): '''load a keypair from a keypair file. We add attributes key (the raw key) and public_key (the url prepared public key) to the client. Parameters ========== keypair_file: the pem file to load. ''' from Crypto.PublicKey import RSA # Load key with open(keypair_file, 'rb') as filey: key = RSA.import_key(filey.read()) return quote_plus(key.publickey().exportKey().decode('utf-8'))
load a keypair from a keypair file. We add attributes key (the raw key) and public_key (the url prepared public key) to the client. Parameters ========== keypair_file: the pem file to load.
def delete_group(self, group_id): """DeleteGroup. :param str group_id: """ route_values = {} if group_id is not None: route_values['groupId'] = self._serialize.url('group_id', group_id, 'str') self._send(http_method='DELETE', location_id='5966283b-4196-4d57-9211-1b68f41ec1c2', version='5.0', route_values=route_values)
DeleteGroup. :param str group_id:
def content(): """Helper method that returns just the content. This method was added so that the text could be reused in the dock_help module. .. versionadded:: 3.2.2 :returns: A message object without brand element. :rtype: safe.messaging.message.Message """ message = m.Message() paragraph = m.Paragraph( m.Image( 'file:///%s/img/screenshots/' 'shakemap-converter-screenshot.png' % resources_path()), style_class='text-center' ) message.add(paragraph) body = tr( 'This tool will convert an earthquake \'shakemap\' that is in ' 'grid xml format into a GeoTIFF file. The imported file can be used ' 'in InaSAFE as an input for impact functions that require an ' 'earthquake layer. To use this tool effectively:') message.add(body) tips = m.BulletedList() tips.add(tr( 'Select a grid.xml for the input layer.')) tips.add(tr( 'Choose where to write the output layer to.' )) tips.add(tr( 'Choose the interpolation algorithm that should be used when ' 'converting the xml grid to a raster. If unsure keep the default.' )) tips.add(tr( 'If you want to obtain shake data you can get download it free from ' 'the USGS shakemap site: ' 'http://earthquake.usgs.gov/earthquakes/shakemap/list.php?y=2013')) message.add(tips) return message
Helper method that returns just the content. This method was added so that the text could be reused in the dock_help module. .. versionadded:: 3.2.2 :returns: A message object without brand element. :rtype: safe.messaging.message.Message
def compare(self, origin, pattern): """ Args: origin (:obj:`str`): original string pattern (:obj:`str`): Regexp pattern string Returns: bool: True if matches otherwise False. """ if origin is None or pattern is None: return False return re.match(pattern, origin) is not None
Args: origin (:obj:`str`): original string pattern (:obj:`str`): Regexp pattern string Returns: bool: True if matches otherwise False.
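Since the method is only a thin wrapper around re.match, a standalone sketch of the same check:

import re

def compare(origin, pattern):
    # True only when the pattern matches at the beginning of origin.
    if origin is None or pattern is None:
        return False
    return re.match(pattern, origin) is not None

assert compare('device-eth0', r'device-\w+') is True
assert compare('eth0-device', r'device-\w+') is False   # re.match anchors at the start
assert compare(None, r'.*') is False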
def compute_pscale(self,cd11,cd21): """ Compute the pixel scale based on active WCS values. """ return N.sqrt(N.power(cd11,2)+N.power(cd21,2)) * 3600.
Compute the pixel scale based on active WCS values.
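A quick numeric check with made-up CD-matrix terms (in degrees per pixel), just to show the conversion to arcseconds:

import numpy as N

def compute_pscale(cd11, cd21):
    return N.sqrt(N.power(cd11, 2) + N.power(cd21, 2)) * 3600.

# A WCS with cd11 = 1.38889e-5 deg/pix and cd21 = 0 corresponds to ~0.05 arcsec/pix.
print(compute_pscale(1.38889e-5, 0.0))   # -> 0.0500...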
def _convert_to_folder(self, packages): """ Silverstripe's page contains a list of composer packages. This function converts those to folder names. These may be different due to installer-name. Implemented exponential backoff in order to prevent packager from being overly sensitive about the number of requests I was making. @see: https://github.com/composer/installers#custom-install-names @see: https://github.com/richardsjoqvist/silverstripe-localdate/issues/7 """ url = 'http://packagist.org/p/%s.json' with ThreadPoolExecutor(max_workers=12) as executor: futures = [] for package in packages: future = executor.submit(self._get, url, package) futures.append({ 'future': future, 'package': package }) folders = [] for i, future in enumerate(futures, start=1): r = future['future'].result() package = future['package'] if not 'installer-name' in r.text: folder_name = package.split('/')[1] else: splat = list(filter(None, re.split(r'[^a-zA-Z0-9-_.,]', r.text))) folder_name = splat[splat.index('installer-name') + 1] if not folder_name in folders: folders.append(folder_name) else: print("Folder %s is duplicated (current %s, previous %s)" % (folder_name, package, folders.index(folder_name))) if i % 25 == 0: print("Done %s." % i) return folders
Silverstripe's page contains a list of composer packages. This function converts those to folder names. These may be different due to installer-name. Implemented exponential backoff in order to prevent Packagist from being overly sensitive about the number of requests I was making. @see: https://github.com/composer/installers#custom-install-names @see: https://github.com/richardsjoqvist/silverstripe-localdate/issues/7
def to_unitary_matrix( self, qubit_order: ops.QubitOrderOrList = ops.QubitOrder.DEFAULT, qubits_that_should_be_present: Iterable[ops.Qid] = (), ignore_terminal_measurements: bool = True, dtype: Type[np.number] = np.complex128) -> np.ndarray: """Converts the circuit into a unitary matrix, if possible. Args: qubit_order: Determines how qubits are ordered when passing matrices into np.kron. qubits_that_should_be_present: Qubits that may or may not appear in operations within the circuit, but that should be included regardless when generating the matrix. ignore_terminal_measurements: When set, measurements at the end of the circuit are ignored instead of causing the method to fail. dtype: The numpy dtype for the returned unitary. Defaults to np.complex128. Specifying np.complex64 will run faster at the cost of precision. `dtype` must be a complex np.dtype, unless all operations in the circuit have unitary matrices with exclusively real coefficients (e.g. an H + TOFFOLI circuit). Returns: A (possibly gigantic) 2d numpy array corresponding to a matrix equivalent to the circuit's effect on a quantum state. Raises: ValueError: The circuit contains measurement gates that are not ignored. TypeError: The circuit contains gates that don't have a known unitary matrix, e.g. gates parameterized by a Symbol. """ if not ignore_terminal_measurements and any( protocols.is_measurement(op) for op in self.all_operations()): raise ValueError('Circuit contains a measurement.') if not self.are_all_measurements_terminal(): raise ValueError('Circuit contains a non-terminal measurement.') qs = ops.QubitOrder.as_qubit_order(qubit_order).order_for( self.all_qubits().union(qubits_that_should_be_present)) n = len(qs) state = np.eye(1 << n, dtype=np.complex128) state.shape = (2,) * (2 * n) result = _apply_unitary_circuit(self, state, qs, dtype) return result.reshape((1 << n, 1 << n))
Converts the circuit into a unitary matrix, if possible. Args: qubit_order: Determines how qubits are ordered when passing matrices into np.kron. qubits_that_should_be_present: Qubits that may or may not appear in operations within the circuit, but that should be included regardless when generating the matrix. ignore_terminal_measurements: When set, measurements at the end of the circuit are ignored instead of causing the method to fail. dtype: The numpy dtype for the returned unitary. Defaults to np.complex128. Specifying np.complex64 will run faster at the cost of precision. `dtype` must be a complex np.dtype, unless all operations in the circuit have unitary matrices with exclusively real coefficients (e.g. an H + TOFFOLI circuit). Returns: A (possibly gigantic) 2d numpy array corresponding to a matrix equivalent to the circuit's effect on a quantum state. Raises: ValueError: The circuit contains measurement gates that are not ignored. TypeError: The circuit contains gates that don't have a known unitary matrix, e.g. gates parameterized by a Symbol.
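A hedged usage sketch; it assumes a Cirq release where Circuit still exposes to_unitary_matrix (newer versions renamed it to Circuit.unitary) and where the constructor accepts a flat list of operations:

import numpy as np
import cirq

a, b = cirq.LineQubit.range(2)
bell = cirq.Circuit([cirq.H(a), cirq.CNOT(a, b)])

u = bell.to_unitary_matrix()                     # 4x4 complex matrix for 2 qubits
assert u.shape == (4, 4)
assert np.allclose(u @ u.conj().T, np.eye(4))    # sanity check: the result is unitary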
def curve_to(self, x1, y1, x2, y2, x3, y3): """Adds a cubic Bézier spline to the path from the current point to position ``(x3, y3)`` in user-space coordinates, using ``(x1, y1)`` and ``(x2, y2)`` as the control points. After this call the current point will be ``(x3, y3)``. If there is no current point before the call to :meth:`curve_to` this method will behave as if preceded by a call to ``context.move_to(x1, y1)``. :param x1: The X coordinate of the first control point. :param y1: The Y coordinate of the first control point. :param x2: The X coordinate of the second control point. :param y2: The Y coordinate of the second control point. :param x3: The X coordinate of the end of the curve. :param y3: The Y coordinate of the end of the curve. :type x1: float :type y1: float :type x2: float :type y2: float :type x3: float :type y3: float """ cairo.cairo_curve_to(self._pointer, x1, y1, x2, y2, x3, y3) self._check_status()
Adds a cubic Bézier spline to the path from the current point to position ``(x3, y3)`` in user-space coordinates, using ``(x1, y1)`` and ``(x2, y2)`` as the control points. After this call the current point will be ``(x3, y3)``. If there is no current point before the call to :meth:`curve_to` this method will behave as if preceded by a call to ``context.move_to(x1, y1)``. :param x1: The X coordinate of the first control point. :param y1: The Y coordinate of the first control point. :param x2: The X coordinate of the second control point. :param y2: The Y coordinate of the second control point. :param x3: The X coordinate of the end of the curve. :param y3: The Y coordinate of the end of the curve. :type x1: float :type y1: float :type x2: float :type y2: float :type x3: float :type y3: float
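A short sketch drawing one cubic Bézier segment, assuming the cairocffi bindings this method comes from:

import cairocffi as cairo

surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 200, 100)
ctx = cairo.Context(surface)

ctx.move_to(10, 90)                       # current point = start of the curve
ctx.curve_to(60, 10, 140, 10, 190, 90)    # two control points, then the end point
ctx.set_line_width(2)
ctx.stroke()

surface.write_to_png('bezier.png')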
def _check_operator(self, operator): """ Check Set-Up This method checks algorithm operator against the expected parent classes Parameters ---------- operator : str Algorithm operator to check """ if not isinstance(operator, type(None)): tree = [obj.__name__ for obj in getmro(operator.__class__)] if not any([parent in tree for parent in self._op_parents]): warn('{0} does not inherit an operator ' 'parent.'.format(str(operator.__class__)))
Check Set-Up This method checks algorithm operator against the expected parent classes Parameters ---------- operator : str Algorithm operator to check
def _get_NTLMv2_response(user_name, password, domain_name, server_challenge, client_challenge, timestamp, target_info): """ [MS-NLMP] v28.0 2016-07-14 2.2.2.8 NTLM V2 Response: NTLMv2_RESPONSE The NTLMv2_RESPONSE strucutre defines the NTLMv2 authentication NtChallengeResponse in the AUTHENTICATE_MESSAGE. This response is used only when NTLMv2 authentication is configured. The guide on how this is computed is in 3.3.2 NTLM v2 Authentication. :param user_name: The user name of the user we are trying to authenticate with :param password: The password of the user we are trying to authenticate with :param domain_name: The domain name of the user account we are authenticated with :param server_challenge: A random 8-byte response generated by the server in the CHALLENGE_MESSAGE :param client_challenge: A random 8-byte response generated by the client for the AUTHENTICATE_MESSAGE :param timestamp: An 8-byte timestamp in windows format, 100 nanoseconds since 1601-01-01 :param target_info: The target_info structure from the CHALLENGE_MESSAGE with the CBT attached if required :return response: NtChallengeResponse to the server_challenge :return session_base_key: A session key calculated from the user password challenge """ nt_hash = comphash._ntowfv2(user_name, password, domain_name) temp = ComputeResponse._get_NTLMv2_temp(timestamp, client_challenge, target_info) nt_proof_str = hmac.new(nt_hash, (server_challenge + temp), digestmod=hashlib.md5).digest() response = nt_proof_str + temp session_base_key = hmac.new(nt_hash, nt_proof_str, digestmod=hashlib.md5).digest() return response, session_base_key
[MS-NLMP] v28.0 2016-07-14 2.2.2.8 NTLM V2 Response: NTLMv2_RESPONSE The NTLMv2_RESPONSE structure defines the NTLMv2 authentication NtChallengeResponse in the AUTHENTICATE_MESSAGE. This response is used only when NTLMv2 authentication is configured. The guide on how this is computed is in 3.3.2 NTLM v2 Authentication. :param user_name: The user name of the user we are trying to authenticate with :param password: The password of the user we are trying to authenticate with :param domain_name: The domain name of the user account we are authenticated with :param server_challenge: A random 8-byte response generated by the server in the CHALLENGE_MESSAGE :param client_challenge: A random 8-byte response generated by the client for the AUTHENTICATE_MESSAGE :param timestamp: An 8-byte timestamp in windows format, 100 nanoseconds since 1601-01-01 :param target_info: The target_info structure from the CHALLENGE_MESSAGE with the CBT attached if required :return response: NtChallengeResponse to the server_challenge :return session_base_key: A session key calculated from the user password challenge
def smart_text(s, encoding='utf-8', strings_only=False, errors='strict'): """ Returns a text object representing 's' -- unicode on Python 2 and str on Python 3. Treats bytestrings using the 'encoding' codec. If strings_only is True, don't convert (some) non-string-like objects. """ if isinstance(s, Promise): # The input is the result of a gettext_lazy() call. return s return force_text(s, encoding, strings_only, errors)
Returns a text object representing 's' -- unicode on Python 2 and str on Python 3. Treats bytestrings using the 'encoding' codec. If strings_only is True, don't convert (some) non-string-like objects.
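A small usage sketch, assuming the Django-style import path (later Django releases renamed this helper to smart_str):

from django.utils.encoding import smart_text

assert smart_text(b'caf\xc3\xa9') == 'café'       # bytestrings are decoded with the given codec
assert smart_text(42) == '42'                     # non-strings are coerced to text
assert smart_text(42, strings_only=True) == 42    # ...unless strings_only leaves them alone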
def transfer_multiple(self, destinations, priority=prio.NORMAL, payment_id=None, unlock_time=0, relay=True): """ Sends a batch of transfers. Returns a list of resulting transactions. :param destinations: a list of destination and amount pairs: [(:class:`Address <monero.address.Address>`, `Decimal`), ...] :param priority: transaction priority, implies fee. The priority can be a number from 1 to 4 (unimportant, normal, elevated, priority) or a constant from `monero.prio`. :param payment_id: ID for the payment (must be None if :class:`IntegratedAddress <monero.address.IntegratedAddress>` is used as the destination) :param unlock_time: the extra unlock delay :param relay: if `True`, the wallet will relay the transaction(s) to the network immediately; when `False`, it will only return the transaction(s) so they might be broadcasted later :rtype: list of :class:`Transaction <monero.transaction.Transaction>` """ return self._backend.transfer( destinations, priority, payment_id, unlock_time, account=self.index, relay=relay)
Sends a batch of transfers. Returns a list of resulting transactions. :param destinations: a list of destination and amount pairs: [(:class:`Address <monero.address.Address>`, `Decimal`), ...] :param priority: transaction priority, implies fee. The priority can be a number from 1 to 4 (unimportant, normal, elevated, priority) or a constant from `monero.prio`. :param payment_id: ID for the payment (must be None if :class:`IntegratedAddress <monero.address.IntegratedAddress>` is used as the destination) :param unlock_time: the extra unlock delay :param relay: if `True`, the wallet will relay the transaction(s) to the network immediately; when `False`, it will only return the transaction(s) so they might be broadcasted later :rtype: list of :class:`Transaction <monero.transaction.Transaction>`
def OnShiftVideo(self, event): """Shifts through the video""" length = self.player.get_length() time = self.player.get_time() if event.GetWheelRotation() < 0: target_time = max(0, time-length/100.0) elif event.GetWheelRotation() > 0: target_time = min(length, time+length/100.0) self.player.set_time(int(target_time))
Shifts through the video
def tob32(val): """Return provided 32 bit value as a string of four bytes.""" ret = bytearray(4) ret[0] = (val>>24)&M8 ret[1] = (val>>16)&M8 ret[2] = (val>>8)&M8 ret[3] = val&M8 return ret
Return provided 32 bit value as a string of four bytes.
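A quick sketch showing the big-endian byte layout; the struct comparison is only an illustration that the packing matches '>I':

import struct

M8 = 0xFF  # assumed value of the module-level mask constant

def tob32(val):
    ret = bytearray(4)
    ret[0] = (val >> 24) & M8
    ret[1] = (val >> 16) & M8
    ret[2] = (val >> 8) & M8
    ret[3] = val & M8
    return ret

assert bytes(tob32(0x12345678)) == b'\x12\x34\x56\x78'
assert bytes(tob32(0x12345678)) == struct.pack('>I', 0x12345678)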
def map(self, fn, *iterables, timeout=None, chunksize=1, prefetch=None): """ Collects iterables lazily, rather than immediately. Docstring same as parent: https://docs.python.org/3/library/concurrent.futures.html#concurrent.futures.Executor Implmentation taken from this PR: https://github.com/python/cpython/pull/707 """ if timeout is not None: end_time = timeout + time.time() if prefetch is None: prefetch = self._max_workers if prefetch < 0: raise ValueError("prefetch count may not be negative") argsiter = zip(*iterables) fs = collections.deque(self.submit(fn, *args) for args in itertools.islice(argsiter, self._max_workers+prefetch)) # Yield must be hidden in closure so that the futures are submitted before the first iterator value is required. def result_iterator(): nonlocal argsiter try: while fs: res = fs[0].result() if timeout is None else fs[0].result(end_time-time.time()) # Got a result, future needn't be cancelled del fs[0] # Dispatch next task before yielding to keep pipeline full if argsiter: try: args = next(argsiter) except StopIteration: argsiter = None else: fs.append(self.submit(fn, *args)) yield res finally: for future in fs: future.cancel() return result_iterator()
Collects iterables lazily, rather than immediately. Docstring same as parent: https://docs.python.org/3/library/concurrent.futures.html#concurrent.futures.Executor Implementation taken from this PR: https://github.com/python/cpython/pull/707
def loadInstance(self): """ Loads the plugin from the proxy information that was created from the registry file. """ if self._loaded: return self._loaded = True module_path = self.modulePath() package = projex.packageFromPath(module_path) path = os.path.normpath(projex.packageRootPath(module_path)) if path in sys.path: sys.path.remove(path) sys.path.insert(0, path) try: __import__(package) except Exception, e: err = Plugin(self.name(), self.version()) err.setError(e) err.setFilepath(module_path) self._instance = err self.setError(e) msg = "%s.plugin('%s') errored loading instance from %s" opts = (self.proxyClass().__name__, self.name(), module_path) logger.warning(msg % opts) logger.error(e)
Loads the plugin from the proxy information that was created from the registry file.
def read_excel_file(inputfile, sheet_name): """ Return a matrix containing all the information present in the excel sheet of the specified excel document. :arg inputfile: excel document to read :arg sheetname: the name of the excel sheet to return """ workbook = xlrd.open_workbook(inputfile) output = [] found = False for sheet in workbook.sheets(): if sheet.name == sheet_name: found = True for row in range(sheet.nrows): values = [] for col in range(sheet.ncols): values.append(sheet.cell(row, col).value) output.append(values) if not found: # pragma: no cover raise MQ2Exception('Invalid session identifier provided') return output
Return a matrix containing all the information present in the excel sheet of the specified excel document. :arg inputfile: excel document to read :arg sheetname: the name of the excel sheet to return
def _set_up_pool_config(self): ''' Helper to configure pool options during DatabaseWrapper initialization. ''' self._max_conns = self.settings_dict['OPTIONS'].get('MAX_CONNS', pool_config_defaults['MAX_CONNS']) self._min_conns = self.settings_dict['OPTIONS'].get('MIN_CONNS', self._max_conns) self._test_on_borrow = self.settings_dict["OPTIONS"].get('TEST_ON_BORROW', pool_config_defaults['TEST_ON_BORROW']) if self._test_on_borrow: self._test_on_borrow_query = self.settings_dict["OPTIONS"].get('TEST_ON_BORROW_QUERY', pool_config_defaults['TEST_ON_BORROW_QUERY']) else: self._test_on_borrow_query = None
Helper to configure pool options during DatabaseWrapper initialization.
def _as_symbol(value, is_symbol_value=True): """Converts the input to a :class:`SymbolToken` suitable for being emitted as part of a :class:`IonEvent`. If the input has an `as_symbol` method (e.g. :class:`CodePointArray`), it will be converted using that method. Otherwise, it must already be a `SymbolToken`. In this case, there is nothing to do unless the input token is not a symbol value and it is an :class:`_IVMToken`. This requires the `_IVMToken` to be converted to a regular `SymbolToken`. """ try: return value.as_symbol() except AttributeError: assert isinstance(value, SymbolToken) if not is_symbol_value: try: # This converts _IVMTokens to regular SymbolTokens when the _IVMToken cannot represent an IVM (i.e. # it is a field name or annotation). return value.regular_token() except AttributeError: pass return value
Converts the input to a :class:`SymbolToken` suitable for being emitted as part of a :class:`IonEvent`. If the input has an `as_symbol` method (e.g. :class:`CodePointArray`), it will be converted using that method. Otherwise, it must already be a `SymbolToken`. In this case, there is nothing to do unless the input token is not a symbol value and it is an :class:`_IVMToken`. This requires the `_IVMToken` to be converted to a regular `SymbolToken`.
def i2c_slave_read(self): """Read the bytes from an I2C slave reception. The bytes are returned as a string object. """ data = array.array('B', (0,) * self.BUFFER_SIZE) status, addr, rx_len = api.py_aa_i2c_slave_read_ext(self.handle, self.BUFFER_SIZE, data) _raise_i2c_status_code_error_if_failure(status) # In case of general call, actually return the general call address if addr == 0x80: addr = 0x00 del data[rx_len:] return (addr, bytes(data))
Read the bytes from an I2C slave reception. The bytes are returned as a string object.
def _get_pdf_filenames_at(source_directory): """Find all PDF files in the specified directory. Args: source_directory (str): The source directory. Returns: list(str): Filepaths to all PDF files in the specified directory. Raises: ValueError """ if not os.path.isdir(source_directory): raise ValueError("%s is not a directory!" % source_directory) return [os.path.join(source_directory, filename) for filename in os.listdir(source_directory) if filename.endswith(PDF_EXTENSION)]
Find all PDF files in the specified directory. Args: source_directory (str): The source directory. Returns: list(str): Filepaths to all PDF files in the specified directory. Raises: ValueError
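A standalone sketch of the same listing, with the PDF_EXTENSION constant spelled out because it is not shown above:

import os

PDF_EXTENSION = '.pdf'   # assumed value of the module-level constant

def get_pdf_filenames_at(source_directory):
    if not os.path.isdir(source_directory):
        raise ValueError('%s is not a directory!' % source_directory)
    return [os.path.join(source_directory, name)
            for name in os.listdir(source_directory)
            if name.endswith(PDF_EXTENSION)]

# e.g. get_pdf_filenames_at('/tmp/reports') -> ['/tmp/reports/q1.pdf', ...] for a folder of PDFs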
def parse_changes(json): """ Gets price changes from JSON Args: json: JSON data as a list of dict dates, where the keys are the raw market statistics. Returns: List of floats of price changes between entries in JSON. """ changes = [] dates = len(json) for date in range(1, dates): last_close = json[date - 1]['close'] now_close = json[date]['close'] changes.append(now_close - last_close) logger.debug('Market Changes (from JSON):\n{0}'.format(changes)) return changes
Gets price changes from JSON Args: json: JSON data as a list of dict dates, where the keys are the raw market statistics. Returns: List of floats of price changes between entries in JSON.
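A tiny worked example of the close-to-close differencing (logging omitted for brevity):

def parse_changes(json):
    changes = []
    for date in range(1, len(json)):
        changes.append(json[date]['close'] - json[date - 1]['close'])
    return changes

history = [{'close': 100}, {'close': 105}, {'close': 103}]
assert parse_changes(history) == [5, -2]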
def find_by_task(self, task, params={}, **options): """Returns the compact records for all attachments on the task. Parameters ---------- task : {Id} Globally unique identifier for the task. [params] : {Object} Parameters for the request """ path = "/tasks/%s/attachments" % (task) return self.client.get_collection(path, params, **options)
Returns the compact records for all attachments on the task. Parameters ---------- task : {Id} Globally unique identifier for the task. [params] : {Object} Parameters for the request
def getionimage(p, mz_value, tol=0.1, z=1, reduce_func=sum): """ Get an image representation of the intensity distribution of the ion with specified m/z value. By default, the intensity values within the tolerance region are summed. :param p: the ImzMLParser (or anything else with similar attributes) for the desired dataset :param mz_value: m/z value for which the ion image shall be returned :param tol: Absolute tolerance for the m/z value, such that all ions with values mz_value-|tol| <= x <= mz_value+|tol| are included. Defaults to 0.1 :param z: z Value if spectrogram is 3-dimensional. :param reduce_func: the bahaviour for reducing the intensities between mz_value-|tol| and mz_value+|tol| to a single value. Must be a function that takes a sequence as input and outputs a number. By default, the values are summed. :return: numpy matrix with each element representing the ion intensity in this pixel. Can be easily plotted with matplotlib """ tol = abs(tol) im = np.zeros((p.imzmldict["max count of pixels y"], p.imzmldict["max count of pixels x"])) for i, (x, y, z_) in enumerate(p.coordinates): if z_ == 0: UserWarning("z coordinate = 0 present, if you're getting blank images set getionimage(.., .., z=0)") if z_ == z: mzs, ints = map(lambda x: np.asarray(x), p.getspectrum(i)) min_i, max_i = _bisect_spectrum(mzs, mz_value, tol) im[y - 1, x - 1] = reduce_func(ints[min_i:max_i+1]) return im
Get an image representation of the intensity distribution of the ion with specified m/z value. By default, the intensity values within the tolerance region are summed. :param p: the ImzMLParser (or anything else with similar attributes) for the desired dataset :param mz_value: m/z value for which the ion image shall be returned :param tol: Absolute tolerance for the m/z value, such that all ions with values mz_value-|tol| <= x <= mz_value+|tol| are included. Defaults to 0.1 :param z: z Value if spectrogram is 3-dimensional. :param reduce_func: the behaviour for reducing the intensities between mz_value-|tol| and mz_value+|tol| to a single value. Must be a function that takes a sequence as input and outputs a number. By default, the values are summed. :return: numpy matrix with each element representing the ion intensity in this pixel. Can be easily plotted with matplotlib
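A hedged end-to-end sketch using the pyimzML parser this helper ships with; the file name and m/z value are placeholders:

import matplotlib.pyplot as plt
from pyimzml.ImzMLParser import ImzMLParser, getionimage

p = ImzMLParser('example.imzML')           # placeholder path
image = getionimage(p, 885.55, tol=0.25)   # sum intensities within +/- 0.25 m/z

plt.imshow(image, cmap='viridis')
plt.colorbar(label='summed intensity')
plt.savefig('ion_image.png')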
def dispatch(self, *args, **kwargs) -> Awaitable[bool]: """ Create and dispatch an event. This method constructs an event object and then passes it to :meth:`dispatch_event` for the actual dispatching. :param args: positional arguments to the constructor of the associated event class :param kwargs: keyword arguments to the constructor of the associated event class :returns: an awaitable that completes when all the callbacks have been called (and any awaitables waited on) and resolves to ``True`` if there were no exceptions raised by the callbacks, ``False`` otherwise """ event = self.event_class(self.source(), cast(str, self.topic), *args, **kwargs) return self.dispatch_raw(event)
Create and dispatch an event. This method constructs an event object and then passes it to :meth:`dispatch_event` for the actual dispatching. :param args: positional arguments to the constructor of the associated event class :param kwargs: keyword arguments to the constructor of the associated event class :returns: an awaitable that completes when all the callbacks have been called (and any awaitables waited on) and resolves to ``True`` if there were no exceptions raised by the callbacks, ``False`` otherwise
def load(self, config, file_object, prefer=None): """ An abstract method that loads from a given file object. :param class config: The config class to load into :param file file_object: The file object to load from :param str prefer: The preferred serialization module name :returns: A dictionary converted from the content of the given file object :rtype: dict """ return self.loads(config, file_object.read(), prefer=prefer)
An abstract method that loads from a given file object. :param class config: The config class to load into :param file file_object: The file object to load from :param str prefer: The preferred serialization module name :returns: A dictionary converted from the content of the given file object :rtype: dict
def func_load(code, defaults=None, closure=None, globs=None): '''Deserialize user defined function.''' if isinstance(code, (tuple, list)): # unpack previous dump code, defaults, closure = code code = marshal.loads(code.encode('raw_unicode_escape')) if globs is None: globs = globals() return python_types.FunctionType(code, globs, name=code.co_name, argdefs=defaults, closure=closure)
Deserialize user defined function.
def get_context(self, template): """Get the context for a template. If no matching value is found, an empty context is returned. Otherwise, this returns either the matching value if the value is dictionary-like or the dictionary returned by calling it with *template* if the value is a function. If several matching values are found, the resulting dictionaries will be merged before being returned if mergecontexts is True. Otherwise, only the first matching value is returned. :param template: the template to get the context for """ context = {} for regex, context_generator in self.contexts: if re.match(regex, template.name): if inspect.isfunction(context_generator): if _has_argument(context_generator): context.update(context_generator(template)) else: context.update(context_generator()) else: context.update(context_generator) if not self.mergecontexts: break return context
Get the context for a template. If no matching value is found, an empty context is returned. Otherwise, this returns either the matching value if the value is dictionary-like or the dictionary returned by calling it with *template* if the value is a function. If several matching values are found, the resulting dictionaries will be merged before being returned if mergecontexts is True. Otherwise, only the first matching value is returned. :param template: the template to get the context for
async def get_target(config, url): """ Given a URL, get the webmention endpoint """ previous = config.cache.get( 'target', url, schema_version=SCHEMA_VERSION) if config.cache else None headers = previous.caching if previous else None request = await utils.retry_get(config, url, headers=headers) if not request or not request.success: return previous if request.cached: return previous current = Target(request) if config.cache: config.cache.set('target', url, current) return current
Given a URL, get the webmention endpoint
def call(self, method, args={}, retry=False, retry_policy=None, ticket=None, **props): """Send message to the same actor and return :class:`AsyncResult`.""" ticket = ticket or uuid() reply_q = self.get_reply_queue(ticket) self.cast(method, args, declare=[reply_q], reply_to=ticket, **props) return self.AsyncResult(ticket, self)
Send message to the same actor and return :class:`AsyncResult`.
def _sift_and_init_configs(self, input_dict): """ Removes all key/v for keys that exist in the overall config and activates them. Used to weed out config keys from tokens in a given input. """ configs = {} for k, v in iteritems(input_dict): if (k not in map(str.lower, self.format_order) and any([f_order.lower() in k for f_order in self.format_order])): try: self.CFG.get(self.CONFIG_PATH + [k]) except errors.ResourceNotFoundError: pass finally: configs[k] = v for key, val in iteritems(configs): input_dict.pop(key, None) if configs: self.initialize_overall_config_settings(input_dict=configs)
Removes all key/v for keys that exist in the overall config and activates them. Used to weed out config keys from tokens in a given input.
def chunk(seq: ActualIterable[T]) -> ActualIterable[ActualIterable[T]]: """ >>> from Redy.Collections import Traversal, Flow >>> x = [1, 1, 2] >>> assert Flow(x)[Traversal.chunk][list].unbox == [[1, 1], [2]] >>> assert Flow([])[Traversal.chunk][list].unbox == [] """ seq = iter(seq) try: head = next(seq) except StopIteration: return iter(seq) current_status = head group = [head] for each in seq: status = each if status != current_status: yield group group = [each] else: group.append(each) current_status = status if group: yield group
>>> from Redy.Collections import Traversal, Flow >>> x = [1, 1, 2] >>> assert Flow(x)[Traversal.chunk][list].unbox == [[1, 1], [2]] >>> assert Flow([])[Traversal.chunk][list].unbox == []
def plotting_context(context=None, font_scale=1, rc=None): """Return a parameter dict to scale elements of the figure. This affects things like the size of the labels, lines, and other elements of the plot, but not the overall style. The base context is "notebook", and the other contexts are "paper", "talk", and "poster", which are version of the notebook parameters scaled by .8, 1.3, and 1.6, respectively. This function returns an object that can be used in a ``with`` statement to temporarily change the context parameters. Parameters ---------- context : dict, None, or one of {paper, notebook, talk, poster} A dictionary of parameters or the name of a preconfigured set. font_scale : float, optional Separate scaling factor to independently scale the size of the font elements. rc : dict, optional Parameter mappings to override the values in the preset seaborn context dictionaries. This only updates parameters that are considered part of the context definition. Examples -------- >>> c = plotting_context("poster") >>> c = plotting_context("notebook", font_scale=1.5) >>> c = plotting_context("talk", rc={"lines.linewidth": 2}) >>> import matplotlib.pyplot as plt >>> with plotting_context("paper"): ... f, ax = plt.subplots() ... ax.plot(x, y) # doctest: +SKIP See Also -------- set_context : set the matplotlib parameters to scale plot elements axes_style : return a dict of parameters defining a figure style color_palette : define the color palette for a plot """ if context is None: context_dict = {k: mpl.rcParams[k] for k in _context_keys} elif isinstance(context, dict): context_dict = context else: contexts = ["paper", "notebook", "talk", "poster"] if context not in contexts: raise ValueError("context must be in %s" % ", ".join(contexts)) # Set up dictionary of default parameters base_context = { "figure.figsize": np.array([8, 5.5]), "font.size": 12, "axes.labelsize": 11, "axes.titlesize": 12, "xtick.labelsize": 10, "ytick.labelsize": 10, "legend.fontsize": 10, "grid.linewidth": 1, "lines.linewidth": 1.75, "patch.linewidth": .3, "lines.markersize": 7, "lines.markeredgewidth": 0, "xtick.major.width": 1, "ytick.major.width": 1, "xtick.minor.width": .5, "ytick.minor.width": .5, "xtick.major.pad": 7, "ytick.major.pad": 7, } # Scale all the parameters by the same factor depending on the context scaling = dict(paper=.8, notebook=1, talk=1.3, poster=1.6)[context] context_dict = {k: v * scaling for k, v in base_context.items()} # Now independently scale the fonts font_keys = ["axes.labelsize", "axes.titlesize", "legend.fontsize", "xtick.labelsize", "ytick.labelsize", "font.size"] font_dict = {k: context_dict[k] * font_scale for k in font_keys} context_dict.update(font_dict) # Implement hack workaround for matplotlib bug # See https://github.com/mwaskom/seaborn/issues/344 # There is a bug in matplotlib 1.4.2 that makes points invisible when # they don't have an edgewidth. It will supposedly be fixed in 1.4.3. if mpl.__version__ == "1.4.2": context_dict["lines.markeredgewidth"] = 0.01 # Override these settings with the provided rc dictionary if rc is not None: rc = {k: v for k, v in rc.items() if k in _context_keys} context_dict.update(rc) # Wrap in a _PlottingContext object so this can be used in a with statement context_object = _PlottingContext(context_dict) return context_object
Return a parameter dict to scale elements of the figure. This affects things like the size of the labels, lines, and other elements of the plot, but not the overall style. The base context is "notebook", and the other contexts are "paper", "talk", and "poster", which are version of the notebook parameters scaled by .8, 1.3, and 1.6, respectively. This function returns an object that can be used in a ``with`` statement to temporarily change the context parameters. Parameters ---------- context : dict, None, or one of {paper, notebook, talk, poster} A dictionary of parameters or the name of a preconfigured set. font_scale : float, optional Separate scaling factor to independently scale the size of the font elements. rc : dict, optional Parameter mappings to override the values in the preset seaborn context dictionaries. This only updates parameters that are considered part of the context definition. Examples -------- >>> c = plotting_context("poster") >>> c = plotting_context("notebook", font_scale=1.5) >>> c = plotting_context("talk", rc={"lines.linewidth": 2}) >>> import matplotlib.pyplot as plt >>> with plotting_context("paper"): ... f, ax = plt.subplots() ... ax.plot(x, y) # doctest: +SKIP See Also -------- set_context : set the matplotlib parameters to scale plot elements axes_style : return a dict of parameters defining a figure style color_palette : define the color palette for a plot
def get_namespace_statistics(self, namespace, start_offset, end_offset): """Get namespace statistics for the period between start_offset and end_offset (inclusive)""" cursor = self.cursor cursor.execute('SELECT SUM(data_points), SUM(byte_count) ' 'FROM gauged_statistics WHERE namespace = %s ' 'AND offset BETWEEN %s AND %s', (namespace, start_offset, end_offset)) return [long(count or 0) for count in cursor.fetchone()]
Get namespace statistics for the period between start_offset and end_offset (inclusive)
def init_account(self): """Setup a new GitHub account.""" ghuser = self.api.me() # Setup local access tokens to be used by the webhooks hook_token = ProviderToken.create_personal( 'github-webhook', self.user_id, scopes=['webhooks:event'], is_internal=True, ) # Initial structure of extra data self.account.extra_data = dict( id=ghuser.id, login=ghuser.login, name=ghuser.name, tokens=dict( webhook=hook_token.id, ), repos=dict(), last_sync=iso_utcnow(), ) db.session.add(self.account) # Sync data from GitHub, but don't check repository hooks yet. self.sync(hooks=False)
Setup a new GitHub account.
def netspeed_by_name(self, hostname): """ Returns NetSpeed name from hostname. Can be Unknown, Dial-up, Cable, or Corporate. :arg hostname: Hostname (e.g. example.com) """ addr = self._gethostbyname(hostname) return self.netspeed_by_addr(addr)
Returns NetSpeed name from hostname. Can be Unknown, Dial-up, Cable, or Corporate. :arg hostname: Hostname (e.g. example.com)
def from_array(array): """ Deserialize a new ReplyKeyboardMarkup from a given dictionary. :return: new ReplyKeyboardMarkup instance. :rtype: ReplyKeyboardMarkup """ if array is None or not array: return None # end if assert_type_or_raise(array, dict, parameter_name="array") from pytgbot.api_types.sendable.reply_markup import KeyboardButton data = {} data['keyboard'] = KeyboardButton.from_array_list(array.get('keyboard'), list_level=2) data['resize_keyboard'] = bool(array.get('resize_keyboard')) if array.get('resize_keyboard') is not None else None data['one_time_keyboard'] = bool(array.get('one_time_keyboard')) if array.get('one_time_keyboard') is not None else None data['selective'] = bool(array.get('selective')) if array.get('selective') is not None else None instance = ReplyKeyboardMarkup(**data) instance._raw = array return instance
Deserialize a new ReplyKeyboardMarkup from a given dictionary. :return: new ReplyKeyboardMarkup instance. :rtype: ReplyKeyboardMarkup
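A hedged round-trip sketch, assuming pytgbot's sendable reply_markup module where this deserializer lives; the button layout is made up:

from pytgbot.api_types.sendable.reply_markup import ReplyKeyboardMarkup

payload = {
    'keyboard': [[{'text': 'Yes'}, {'text': 'No'}]],   # one row with two buttons
    'resize_keyboard': True,
    'one_time_keyboard': True,
}

markup = ReplyKeyboardMarkup.from_array(payload)
assert markup.resize_keyboard is True
assert markup.keyboard[0][0].text == 'Yes'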
def get_command(self, ctx, cmd_name): """Map some aliases to their 'real' names.""" cmd_name = self.MAP.get(cmd_name, cmd_name) return super(AliasedGroup, self).get_command(ctx, cmd_name)
Map some aliases to their 'real' names.
def imagetransformer_sep_channels(): """separate rgb embeddings.""" hparams = imagetransformer_base() hparams.num_heads = 4 hparams.attention_key_channels = hparams.attention_value_channels = 0 hparams.hidden_size = 256 hparams.filter_size = 512 hparams.num_hidden_layers = 6 return hparams
separate rgb embeddings.
def _GenerateFleetspeakConfig(self, template_dir, rpm_build_dir): """Generates a Fleetspeak config for GRR.""" source_config = os.path.join( template_dir, "fleetspeak", os.path.basename( config.CONFIG.Get( "ClientBuilder.fleetspeak_config_path", context=self.context))) fleetspeak_service_dir = config.CONFIG.Get( "ClientBuilder.fleetspeak_service_dir", context=self.context) dest_config_dir = os.path.join(rpm_build_dir, fleetspeak_service_dir[1:]) utils.EnsureDirExists(dest_config_dir) dest_config_path = os.path.join( dest_config_dir, config.CONFIG.Get( "Client.fleetspeak_unsigned_config_fname", context=self.context)) self.GenerateFile( input_filename=source_config, output_filename=dest_config_path)
Generates a Fleetspeak config for GRR.
def do_json_set_many(self, params): """ \x1b[1mNAME\x1b[0m json_set_many - like `json_set`, but for multiple key/value pairs \x1b[1mSYNOPSIS\x1b[0m json_set_many <path> <keys> <value> <value_type> <keys1> <value1> <value_type1> ... \x1b[1mDESCRIPTION\x1b[0m If the key exists and the value is different, the znode will be updated with the key set to its new value. If the key does not exist, it'll be created and the znode will be updated with the serialized version of the new object. The value's type will be determined by the value_type parameter. This is an atomic operation, either all given keys are set in one ZK operation or none are. \x1b[1mEXAMPLES\x1b[0m > create /props '{"a": {"b": 4}}' > json_cat /props { "a": { "b": 4 } } > json_set_many /props a.b 5 int a.c.d true bool > json_cat /props { "a": { "c": { "d": true }, "b": 5 } } """ # Ensure we have a balance set of (key, value, type) tuples. if len(params.keys_values_types) % 3 != 0: self.show_output('Bad list of parameters') return for key, _, _ in grouper(params.keys_values_types, 3): try: Keys.validate(key) except Keys.Bad as ex: self.show_output(str(ex)) return # Fetch & deserialize znode. jstr, stat = self._zk.get(params.path) try: obj_src = json_deserialize(jstr) except BadJSON: self.show_output("Path %s has bad JSON.", params.path) obj_dst = copy.deepcopy(obj_src) # Cast values to their given type. for key, value, ptype in grouper(params.keys_values_types, 3): try: Keys.set(obj_dst, key, to_type(value, ptype)) except Keys.Missing as ex: self.show_output("Path %s is missing key %s.", params.path, ex) return except ValueError: self.show_output("Bad value_type") return # Pass along the read version, to ensure we are updating what we read. self.set(params.path, json.dumps(obj_dst), version=stat.version)
\x1b[1mNAME\x1b[0m json_set_many - like `json_set`, but for multiple key/value pairs \x1b[1mSYNOPSIS\x1b[0m json_set_many <path> <keys> <value> <value_type> <keys1> <value1> <value_type1> ... \x1b[1mDESCRIPTION\x1b[0m If the key exists and the value is different, the znode will be updated with the key set to its new value. If the key does not exist, it'll be created and the znode will be updated with the serialized version of the new object. The value's type will be determined by the value_type parameter. This is an atomic operation, either all given keys are set in one ZK operation or none are. \x1b[1mEXAMPLES\x1b[0m > create /props '{"a": {"b": 4}}' > json_cat /props { "a": { "b": 4 } } > json_set_many /props a.b 5 int a.c.d true bool > json_cat /props { "a": { "c": { "d": true }, "b": 5 } }
def main(): """This is run if file is directly executed, but not if imported as module. Having this in a separate function allows importing the file into interactive python, and still able to execute the function for testing""" parser = argparse.ArgumentParser() parser.add_argument("-f", "--file", required=True, help="input file", type=str) parser.add_argument("-l", "--locus", required=True, help="Locus", type=str) parser.add_argument("-k", "--kir", help="Option for running with KIR", action='store_true') parser.add_argument("-s", "--server", help="Option for running with a server", action='store_true') parser.add_argument("-v", "--verbose", help="Option for running in verbose", action='store_true') args = parser.parse_args() fastafile = args.file locus = args.locus verbose = False if args.verbose: verbose = True verbose = False if args.verbose: verbose = True kir = False if args.kir: kir = True serv = False if args.server: serv = True if verbose: logging.basicConfig(format='%(asctime)s - %(name)-35s - %(levelname)-5s - %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p', level=logging.INFO) server = None if serv: server = BioSeqDatabase.open_database(driver="pymysql", user="root", passwd="", host="localhost", db="bioseqdb") seqann = BioSeqAnn(verbose=True, kir=kir) for seq in SeqIO.parse(fastafile, "fasta"): ann = seqann.annotate(seq, locus=locus) print('{:*^20} {:^20} {:*^20}'.format("", str(seq.description), "")) l = 0 for f in ann.annotation: if isinstance(ann.annotation[f], DBSeq): print(f, ann.method, str(ann.annotation[f]), sep="\t") l += len(ann.annotation[f]) else: print(f, ann.method, str(ann.annotation[f].seq), sep="\t") l += len(ann.annotation[f].seq) print("") if serv: server.close()
This is run if the file is directly executed, but not if it is imported as a module. Having this in a separate function allows importing the file into interactive python while still being able to execute the function for testing
def version(): """Return version string.""" with open(os.path.join('curtsies', '__init__.py')) as input_file: for line in input_file: if line.startswith('__version__'): return ast.parse(line).body[0].value.s
Return version string.
def retract_project_bid(session, bid_id): """ Retract a bid on a project """ headers = { 'Content-Type': 'application/x-www-form-urlencoded' } bid_data = { 'action': 'retract' } # PUT /api/projects/0.1/bids/{bid_id}/?action=retract endpoint = 'bids/{}'.format(bid_id) response = make_put_request(session, endpoint, headers=headers, params_data=bid_data) json_data = response.json() if response.status_code == 200: return json_data['status'] else: json_data = response.json() raise BidNotRetractedException(message=json_data['message'], error_code=json_data['error_code'], request_id=json_data['request_id'])
Retract a bid on a project
def custom_code(self, mask: str = '@###', char: str = '@', digit: str = '#') -> str: """Generate custom code using ascii uppercase and random integers. :param mask: Mask of code. :param char: Placeholder for characters. :param digit: Placeholder for digits. :return: Custom code. """ char_code = ord(char) digit_code = ord(digit) code = bytearray(len(mask)) def random_int(a: int, b: int) -> int: b = b - a return int(self.random() * b) + a _mask = mask.encode() for i, p in enumerate(_mask): if p == char_code: a = random_int(65, 91) # A-Z elif p == digit_code: a = random_int(48, 58) # 0-9 else: a = p code[i] = a return code.decode()
Generate custom code using ascii uppercase and random integers. :param mask: Mask of code. :param char: Placeholder for characters. :param digit: Placeholder for digits. :return: Custom code.
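A quick way to see the mask convention in action is to re-create the '@'/'#' substitution with the standard random module; the helper below is a stand-alone illustrative sketch, not part of the provider class above.

import random

def render_mask(mask='@###', char='@', digit='#'):
    # '@' placeholders become uppercase ASCII letters, '#' placeholders become
    # decimal digits, and any other character is copied through unchanged.
    out = []
    for p in mask:
        if p == char:
            out.append(chr(random.randint(65, 90)))  # A-Z
        elif p == digit:
            out.append(str(random.randint(0, 9)))    # 0-9
        else:
            out.append(p)
    return ''.join(out)

print(render_mask('@@##-@###'))  # e.g. 'QK47-D291'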
def process(self, event): """Put and process tasks in queue. """ logger.info(f"{self}: put {event.src_path}") self.queue.put(os.path.basename(event.src_path))
Put and process tasks in queue.
def retrieveJsonResponseFromServer(url): """Retrieves a JSON response from the server. Input parameters ---------------- url : url to call for retrieving the JSON response Return ------ A dictionary Exception --------- SITools2Exception when a problem during the download or parsing happens""" jsonData = None try: data = urllib.urlopen(url) jsonData = simplejson.load(data) except Exception as ex: raise Sitools2Exception(ex) return jsonData
Retrieves a JSON response from the server. Input parameters ---------------- url : url to call for retrieving the JSON response Return ------ A dictionary Exception --------- SITools2Exception when a problem during the download or parsing happens
def read_tcp(self, length): """Read Transmission Control Protocol (TCP). Structure of TCP header [RFC 793]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Source Port | Destination Port | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Sequence Number | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Acknowledgement Number | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Data | |U|A|P|R|S|F| | | Offset| Reserved |R|C|S|S|Y|I| Window | | | |G|K|H|T|N|N| | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Checksum | Urgent Pointer | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Options | Padding | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | data | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 tcp.srcport Source Port 2 16 tcp.dstport Destination Port 4 32 tcp.seq Sequence Number 8 64 tcp.ack Acknowledgement Number (if ACK set) 12 96 tcp.hdr_len Data Offset 12 100 - Reserved (must be zero) 12 103 tcp.flags.ns ECN Concealment Protection (NS) 13 104 tcp.flags.cwr Congestion Window Reduced (CWR) 13 105 tcp.flags.ece ECN-Echo (ECE) 13 106 tcp.flags.urg Urgent (URG) 13 107 tcp.flags.ack Acknowledgement (ACK) 13 108 tcp.flags.psh Push Function (PSH) 13 109 tcp.flags.rst Reset Connection (RST) 13 110 tcp.flags.syn Synchronize Sequence Numbers (SYN) 13 111 tcp.flags.fin Last Packet from Sender (FIN) 14 112 tcp.window_size Size of Receive Window 16 128 tcp.checksum Checksum 18 144 tcp.urgent_pointer Urgent Pointer (if URG set) 20 160 tcp.opt TCP Options (if data offset > 5) """ if length is None: length = len(self) _srcp = self._read_unpack(2) _dstp = self._read_unpack(2) _seqn = self._read_unpack(4) _ackn = self._read_unpack(4) _lenf = self._read_binary(1) _flag = self._read_binary(1) _wins = self._read_unpack(2) _csum = self._read_fileng(2) _urgp = self._read_unpack(2) tcp = dict( srcport=_srcp, dstport=_dstp, seq=_seqn, ack=_ackn, hdr_len=int(_lenf[:4], base=2) * 4, flags=dict( ns=True if int(_lenf[7]) else False, cwr=True if int(_flag[0]) else False, ece=True if int(_flag[1]) else False, urg=True if int(_flag[2]) else False, ack=True if int(_flag[3]) else False, psh=True if int(_flag[4]) else False, rst=True if int(_flag[5]) else False, syn=True if int(_flag[6]) else False, fin=True if int(_flag[7]) else False, ), window_size=_wins, checksum=_csum, urgent_pointer=_urgp, ) # packet type flags self._syn = True if int(_flag[6]) else False self._ack = True if int(_flag[3]) else False _hlen = tcp['hdr_len'] _optl = _hlen - 20 if _optl: options = self._read_tcp_options(_optl) tcp['opt'] = options[0] # tuple of option acronyms tcp.update(options[1]) # merge option info to buffer length -= _hlen tcp['packet'] = self._read_packet(header=_hlen, payload=length) return self._decode_next_layer(tcp, None, length)
Read Transmission Control Protocol (TCP). Structure of TCP header [RFC 793]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Source Port | Destination Port | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Sequence Number | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Acknowledgement Number | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Data | |U|A|P|R|S|F| | | Offset| Reserved |R|C|S|S|Y|I| Window | | | |G|K|H|T|N|N| | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Checksum | Urgent Pointer | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Options | Padding | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | data | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 tcp.srcport Source Port 2 16 tcp.dstport Destination Port 4 32 tcp.seq Sequence Number 8 64 tcp.ack Acknowledgement Number (if ACK set) 12 96 tcp.hdr_len Data Offset 12 100 - Reserved (must be zero) 12 103 tcp.flags.ns ECN Concealment Protection (NS) 13 104 tcp.flags.cwr Congestion Window Reduced (CWR) 13 105 tcp.flags.ece ECN-Echo (ECE) 13 106 tcp.flags.urg Urgent (URG) 13 107 tcp.flags.ack Acknowledgement (ACK) 13 108 tcp.flags.psh Push Function (PSH) 13 109 tcp.flags.rst Reset Connection (RST) 13 110 tcp.flags.syn Synchronize Sequence Numbers (SYN) 13 111 tcp.flags.fin Last Packet from Sender (FIN) 14 112 tcp.window_size Size of Receive Window 16 128 tcp.checksum Checksum 18 144 tcp.urgent_pointer Urgent Pointer (if URG set) 20 160 tcp.opt TCP Options (if data offset > 5)
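The fixed 20-byte header in the diagram above maps directly onto a struct format string; the fragment below is a minimal, self-contained sketch of that layout (the sample bytes are invented for illustration and this is not the reader class's own parsing code).

import struct

# Build a fake 20-byte TCP header: ports, seq/ack, data offset 5 (x4 = 20 bytes),
# flags ACK+PSH (0x18), a window size, zero checksum and urgent pointer.
raw = struct.pack('!HHIIBBHHH', 443, 52100, 1000, 2000, 5 << 4, 0x18, 65535, 0, 0)

srcport, dstport, seq, ack, off, flags, window, csum, urg = struct.unpack('!HHIIBBHHH', raw)
hdr_len = (off >> 4) * 4  # data offset lives in the high nibble, counted in 32-bit words
flag_bits = {name: bool(flags & bit)
             for name, bit in [('cwr', 0x80), ('ece', 0x40), ('urg', 0x20), ('ack', 0x10),
                               ('psh', 0x08), ('rst', 0x04), ('syn', 0x02), ('fin', 0x01)]}
print(srcport, dstport, hdr_len, flag_bits['ack'], flag_bits['psh'])  # 443 52100 20 True True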
def build_schema(self, fields): """ Build the schema from fields. :param fields: A list of fields in the index :returns: list of dictionaries Each dictionary has the keys field_name: The name of the field index type: what type of value it is 'multi_valued': if it allows more than one value 'column': a number identifying it 'type': the type of the field 'multi_valued': 'false', 'column': 0} """ content_field_name = '' schema_fields = [ {'field_name': ID, 'type': 'text', 'multi_valued': 'false', 'column': 0}, {'field_name': DJANGO_ID, 'type': 'integer', 'multi_valued': 'false', 'column': 1}, {'field_name': DJANGO_CT, 'type': 'text', 'multi_valued': 'false', 'column': 2}, ] self._columns[ID] = 0 self._columns[DJANGO_ID] = 1 self._columns[DJANGO_CT] = 2 column = len(schema_fields) for field_name, field_class in sorted(list(fields.items()), key=lambda n: n[0]): if field_class.document is True: content_field_name = field_class.index_fieldname if field_class.indexed is True: field_data = { 'field_name': field_class.index_fieldname, 'type': 'text', 'multi_valued': 'false', 'column': column, } if field_class.field_type == 'date': field_data['type'] = 'date' elif field_class.field_type == 'datetime': field_data['type'] = 'datetime' elif field_class.field_type == 'integer': field_data['type'] = 'integer' elif field_class.field_type == 'float': field_data['type'] = 'float' elif field_class.field_type == 'boolean': field_data['type'] = 'boolean' elif field_class.field_type == 'ngram': field_data['type'] = 'ngram' elif field_class.field_type == 'edge_ngram': field_data['type'] = 'edge_ngram' if field_class.is_multivalued: field_data['multi_valued'] = 'true' schema_fields.append(field_data) self._columns[field_data['field_name']] = column column += 1 return content_field_name, schema_fields
Build the schema from fields. :param fields: A list of fields in the index :returns: list of dictionaries Each dictionary has the keys field_name: The name of the field index type: what type of value it is 'multi_valued': if it allows more than one value 'column': a number identifying it 'type': the type of the field 'multi_valued': 'false', 'column': 0}
def reflection_matrix_pow(reflection_matrix: np.ndarray, exponent: float): """Raises a matrix with two opposing eigenvalues to a power. Args: reflection_matrix: The matrix to raise to a power. exponent: The power to raise the matrix to. Returns: The given matrix raised to the given power. """ # The eigenvalues are x and -x for some complex unit x. Determine x. squared_phase = np.dot(reflection_matrix[:, 0], reflection_matrix[0, :]) phase = complex(np.sqrt(squared_phase)) # Extract +x and -x eigencomponents of the matrix. i = np.eye(reflection_matrix.shape[0]) * phase pos_part = (i + reflection_matrix) * 0.5 neg_part = (i - reflection_matrix) * 0.5 # Raise the matrix to a power by raising its eigencomponents to that power. pos_factor = phase**(exponent - 1) neg_factor = pos_factor * complex(-1)**exponent pos_part_raised = pos_factor * pos_part neg_part_raised = neg_part * neg_factor return pos_part_raised + neg_part_raised
Raises a matrix with two opposing eigenvalues to a power. Args: reflection_matrix: The matrix to raise to a power. exponent: The power to raise the matrix to. Returns: The given matrix raised to the given power.
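The trick above can be sanity-checked numerically on a simple reflection-like matrix (Pauli X has eigenvalues +1 and -1): raising it to the 0.5 power and squaring the result should give the original matrix back. The snippet below mirrors the same phase/projector decomposition as a stand-alone sketch.

import numpy as np

X = np.array([[0, 1], [1, 0]], dtype=complex)  # eigenvalues +1 and -1
exponent = 0.5

squared_phase = np.dot(X[:, 0], X[0, :])
phase = complex(np.sqrt(squared_phase))
i = np.eye(2) * phase
pos, neg = (i + X) * 0.5, (i - X) * 0.5          # +x and -x eigenprojectors (times phase)
pos_factor = phase ** (exponent - 1)
neg_factor = pos_factor * (-1) ** exponent
half = pos_factor * pos + neg_factor * neg

print(np.allclose(half @ half, X))  # True: the half power squares back to X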
def get_app(self, app_id, embed_tasks=False, embed_counts=False, embed_deployments=False, embed_readiness=False, embed_last_task_failure=False, embed_failures=False, embed_task_stats=False): """Get a single app. :param str app_id: application ID :param bool embed_tasks: embed tasks in result :param bool embed_counts: embed all task counts :param bool embed_deployments: embed all deployment identifier :param bool embed_readiness: embed all readiness check results :param bool embed_last_task_failure: embeds the last task failure :param bool embed_failures: shorthand for embed_last_task_failure :param bool embed_task_stats: embed task stats in result :returns: application :rtype: :class:`marathon.models.app.MarathonApp` """ params = {} embed_params = { 'app.tasks': embed_tasks, 'app.counts': embed_counts, 'app.deployments': embed_deployments, 'app.readiness': embed_readiness, 'app.lastTaskFailure': embed_last_task_failure, 'app.failures': embed_failures, 'app.taskStats': embed_task_stats } filtered_embed_params = [k for (k, v) in embed_params.items() if v] if filtered_embed_params: params['embed'] = filtered_embed_params response = self._do_request( 'GET', '/v2/apps/{app_id}'.format(app_id=app_id), params=params) return self._parse_response(response, MarathonApp, resource_name='app')
Get a single app. :param str app_id: application ID :param bool embed_tasks: embed tasks in result :param bool embed_counts: embed all task counts :param bool embed_deployments: embed all deployment identifier :param bool embed_readiness: embed all readiness check results :param bool embed_last_task_failure: embeds the last task failure :param bool embed_failures: shorthand for embed_last_task_failure :param bool embed_task_stats: embed task stats in result :returns: application :rtype: :class:`marathon.models.app.MarathonApp`
def disassemble(co, lasti=-1): """Disassemble a code object.""" # Taken from dis.disassemble, returns disassembled code instead of printing # it (the fuck python ?). # Also, unicodified. # Also, use % operator instead of string operations. # Also, one statement per line. out = StringIO() code = co.co_code labels = dis.findlabels(code) linestarts = dict(dis.findlinestarts(co)) n = len(code) i = 0 extended_arg = 0 free = None while i < n: c = code[i] op = ord(c) if i in linestarts: if i > 0: print(end=u'\n', file=out) print(u'%3d' % linestarts[i], end=u' ', file=out) else: print(u' ', end=u' ', file=out) if i == lasti: print(u'-->', end=u' ', file=out) else: print(u' ', end=u' ', file=out) if i in labels: print(u'>>', end=u' ', file=out) else: print(u' ', end=u' ', file=out) print(u'%4i' % i, end=u' ', file=out) print(u'%-20s' % dis.opname[op], end=u' ', file=out) i = i + 1 if op >= dis.HAVE_ARGUMENT: oparg = ord(code[i]) + ord(code[i + 1]) * 256 + extended_arg extended_arg = 0 i = i + 2 if op == dis.EXTENDED_ARG: extended_arg = oparg * 65536 print(u'%5i' % oparg, end=u' ', file=out) if op in dis.hasconst: print(u'(%r)' % co.co_consts[oparg], end=u' ', file=out) elif op in dis.hasname: print(u'(%s)' % co.co_names[oparg], end=u' ', file=out) elif op in dis.hasjrel: print(u'(to %r)' % (i + oparg), end=u' ', file=out) elif op in dis.haslocal: print(u'(%s)' % co.co_varnames[oparg], end=u' ', file=out) elif op in dis.hascompare: print(u'(%s)' % dis.cmp_op[oparg], end=u' ', file=out) elif op in dis.hasfree: if free is None: free = co.co_cellvars + co.co_freevars print(u'(%s)' % free[oparg], end=u' ', file=out) print(end=u'\n', file=out) return out.getvalue()
Disassemble a code object.
def log_y_cb(self, w, val): """Toggle linear/log scale for Y-axis.""" self.tab_plot.logy = val self.plot_two_columns()
Toggle linear/log scale for Y-axis.
def changelist_view(self, request, extra_context=None): """Add advanced_filters form to changelist context""" if extra_context is None: extra_context = {} response = self.adv_filters_handle(request, extra_context=extra_context) if response: return response return super(AdminAdvancedFiltersMixin, self ).changelist_view(request, extra_context=extra_context)
Add advanced_filters form to changelist context
def _encode(s, encoding=None, errors=None): """Encodes *s*.""" if encoding is None: encoding = ENCODING if errors is None: errors = ENCODING_ERRORS return s.encode(encoding, errors) if isinstance(s, unicode) else s
Encodes *s*.
def ExtractConfig(self): """This installer extracts a config file from the .pkg file.""" logging.info("Extracting config file from .pkg.") pkg_path = os.environ.get("PACKAGE_PATH", None) if pkg_path is None: logging.error("Could not locate package, giving up.") return zf = zipfile.ZipFile(pkg_path, mode="r") fd = zf.open("config.yaml") install_dir = os.path.dirname(config.CONFIG.parser.filename) # We write this config to disk so that Intialize can find the build.yaml # referenced inside the config as a relative path. This config isn't used # after install time. installer_config = os.path.join(install_dir, "installer_config.yaml") with open(installer_config, "wb") as f: f.write(fd.read()) packaged_config = config.CONFIG.MakeNewConfig() packaged_config.Initialize( filename=installer_config, parser=config_lib.YamlParser) new_config = config.CONFIG.MakeNewConfig() new_config.SetWriteBack(config.CONFIG["Config.writeback"]) for info in config.CONFIG.type_infos: try: new_value = packaged_config.GetRaw(info.name, None) except type_info.TypeValueError: continue try: old_value = config.CONFIG.GetRaw(info.name, None) if not new_value or new_value == old_value: continue except type_info.TypeValueError: pass new_config.SetRaw(info.name, new_value) new_config.Write() logging.info("Config file extracted successfully.") logging.info("Extracting additional files.") for zinfo in zf.filelist: basename = os.path.basename(zinfo.filename) if basename != "config.yaml": with open(os.path.join(install_dir, basename), "wb") as f: f.write(zf.open(zinfo.filename).read())
This installer extracts a config file from the .pkg file.
def get_params(self): """ returns a list """ value = self._get_lookup(self.operator, self.value) self.params.append(self.value) return self.params
returns a list
def get_instance(self, payload): """ Build an instance of StreamMessageInstance :param dict payload: Payload response from the API :returns: twilio.rest.sync.v1.service.sync_stream.stream_message.StreamMessageInstance :rtype: twilio.rest.sync.v1.service.sync_stream.stream_message.StreamMessageInstance """ return StreamMessageInstance( self._version, payload, service_sid=self._solution['service_sid'], stream_sid=self._solution['stream_sid'], )
Build an instance of StreamMessageInstance :param dict payload: Payload response from the API :returns: twilio.rest.sync.v1.service.sync_stream.stream_message.StreamMessageInstance :rtype: twilio.rest.sync.v1.service.sync_stream.stream_message.StreamMessageInstance
def update_webhook(self, webhook, name=None, metadata=None): """ Updates the specified webhook. One or more of the parameters may be specified. """ return self.manager.update_webhook(self.scaling_group, policy=self, webhook=webhook, name=name, metadata=metadata)
Updates the specified webhook. One or more of the parameters may be specified.
def create(self, name, targetUrl, resource, event, filter=None, secret=None, **request_parameters): """Create a webhook. Args: name(basestring): A user-friendly name for this webhook. targetUrl(basestring): The URL that receives POST requests for each event. resource(basestring): The resource type for the webhook. event(basestring): The event type for the webhook. filter(basestring): The filter that defines the webhook scope. secret(basestring): The secret used to generate payload signature. **request_parameters: Additional request parameters (provides support for parameters that may be added in the future). Returns: Webhook: A Webhook object with the details of the created webhook. Raises: TypeError: If the parameter types are incorrect. ApiError: If the Webex Teams cloud returns an error. """ check_type(name, basestring, may_be_none=False) check_type(targetUrl, basestring, may_be_none=False) check_type(resource, basestring, may_be_none=False) check_type(event, basestring, may_be_none=False) check_type(filter, basestring) check_type(secret, basestring) post_data = dict_from_items_with_values( request_parameters, name=name, targetUrl=targetUrl, resource=resource, event=event, filter=filter, secret=secret, ) # API request json_data = self._session.post(API_ENDPOINT, json=post_data) # Return a webhook object created from the response JSON data return self._object_factory(OBJECT_TYPE, json_data)
Create a webhook. Args: name(basestring): A user-friendly name for this webhook. targetUrl(basestring): The URL that receives POST requests for each event. resource(basestring): The resource type for the webhook. event(basestring): The event type for the webhook. filter(basestring): The filter that defines the webhook scope. secret(basestring): The secret used to generate payload signature. **request_parameters: Additional request parameters (provides support for parameters that may be added in the future). Returns: Webhook: A Webhook object with the details of the created webhook. Raises: TypeError: If the parameter types are incorrect. ApiError: If the Webex Teams cloud returns an error.
def main(): """ NAME irmaq_magic.py DESCRIPTION plots IRM acquisition curves from measurements file SYNTAX irmaq_magic [command line options] INPUT takes magic formatted magic_measurements.txt files OPTIONS -h prints help message and quits -f FILE: specify input file, default is: magic_measurements.txt/measurements.txt -obj OBJ: specify object [loc, sit, sam, spc] for plot, default is by location -N ; do not normalize by last point - use original units -fmt [png,jpg,eps,pdf] set plot file format [default is svg] -sav save plot[s] and quit -DM MagIC data model number, default is 3 NOTE loc: location (study); sit: site; sam: sample; spc: specimen """ FIG = {} # plot dictionary FIG['exp'] = 1 # exp is figure 1 dir_path = './' plot, fmt = 0, 'svg' units = 'T', XLP = [] norm = 1 LP = "LP-IRM" if len(sys.argv) > 1: if '-h' in sys.argv: print(main.__doc__) sys.exit() data_model = int(pmag.get_named_arg("-DM", 3)) if '-N' in sys.argv: norm = 0 if '-sav' in sys.argv: plot = 1 if '-fmt' in sys.argv: ind = sys.argv.index("-fmt") fmt = sys.argv[ind + 1] if data_model == 3: in_file = pmag.get_named_arg("-f", 'measurements.txt') else: in_file = pmag.get_named_arg("-f", 'magic_measurements.txt') if '-WD' in sys.argv: ind = sys.argv.index('-WD') dir_path = sys.argv[ind + 1] dir_path = os.path.realpath(dir_path) in_file = pmag.resolve_file_name(in_file, dir_path) if '-WD' not in sys.argv: dir_path = os.path.split(in_file)[0] plot_by = pmag.get_named_arg("-obj", "loc") if data_model == 3: plot_key = 'location' if plot_by == 'sit': plot_key = 'site' if plot_by == 'sam': plot_key = 'sample' if plot_by == 'spc': plot_key = 'specimen' else: plot_key = 'er_location_name' if plot_by == 'sit': plot_key = 'er_site_name' if plot_by == 'sam': plot_key = 'er_sample_name' if plot_by == 'spc': plot_key = 'er_specimen_name' # set defaults and get more information if needed if data_model == 3: dmag_key = 'treat_dc_field' else: dmag_key = 'treatment_dc_field' # if data_model == 3 and plot_key != 'specimen': # gonna need to read in more files print('-W- You are trying to plot measurements by {}'.format(plot_key)) print(' By default, this information is not available in your measurement file.') print(' Trying to acquire this information from {}'.format(dir_path)) con = cb.Contribution(dir_path) meas_df = con.propagate_location_to_measurements() if meas_df is None: print('-W- No data found in {}'.format(dir_path)) return if plot_key not in meas_df.columns: print('-W- Could not find required data.') print(' Try a different plot key.') return else: print('-I- Found {} information, continuing with plotting'.format(plot_key)) # need to take the data directly from the contribution here, to keep # location/site/sample columns in the measurements table data = con.tables['measurements'].convert_to_pmag_data_list() file_type = "measurements" else: data, file_type = pmag.magic_read(in_file) # read in data sids = pmag.get_specs(data) pmagplotlib.plot_init(FIG['exp'], 6, 6) # # # find desired intensity data # # get plotlist # plotlist = [] if data_model == 3: intlist = ['magn_moment', 'magn_volume', 'magn_mass', 'magnitude'] else: intlist = ['measurement_magnitude', 'measurement_magn_moment', 'measurement_magn_volume', 'measurement_magn_mass'] IntMeths = [] # get all the records with this lab protocol #print('data', len(data)) #print('data[0]', data[0]) if data_model == 3: data = pmag.get_dictitem(data, 'method_codes', LP, 'has') else: data = pmag.get_dictitem(data, 'magic_method_codes', LP, 'has') Ints = {} NoInts, int_key = 1, "" for 
key in intlist: # get all non-blank data for intensity type Ints[key] = pmag.get_dictitem(data, key, '', 'F') if len(Ints[key]) > 0: NoInts = 0 if int_key == "": int_key = key if NoInts == 1: print('No intensity information found') sys.exit() for rec in Ints[int_key]: if rec[plot_key] not in plotlist: plotlist.append(rec[plot_key]) plotlist.sort() for plt in plotlist: print(plt) INTblock = [] # get data with right intensity info whose plot_key matches plot data = pmag.get_dictitem(Ints[int_key], plot_key, plt, 'T') # get a list of specimens with appropriate data sids = pmag.get_specs(data) if len(sids) > 0: title = data[0][plot_key] for s in sids: INTblock = [] # get data for each specimen if data_model == 3: sdata = pmag.get_dictitem(data, 'specimen', s, 'T') else: sdata = pmag.get_dictitem(data, 'er_specimen_name', s, 'T') for rec in sdata: INTblock.append([float(rec[dmag_key]), 0, 0, float(rec[int_key]), 1, 'g']) pmagplotlib.plot_mag(FIG['exp'], INTblock, title, 0, units, norm) files = {} for key in list(FIG.keys()): files[key] = title + '_' + LP + '.' + fmt if plot == 0: pmagplotlib.draw_figs(FIG) ans = input(" S[a]ve to save plot, [q]uit, Return to continue: ") if ans == 'q': sys.exit() if ans == "a": pmagplotlib.save_plots(FIG, files) if plt != plotlist[-1]: # if it isn't the last plot, init the next one pmagplotlib.plot_init(FIG['exp'], 6, 6) else: pmagplotlib.save_plots(FIG, files) pmagplotlib.clearFIG(FIG['exp'])
NAME irmaq_magic.py DESCRIPTION plots IRM acquisition curves from measurements file SYNTAX irmaq_magic [command line options] INPUT takes magic formatted magic_measurements.txt files OPTIONS -h prints help message and quits -f FILE: specify input file, default is: magic_measurements.txt/measurements.txt -obj OBJ: specify object [loc, sit, sam, spc] for plot, default is by location -N ; do not normalize by last point - use original units -fmt [png,jpg,eps,pdf] set plot file format [default is svg] -sav save plot[s] and quit -DM MagIC data model number, default is 3 NOTE loc: location (study); sit: site; sam: sample; spc: specimen
def fix_config(self, options): """ Fixes the options, if necessary. I.e., it adds all required elements to the dictionary. :param options: the options to fix :type options: dict :return: the (potentially) fixed options :rtype: dict """ options = super(Trigger, self).fix_config(options) opt = "condition" if opt not in options: options[opt] = "True" if opt not in self.help: self.help[opt] = "The (optional) condition for teeing off the tokens; uses the 'eval' method, "\ "ie the expression must evaluate to a boolean value; storage values placeholders "\ "'@{...}' get replaced with their string representations before evaluating the "\ "expression (string)." return options
Fixes the options, if necessary. I.e., it adds all required elements to the dictionary. :param options: the options to fix :type options: dict :return: the (potentially) fixed options :rtype: dict
def add_children(self, *children, **kwargs): """Convenience function: Adds objects as children in the scene graph.""" for child in children: self.add_child(child, **kwargs)
Convenience function: Adds objects as children in the scene graph.
def saveSettings(self, settings): """ Records the current structure of the view widget to the inputted \ settings instance. :param settings | <QSettings> """ # record the profile profile = self.saveProfile() key = self.objectName() settings.setValue('%s/profile' % key, wrapVariant(profile.toString())) # record the view type settings for viewType in self.viewTypes(): viewType.saveGlobalSettings(settings)
Records the current structure of the view widget to the inputted \ settings instance. :param settings | <QSettings>
def get_requirements(lookup=None): '''get_requirements reads in requirements and versions from the lookup obtained with get_lookup''' if lookup is None: lookup = get_lookup() install_requires = [] for module in lookup['INSTALL_REQUIRES']: module_name = module[0] module_meta = module[1] if "exact_version" in module_meta: dependency = "%s==%s" %(module_name,module_meta['exact_version']) elif "min_version" in module_meta: if module_meta['min_version'] is None: dependency = module_name else: dependency = "%s>=%s" %(module_name,module_meta['min_version']) install_requires.append(dependency) return install_requires
get_requirements reads in requirements and versions from the lookup obtained with get_lookup
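The lookup-to-pip mapping is easiest to see with a small made-up lookup dict of the expected shape; this is only an illustration of the 'exact_version' / 'min_version' convention, not the real lookup file.

lookup = {'INSTALL_REQUIRES': [
    ('requests', {'exact_version': '2.31.0'}),
    ('numpy', {'min_version': '1.21'}),
    ('pyyaml', {'min_version': None}),
]}

install_requires = []
for name, meta in lookup['INSTALL_REQUIRES']:
    if 'exact_version' in meta:
        install_requires.append('%s==%s' % (name, meta['exact_version']))
    elif 'min_version' in meta:
        # a None minimum means "any version is fine"
        install_requires.append(name if meta['min_version'] is None
                                else '%s>=%s' % (name, meta['min_version']))

print(install_requires)  # ['requests==2.31.0', 'numpy>=1.21', 'pyyaml']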
def _set_exception(self): """Called by a Job object to tell that an exception occurred during the processing of the function. The object will become ready but not successful. The collector's notify_ready() method will be called, but NOT the callback method""" assert not self.ready() self._data = sys.exc_info() self._success = False self._event.set() if self._collector is not None: self._collector.notify_ready(self)
Called by a Job object to tell that an exception occurred during the processing of the function. The object will become ready but not successful. The collector's notify_ready() method will be called, but NOT the callback method
def load_sbml(filename): """ Load a model from a SBML file. Parameters ---------- filename : str The input SBML filename. Returns ------- model : NetworkModel y0 : dict Initial condition. volume : Real or Real3, optional A size of the simulation volume. """ import libsbml document = libsbml.readSBML(filename) document.validateSBML() num_errors = (document.getNumErrors(libsbml.LIBSBML_SEV_ERROR) + document.getNumErrors(libsbml.LIBSBML_SEV_FATAL)) if num_errors > 0: messages = "The generated document is not valid." messages += " {} errors were found:\n".format(num_errors) for i in range(document.getNumErrors(libsbml.LIBSBML_SEV_ERROR)): err = document.getErrorWithSeverity(i, libsbml.LIBSBML_SEV_ERROR) messages += "{}: {}\n".format(err.getSeverityAsString(), err.getShortMessage()) for i in range(document.getNumErrors(libsbml.LIBSBML_SEV_FATAL)): err = document.getErrorWithSeverity(i, libsbml.LIBSBML_SEV_FATAL) messages += "{}: {}\n".format(err.getSeverityAsString(), err.getShortMessage()) raise RuntimeError(messages) return import_sbml(document)
Load a model from a SBML file. Parameters ---------- filename : str The input SBML filename. Returns ------- model : NetworkModel y0 : dict Initial condition. volume : Real or Real3, optional A size of the simulation volume.
def fdr(p, q=.05): """ Determine FDR threshold given a p value array and desired false discovery rate q. """ s = np.sort(p) nvox = p.shape[0] null = np.array(range(1, nvox + 1), dtype='float') * q / nvox below = np.where(s <= null)[0] return s[max(below)] if len(below) else -1
Determine FDR threshold given a p value array and desired false discovery rate q.
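The threshold is the classic Benjamini-Hochberg cut: sort the p-values, compare them with the line k*q/n, and keep the largest p-value still below it. A self-contained restatement on synthetic data (the simulated p-values are made up for illustration):

import numpy as np

def bh_threshold(p, q=0.05):
    s = np.sort(p)
    n = s.shape[0]
    null = np.arange(1, n + 1, dtype=float) * q / n  # the BH reference line k*q/n
    below = np.where(s <= null)[0]
    return s[below.max()] if below.size else -1

rng = np.random.default_rng(0)
p = np.concatenate([rng.uniform(0, 0.001, 20),   # a few "real" effects
                    rng.uniform(0, 1, 980)])     # the rest are null
print(bh_threshold(p, q=0.05))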
def send(self, request, ordered=False): """ This method enqueues the given request to be sent. Its send state will be saved until a response arrives, and a ``Future`` that will be resolved when the response arrives will be returned: .. code-block:: python async def method(): # Sending (enqueued for the send loop) future = sender.send(request) # Receiving (waits for the receive loop to read the result) result = await future Designed like this because Telegram may send the response at any point, and it can send other items while one waits for it. Once the response for this future arrives, it is set with the received result, quite similar to how a ``receive()`` call would otherwise work. Since the receiving part is "built in" the future, it's impossible to await receive a result that was never sent. """ if not self._user_connected: raise ConnectionError('Cannot send requests while disconnected') if not utils.is_list_like(request): state = RequestState(request, self._loop) self._send_queue.append(state) return state.future else: states = [] futures = [] state = None for req in request: state = RequestState(req, self._loop, after=ordered and state) states.append(state) futures.append(state.future) self._send_queue.extend(states) return futures
This method enqueues the given request to be sent. Its send state will be saved until a response arrives, and a ``Future`` that will be resolved when the response arrives will be returned: .. code-block:: python async def method(): # Sending (enqueued for the send loop) future = sender.send(request) # Receiving (waits for the receive loop to read the result) result = await future Designed like this because Telegram may send the response at any point, and it can send other items while one waits for it. Once the response for this future arrives, it is set with the received result, quite similar to how a ``receive()`` call would otherwise work. Since the receiving part is "built in" the future, it's impossible to await receive a result that was never sent.
def frequent_signups(self): """Return a QuerySet of activity id's and counts for the activities that a given user has signed up for more than `settings.SIMILAR_THRESHOLD` times""" key = "{}:frequent_signups".format(self.username) cached = cache.get(key) if cached: return cached freq_signups = self.eighthsignup_set.exclude(scheduled_activity__activity__administrative=True).exclude( scheduled_activity__activity__special=True).exclude(scheduled_activity__activity__restricted=True).exclude( scheduled_activity__activity__deleted=True).values('scheduled_activity__activity').annotate( count=Count('scheduled_activity__activity')).filter(count__gte=settings.SIMILAR_THRESHOLD).order_by('-count') cache.set(key, freq_signups, timeout=60 * 60 * 24 * 7) return freq_signups
Return a QuerySet of activity id's and counts for the activities that a given user has signed up for more than `settings.SIMILAR_THRESHOLD` times
def set_prompt(self, prompt_command="", position=0): """ writes the prompt line """ self.description_docs = u'{}'.format(prompt_command) self.cli.current_buffer.reset( initial_document=Document( self.description_docs, cursor_position=position)) self.cli.request_redraw()
writes the prompt line
def scan_in_memory(node, env, path=()): """ "Scans" a Node.FS.Dir for its in-memory entries. """ try: entries = node.entries except AttributeError: # It's not a Node.FS.Dir (or doesn't look enough like one for # our purposes), which can happen if a target list containing # mixed Node types (Dirs and Files, for example) has a Dir as # the first entry. return [] entry_list = sorted(filter(do_not_scan, list(entries.keys()))) return [entries[n] for n in entry_list]
"Scans" a Node.FS.Dir for its in-memory entries.
def symmetrize_compact_force_constants(force_constants, primitive, level=1): """Symmetrize force constants by translational and permutation symmetries Parameters ---------- force_constants: ndarray Compact force constants. Symmetrized force constants are overwritten. dtype=double shape=(n_patom,n_satom,3,3) primitive: Primitive Primitive cell level: int Controls the number of times the following steps are repeated: 1) Subtract drift force constants along row and column 2) Average fc and fc.T """ s2p_map = primitive.get_supercell_to_primitive_map() p2s_map = primitive.get_primitive_to_supercell_map() p2p_map = primitive.get_primitive_to_primitive_map() permutations = primitive.get_atomic_permutations() s2pp_map, nsym_list = get_nsym_list_and_s2pp(s2p_map, p2p_map, permutations) try: import phonopy._phonopy as phonoc phonoc.perm_trans_symmetrize_compact_fc(force_constants, permutations, s2pp_map, p2s_map, nsym_list, level) except ImportError: text = ("Import error at phonoc.perm_trans_symmetrize_compact_fc. " "Corresponding python code is not implemented.") raise RuntimeError(text)
Symmetrize force constants by translational and permutation symmetries Parameters ---------- force_constants: ndarray Compact force constants. Symmetrized force constants are overwritten. dtype=double shape=(n_patom,n_satom,3,3) primitive: Primitive Primitive cell level: int Controls the number of times the following steps are repeated: 1) Subtract drift force constants along row and column 2) Average fc and fc.T
def marvcli_comment_list(datasets): """Lists comments for datasets. Output: setid comment_id date time author message """ app = create_app() ids = parse_setids(datasets, dbids=True) comments = db.session.query(Comment)\ .options(db.joinedload(Comment.dataset))\ .filter(Comment.dataset_id.in_(ids)) for comment in sorted(comments, key=lambda x: (x.dataset._setid, x.id)): print(comment.dataset.setid, comment.id, datetime.datetime.fromtimestamp(int(comment.time_added / 1000)), comment.author, repr(comment.text))
Lists comments for datasets. Output: setid comment_id date time author message
def add_membership(self, subject_descriptor, container_descriptor): """AddMembership. [Preview API] Create a new membership between a container and subject. :param str subject_descriptor: A descriptor to a group or user that can be the child subject in the relationship. :param str container_descriptor: A descriptor to a group that can be the container in the relationship. :rtype: :class:`<GraphMembership> <azure.devops.v5_1.graph.models.GraphMembership>` """ route_values = {} if subject_descriptor is not None: route_values['subjectDescriptor'] = self._serialize.url('subject_descriptor', subject_descriptor, 'str') if container_descriptor is not None: route_values['containerDescriptor'] = self._serialize.url('container_descriptor', container_descriptor, 'str') response = self._send(http_method='PUT', location_id='3fd2e6ca-fb30-443a-b579-95b19ed0934c', version='5.1-preview.1', route_values=route_values) return self._deserialize('GraphMembership', response)
AddMembership. [Preview API] Create a new membership between a container and subject. :param str subject_descriptor: A descriptor to a group or user that can be the child subject in the relationship. :param str container_descriptor: A descriptor to a group that can be the container in the relationship. :rtype: :class:`<GraphMembership> <azure.devops.v5_1.graph.models.GraphMembership>`
def fftconv(a, b, axes=(0, 1)): """ Compute a multi-dimensional convolution via the Discrete Fourier Transform. Note that the output has a phase shift relative to the output of :func:`scipy.ndimage.convolve` with the default ``origin`` parameter. Parameters ---------- a : array_like Input array b : array_like Input array axes : sequence of ints, optional (default (0, 1)) Axes on which to perform convolution Returns ------- ab : ndarray Convolution of input arrays, a and b, along specified axes """ if np.isrealobj(a) and np.isrealobj(b): fft = rfftn ifft = irfftn else: fft = fftn ifft = ifftn dims = np.maximum([a.shape[i] for i in axes], [b.shape[i] for i in axes]) af = fft(a, dims, axes) bf = fft(b, dims, axes) return ifft(af * bf, dims, axes)
Compute a multi-dimensional convolution via the Discrete Fourier Transform. Note that the output has a phase shift relative to the output of :func:`scipy.ndimage.convolve` with the default ``origin`` parameter. Parameters ---------- a : array_like Input array b : array_like Input array axes : sequence of ints, optional (default (0, 1)) Axes on which to perform convolution Returns ------- ab : ndarray Convolution of input arrays, a and b, along specified axes
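The identity being used (convolution in the spatial domain equals multiplication of spectra) is easy to verify in 1-D; note the sketch below pads to the full output length len(a)+len(b)-1 so it matches np.convolve, whereas the function above pads only to the larger of the two shapes.

import numpy as np

rng = np.random.default_rng(1)
a = rng.standard_normal(64)
b = rng.standard_normal(16)

n = a.size + b.size - 1                      # full linear-convolution length
via_fft = np.fft.irfft(np.fft.rfft(a, n) * np.fft.rfft(b, n), n)
direct = np.convolve(a, b, mode='full')

print(np.allclose(via_fft, direct))  # True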
def mendelian_check(tp1, tp2, tpp, is_xlinked=False): """ Compare TRED calls for Parent1, Parent2 and Proband. """ call_to_ints = lambda x: tuple(int(_) for _ in x.split("|") if _ != ".") tp1_sex, tp1_call = tp1[:2] tp2_sex, tp2_call = tp2[:2] tpp_sex, tpp_call = tpp[:2] # tp1_evidence = sum(int(x) for x in tp1[2:]) # tp2_evidence = sum(int(x) for x in tp2[2:]) # tpp_evidence = sum(int(x) for x in tpp[2:]) tp1_call = call_to_ints(tp1_call) tp2_call = call_to_ints(tp2_call) tpp_call = call_to_ints(tpp_call) possible_progenies = set(tuple(sorted(x)) \ for x in product(tp1_call, tp2_call)) if is_xlinked and tpp_sex == "Male": possible_progenies = set(tuple((x,)) for x in tp1_call) if -1 in tp1_call or -1 in tp2_call or -1 in tpp_call: tag = "Missing" # elif tp1_evidence < 2 or tp2_evidence < 2 or tpp_evidence < 2: # tag = "Missing" else: tag = "Correct" if tpp_call in possible_progenies else "Error" return tag
Compare TRED calls for Parent1, Parent2 and Proband.
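Stripped of the missing-data and X-linked cases, the consistency test is just "is the child's genotype one of the sorted parent-allele combinations". A tiny sketch with hypothetical '12|14'-style calls:

from itertools import product

call_to_ints = lambda x: tuple(int(a) for a in x.split('|') if a != '.')

father, mother, child = '12|14', '12|12', '12|14'   # hypothetical repeat-count calls
possible = {tuple(sorted(g)) for g in product(call_to_ints(father), call_to_ints(mother))}
tag = 'Correct' if call_to_ints(child) in possible else 'Error'
print(possible, tag)  # e.g. {(12, 12), (12, 14)} Correct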
def __set_components(self, requisite=True): """ Sets the Components. :param requisite: Set only requisite Components. :type requisite: bool """ components = self.__components_manager.list_components() candidate_components = \ getattr(set(components), "intersection" if requisite else "difference")(self.__requisite_components) deactivated_components = self.__settings.get_key("Settings", "deactivated_components").toString().split(",") candidate_components = \ sorted(filter(lambda x: x not in deactivated_components, candidate_components), key=(components).index) for component in candidate_components: try: profile = self.__components_manager.components[component] interface = self.__components_manager.get_interface(component) setattr(self, "_{0}__{1}".format(self.__class__.__name__, foundations.namespace.get_leaf(component, ".")), interface) self.__splashscreen and self.__splashscreen.show_message( "{0} - {1} | Activating {2}.".format(self.__class__.__name__, Constants.version, component)) interface.activate(self) if profile.category in ("Default", "QObject"): interface.initialize() elif profile.category == "QWidget": interface.add_widget() interface.initialize_ui() except Exception as error: if requisite: message = "'{0}' Component failed to activate!\nException raised: {1}" handler = umbra.reporter.system_exit_exception_handler else: message = "'{0}' Component failed to activate, unexpected behavior may occur!\nException raised: {1}" handler = umbra.reporter.base_exception_handler exception = manager.exceptions.ComponentActivationError(message.format(component, error)) handler(exception)
Sets the Components. :param requisite: Set only requisite Components. :type requisite: bool
def from_lambda(cls, name, lambda_): """Make a :class:`SassFunction` object from the given ``lambda_`` function. Since lambda functions don't have their name, it need its ``name`` as well. Arguments are automatically inspected. :param name: the function name :type name: :class:`str` :param lambda_: the actual lambda function to be called :type lambda_: :class:`types.LambdaType` :returns: a custom function wrapper of the ``lambda_`` function :rtype: :class:`SassFunction` """ if PY2: # pragma: no cover a = inspect.getargspec(lambda_) varargs, varkw, defaults, kwonlyargs = ( a.varargs, a.keywords, a.defaults, None, ) else: # pragma: no cover a = inspect.getfullargspec(lambda_) varargs, varkw, defaults, kwonlyargs = ( a.varargs, a.varkw, a.defaults, a.kwonlyargs, ) if varargs or varkw or defaults or kwonlyargs: raise TypeError( 'functions cannot have starargs or defaults: {} {}'.format( name, lambda_, ), ) return cls(name, a.args, lambda_)
Make a :class:`SassFunction` object from the given ``lambda_`` function. Since lambda functions don't have their name, it need its ``name`` as well. Arguments are automatically inspected. :param name: the function name :type name: :class:`str` :param lambda_: the actual lambda function to be called :type lambda_: :class:`types.LambdaType` :returns: a custom function wrapper of the ``lambda_`` function :rtype: :class:`SassFunction`
def version_binary(self): ''' Return version number which is stored in binary format. Returns: str: <major 0-255>.<minior 0-255>.<build 0-65535> or None if not found ''' # Under MSI 'Version' is a 'REG_DWORD' which then sets other registry # values like DisplayVersion to x.x.x to the same value. # However not everyone plays by the rules, so we need to check first. # version_binary_data will be None if the reg value does not exist. # Some installs set 'Version' to REG_SZ (string) which is not # the MSI standard try: item_value, item_type = self.__reg_query_value(self.__reg_uninstall_handle, 'version') except pywintypes.error as exc: # pylint: disable=no-member if exc.winerror == winerror.ERROR_FILE_NOT_FOUND: # Not Found return '', '' version_binary_text = '' version_src = '' if item_value: if item_type == win32con.REG_DWORD: if isinstance(item_value, six.integer_types): version_binary_raw = item_value if version_binary_raw: # Major.Minor.Build version_binary_text = '{0}.{1}.{2}'.format( version_binary_raw >> 24 & 0xff, version_binary_raw >> 16 & 0xff, version_binary_raw & 0xffff) version_src = 'binary-version' elif (item_type == win32con.REG_SZ and isinstance(item_value, six.string_types) and self.__version_pattern.match(item_value) is not None): # Hey, version should be a int/REG_DWORD, an installer has set # it to a string version_binary_text = item_value.strip(' ') version_src = 'binary-version (string)' return (version_binary_text, version_src)
Return version number which is stored in binary format. Returns: str: <major 0-255>.<minor 0-255>.<build 0-65535> or None if not found
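The packing described above (major in the top byte, minor in the next byte, build in the low 16 bits) can be decoded with the same shifts used in the function; a small stand-alone example with an invented DWORD value:

version_binary_raw = (10 << 24) | (0 << 16) | 19041   # hypothetical REG_DWORD value
text = '{0}.{1}.{2}'.format(version_binary_raw >> 24 & 0xff,
                            version_binary_raw >> 16 & 0xff,
                            version_binary_raw & 0xffff)
print(text)  # '10.0.19041'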
def delete_migration(connection, basename): """ Delete a migration in `migrations_applied` table """ # Prepare query sql = "DELETE FROM migrations_applied WHERE name = %s" # Run with connection.cursor() as cursor: cursor.execute(sql, (basename,)) connection.commit() return True
Delete a migration in `migrations_applied` table
def _structure_default(self, obj, cl): """This is the fallthrough case. Everything is a subclass of `Any`. A special condition here handles ``attrs`` classes. Bare optionals end here too (optionals with arguments are unions.) We treat bare optionals as Any. """ if cl is Any or cl is Optional: return obj # We don't know what this is, so we complain loudly. msg = ( "Unsupported type: {0}. Register a structure hook for " "it.".format(cl) ) raise ValueError(msg)
This is the fallthrough case. Everything is a subclass of `Any`. A special condition here handles ``attrs`` classes. Bare optionals end here too (optionals with arguments are unions.) We treat bare optionals as Any.
def stop(self): """Stop the timer.""" dd = time() - self._start self.ms = int(round(1000 * dd))
Stop the timer.
def get_(key, recurse=False, profile=None, **kwargs): ''' .. versionadded:: 2014.7.0 Get a value from etcd, by direct path. Returns None on failure. CLI Examples: .. code-block:: bash salt myminion etcd.get /path/to/key salt myminion etcd.get /path/to/key profile=my_etcd_config salt myminion etcd.get /path/to/key recurse=True profile=my_etcd_config salt myminion etcd.get /path/to/key host=127.0.0.1 port=2379 ''' client = __utils__['etcd_util.get_conn'](__opts__, profile, **kwargs) if recurse: return client.tree(key) else: return client.get(key, recurse=recurse)
.. versionadded:: 2014.7.0 Get a value from etcd, by direct path. Returns None on failure. CLI Examples: .. code-block:: bash salt myminion etcd.get /path/to/key salt myminion etcd.get /path/to/key profile=my_etcd_config salt myminion etcd.get /path/to/key recurse=True profile=my_etcd_config salt myminion etcd.get /path/to/key host=127.0.0.1 port=2379
def update_form_labels(self, request=None, obj=None, form=None): """Returns a form obj after modifying form labels referred to in custom_form_labels. """ for form_label in self.custom_form_labels: if form_label.field in form.base_fields: label = form_label.get_form_label( request=request, obj=obj, model=self.model, form=form ) if label: form.base_fields[form_label.field].label = mark_safe(label) return form
Returns a form obj after modifying form labels referred to in custom_form_labels.
def load(pathtovector, wordlist=(), num_to_load=None, truncate_embeddings=None, unk_word=None, sep=" "): r""" Read a file in word2vec .txt format. The load function will raise a ValueError when trying to load items which do not conform to line lengths. Parameters ---------- pathtovector : string The path to the vector file. header : bool Whether the vector file has a header of the type (NUMBER OF ITEMS, SIZE OF VECTOR). wordlist : iterable, optional, default () A list of words you want loaded from the vector file. If this is None (default), all words will be loaded. num_to_load : int, optional, default None The number of items to load from the file. Because loading can take some time, it is sometimes useful to onlyl load the first n items from a vector file for quick inspection. truncate_embeddings : int, optional, default None If this value is not None, the vectors in the vector space will be truncated to the number of dimensions indicated by this value. unk_word : object The object to treat as UNK in your vector space. If this is not in your items dictionary after loading, we add it with a zero vector. Returns ------- r : Reach An initialized Reach instance. """ vectors, items = Reach._load(pathtovector, wordlist, num_to_load, truncate_embeddings, sep) if unk_word is not None: if unk_word not in set(items): unk_vec = np.zeros((1, vectors.shape[1])) vectors = np.concatenate([unk_vec, vectors], 0) items = [unk_word] + items unk_index = 0 else: unk_index = items.index(unk_word) else: unk_index = None return Reach(vectors, items, name=os.path.split(pathtovector)[-1], unk_index=unk_index)
r""" Read a file in word2vec .txt format. The load function will raise a ValueError when trying to load items which do not conform to line lengths. Parameters ---------- pathtovector : string The path to the vector file. header : bool Whether the vector file has a header of the type (NUMBER OF ITEMS, SIZE OF VECTOR). wordlist : iterable, optional, default () A list of words you want loaded from the vector file. If this is None (default), all words will be loaded. num_to_load : int, optional, default None The number of items to load from the file. Because loading can take some time, it is sometimes useful to onlyl load the first n items from a vector file for quick inspection. truncate_embeddings : int, optional, default None If this value is not None, the vectors in the vector space will be truncated to the number of dimensions indicated by this value. unk_word : object The object to treat as UNK in your vector space. If this is not in your items dictionary after loading, we add it with a zero vector. Returns ------- r : Reach An initialized Reach instance.
def set_actuator_control_target_encode(self, time_usec, group_mlx, target_system, target_component, controls): ''' Set the vehicle attitude and body angular rates. time_usec : Timestamp (micros since boot or Unix epoch) (uint64_t) group_mlx : Actuator group. The "_mlx" indicates this is a multi-instance message and a MAVLink parser should use this field to difference between instances. (uint8_t) target_system : System ID (uint8_t) target_component : Component ID (uint8_t) controls : Actuator controls. Normed to -1..+1 where 0 is neutral position. Throttle for single rotation direction motors is 0..1, negative range for reverse direction. Standard mapping for attitude controls (group 0): (index 0-7): roll, pitch, yaw, throttle, flaps, spoilers, airbrakes, landing gear. Load a pass-through mixer to repurpose them as generic outputs. (float) ''' return MAVLink_set_actuator_control_target_message(time_usec, group_mlx, target_system, target_component, controls)
Set the vehicle attitude and body angular rates. time_usec : Timestamp (micros since boot or Unix epoch) (uint64_t) group_mlx : Actuator group. The "_mlx" indicates this is a multi-instance message and a MAVLink parser should use this field to difference between instances. (uint8_t) target_system : System ID (uint8_t) target_component : Component ID (uint8_t) controls : Actuator controls. Normed to -1..+1 where 0 is neutral position. Throttle for single rotation direction motors is 0..1, negative range for reverse direction. Standard mapping for attitude controls (group 0): (index 0-7): roll, pitch, yaw, throttle, flaps, spoilers, airbrakes, landing gear. Load a pass-through mixer to repurpose them as generic outputs. (float)