code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def channels_kick(self, room_id, user_id, **kwargs):
    """Remove a user from the channel via the ``channels.kick`` REST endpoint.

    :param room_id: ID of the channel to kick the user from.
    :param user_id: ID of the user to remove.
    :param kwargs: Extra options forwarded to the API call.
    :return: response of the ``channels.kick`` POST call.
    """
    # NOTE(review): kwargs is deliberately passed as a single 'kwargs'
    # keyword; __call_api_post presumably unpacks it — confirm against
    # its definition.
    return self.__call_api_post('channels.kick', roomId=room_id, userId=user_id, kwargs=kwargs)
Removes a user from the channel.
def _assemble_complex(stmt):
    """Assemble a Complex statement into an English sentence.

    Renders the first member as the subject and joins the remaining
    members after ' binds ', e.g. "A binds B and C".
    """
    # NOTE(review): assumes stmt.members is non-empty; a Complex with no
    # members would raise IndexError here.
    member_strs = [_assemble_agent_str(m) for m in stmt.members]
    stmt_str = member_strs[0] + ' binds ' + _join_list(member_strs[1:])
    return _make_sentence(stmt_str)
Assemble Complex statements into text.
def is_micropython_usb_device(port):
    """Report whether *port* looks like a MicroPython USB device.

    Accepts either a pyudev ``Device`` mapping or a pyserial-style tuple
    whose third element is the hardware ID string.
    """
    if type(port).__name__ == 'Device':
        # pyudev device: it must be a USB tty, otherwise it cannot match.
        is_usb_tty = (
            'ID_BUS' in port and port['ID_BUS'] == 'usb'
            and 'SUBSYSTEM' in port and port['SUBSYSTEM'] == 'tty'
        )
        if not is_usb_tty:
            return False
        usb_id = 'usb vid:pid={}:{}'.format(port['ID_VENDOR_ID'],
                                            port['ID_MODEL_ID'])
    else:
        usb_id = port[2].lower()
    # Known MicroPython VID:PID prefixes (pyboard f055:980x, Teensy 16c0:0483).
    return usb_id.startswith(('usb vid:pid=f055:980', 'usb vid:pid=16c0:0483'))
Checks a USB device to see if it looks like a MicroPython device.
def read_stats(self):
    """Read current port statistics from the chassis.

    :return: TgnObjectsDict mapping each port object to the statistics
        returned by its ``read_port_stats()`` call.
    """
    self.statistics = TgnObjectsDict()
    for port in self.session.ports.values():
        self.statistics[port] = port.read_port_stats()
    return self.statistics
Read current ports statistics from chassis. :return: dictionary {port name: {group name: {stat name: stat value}}}
def generateRandomSymbol(numColumns, sparseCols):
    """Generate a random SDR with ``sparseCols`` distinct active columns.

    @param numColumns (int) total number of columns available
    @param sparseCols (int) number of active columns in the desired SDR
    @return (list) column indices forming the SDR
    """
    symbol = []
    # Keep drawing until we have collected the requested number of
    # distinct column indices.
    while len(symbol) < sparseCols:
        candidate = random.randrange(numColumns)
        if candidate not in symbol:
            symbol.append(candidate)
    return symbol
Generates a random SDR with sparseCols number of active columns @param numColumns (int) number of columns in the temporal memory @param sparseCols (int) number of sparse columns for desired SDR @return symbol (list) SDR
def addParameter(self, k, r):
    """Add a parameter to the experiment's parameter space.

    :param k: parameter name
    :param r: parameter range; a scalar or string is wrapped into a
        single-element list, any other iterable is materialised as a list
    """
    # str is iterable but represents a single value, so treat it as a
    # scalar.  collections.Iterable was removed in Python 3.10; the ABC
    # lives in collections.abc (the redundant nested isinstance check is
    # gone as well).
    if isinstance(r, str) or not isinstance(r, collections.abc.Iterable):
        r = [r]
    else:
        r = list(r)
    self._parameters[k] = r
Add a parameter to the experiment's parameter space. k is the parameter name, and r is its range. :param k: parameter name :param r: parameter range
def read_jp2_image(filename):
    """Read data from a JPEG2000 file.

    :param filename: name of JPEG2000 file to be read
    :type filename: str
    :return: decoded image data, corrected for the file's true bit depth
    """
    image = read_image(filename)
    # Re-open the raw file to recover the bit depth from the JP2 header,
    # then fix the decoded image accordingly.
    with open(filename, 'rb') as file:
        bit_depth = get_jp2_bit_depth(file)
    return fix_jp2_image(image, bit_depth)
Read data from JPEG2000 file :param filename: name of JPEG2000 file to be read :type filename: str :return: data stored in JPEG2000 file
async def add(client: Client, identity_signed_raw: str) -> ClientResponse:
    """POST a raw identity document to the ``/add`` endpoint.

    :param client: Client to connect to the API
    :param identity_signed_raw: Signed identity raw document
    :return: aiohttp client response
    """
    return await client.post(MODULE + '/add', {'identity': identity_signed_raw}, rtype=RESPONSE_AIOHTTP)
POST identity raw document :param client: Client to connect to the api :param identity_signed_raw: Identity raw document :return:
def get_states(self, config_ids):
    """Generate state information for the selected containers.

    :param config_ids: List of MapConfigId tuples.
    :return: Iterable of configuration states — one lazily generated
        stream per config id, flattened into a single iterator.
    """
    state_streams = (self.generate_config_states(config_id)
                     for config_id in config_ids)
    return itertools.chain.from_iterable(state_streams)
Generates state information for the selected containers. :param config_ids: List of MapConfigId tuples. :type config_ids: list[dockermap.map.input.MapConfigId] :return: Iterable of configuration states. :rtype: collections.Iterable[dockermap.map.state.ConfigState]
def gpg_version():
    """Return the installed GPG version as a tuple of ints, e.g. (2, 2, 19).

    Parses the first line of ``gpg --version`` output; the third
    whitespace-separated token is the dotted version string.
    """
    cmd = flatten([gnupg_bin(), "--version"])
    output = stderr_output(cmd)
    output = output \
        .split('\n')[0] \
        .split(" ")[2] \
        .split('.')
    return tuple([int(x) for x in output])
Returns the GPG version
def close(self):
    """Terminate or kill the subprocess.  This function is blocking.

    Tries a polite ``terminate()`` first, waits up to ~0.5 s for the
    process to exit, then falls back to ``kill()``.
    """
    if not self._process:
        return
    if self._process.returncode is not None:
        # Already exited; nothing to do.
        return
    _logger.debug('Terminate process.')
    try:
        self._process.terminate()
    except OSError as error:
        # ESRCH: the process disappeared between the check and the signal.
        if error.errno != errno.ESRCH:
            raise
    # Poll for exit: 10 * 0.05 s = 0.5 s grace period.
    for dummy in range(10):
        if self._process.returncode is not None:
            return
        time.sleep(0.05)
    _logger.debug('Failed to terminate. Killing.')
    try:
        self._process.kill()
    except OSError as error:
        if error.errno != errno.ESRCH:
            raise
Terminate or kill the subprocess. This function is blocking.
async def send_rpc_message(self, message, context):
    """Handle a ``send_rpc`` message.

    See :meth:`AbstractDeviceAdapter.send_rpc`.

    :param message: dict with ``connection_string``, ``rpc_id``,
        ``address``, ``timeout`` and ``payload`` keys.
    :param context: request context; ``context.user_data`` carries the
        client id.
    :return: dict with the packed RPC ``status`` and base64-encoded
        ``payload``.
    :raises ServerCommandError: on unexpected (non-RPC) exceptions.
    """
    conn_string = message.get('connection_string')
    rpc_id = message.get('rpc_id')
    address = message.get('address')
    timeout = message.get('timeout')
    payload = message.get('payload')
    client_id = context.user_data
    self._logger.debug("Calling RPC %d:0x%04X with payload %s on %s",
                       address, rpc_id, payload, conn_string)
    response = bytes()
    err = None
    try:
        response = await self.send_rpc(client_id, conn_string, address, rpc_id, payload, timeout=timeout)
    except VALID_RPC_EXCEPTIONS as internal_err:
        # Expected RPC-level failures are encoded into the response
        # status rather than propagated.
        err = internal_err
    except (DeviceAdapterError, DeviceServerError):
        raise
    except Exception as internal_err:
        self._logger.warning("Unexpected exception calling RPC %d:0x%04x", address, rpc_id, exc_info=True)
        raise ServerCommandError('send_rpc', str(internal_err)) from internal_err
    status, response = pack_rpc_response(response, err)
    return {
        'status': status,
        'payload': base64.b64encode(response)
    }
Handle a send_rpc message. See :meth:`AbstractDeviceAdapter.send_rpc`.
def get_last_modified_timestamp(self):
    """Return the most recent modification timestamp among the files
    under the current directory (a git root), as reported by BSD
    ``stat -f '%T@ %p'``.

    The value is printed (preserving the old behaviour) and now also
    returned as a stripped string instead of being discarded; the old
    Python 2 ``print output`` statement was a syntax error on Python 3.

    :return: the newest mtime as a string (empty if the pipeline
        produced no output).
    """
    cmd = ("find . -print0 | xargs -0 stat -f '%T@ %p' "
           "| sort -n | tail -1 | cut -f2- -d' '")
    # shell=True is required for the pipeline; the command contains no
    # user-supplied input.
    ps = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                          stderr=subprocess.STDOUT)
    output = ps.communicate()[0]
    if isinstance(output, bytes):
        output = output.decode('utf-8', errors='replace')
    output = output.strip()
    print(output)
    return output
Looks at the files in a git root directory and grabs the last modified timestamp
def subvolume_deleted(name, device, commit=False, __dest=None):
    """Make sure that a btrfs subvolume is removed.

    name
        Name of the subvolume to remove
    device
        Device where to remove the subvolume
    commit
        Wait until the transaction is over
    """
    ret = {
        'name': name,
        'result': False,
        'changes': {},
        'comment': [],
    }
    path = os.path.join(__dest, name)
    exists = __salt__['btrfs.subvolume_exists'](path)
    if not exists:
        ret['comment'].append('Subvolume {} already missing'.format(name))
    if __opts__['test']:
        # Test mode: report what would happen without touching the volume.
        ret['result'] = None
        if exists:
            ret['comment'].append('Subvolume {} will be removed'.format(name))
        return ret
    # 'after' makes btrfs wait for the transaction to commit.
    commit = 'after' if commit else None
    # NOTE(review): this guard looks inverted — as written the delete
    # only runs when the subvolume is already missing; upstream salt
    # deletes when it exists.  Confirm against salt/states/btrfs.py.
    if not exists:
        try:
            __salt__['btrfs.subvolume_delete'](path, commit=commit)
        except CommandExecutionError:
            ret['comment'].append('Error removing subvolume {}'.format(name))
            return ret
    ret['changes'][name] = 'Removed subvolume {}'.format(name)
    ret['result'] = True
    return ret
Makes sure that a btrfs subvolume is removed. name Name of the subvolume to remove device Device where to remove the subvolume commit Wait until the transaction is over
def dRV(self, dt, band='g'):
    """Return dRV of star A if A is brighter than B+C in ``band``,
    otherwise dRV of star B.

    The two terms are combined via the complementary brightness masks,
    so exactly one contributes per system.
    """
    return (self.orbpop.dRV_1(dt)*self.A_brighter(band) +
            self.orbpop.dRV_2(dt)*self.BC_brighter(band))
Returns dRV of star A, if A is brighter than B+C, or of star B if B+C is brighter
def convertforinput(self, filepath, metadata):
    """Convert from the target format into one of the source formats.

    Relevant when converters are used in InputTemplates.  ``metadata``
    is already the metadata for the to-be-generated file; ``filepath``
    is both source and target — the source file will be erased and
    overwritten with the conversion result.

    This base implementation only validates the metadata type and
    always returns False; subclasses implement the actual conversion.
    """
    assert isinstance(metadata, CLAMMetaData)
    if not metadata.__class__ in self.acceptforinput:
        raise Exception("Convertor " + self.__class__.__name__ + " can not convert input files to " + metadata.__class__.__name__ + "!")
    return False
Convert from target format into one of the source formats. Relevant if converters are used in InputTemplates. Metadata already is metadata for the to-be-generated file. 'filepath' is both the source and the target file, the source file will be erased and overwritten with the conversion result!
def _set_current(self, new_current):
    """Change the current default prefix, for internal usage.

    Args:
        new_current(str): Name of the new current prefix, it must
            already exist

    Returns:
        None

    Raises:
        PrefixNotFound: if the given prefix name does not exist in the
            workdir
    """
    new_cur_full_path = self.join(new_current)
    if not os.path.exists(new_cur_full_path):
        raise PrefixNotFound(
            'Prefix "%s" does not exist in workdir %s' % (new_current, self.path)
        )
    # lexists: the 'current' symlink may be dangling and must still be
    # removed before re-linking.
    if os.path.lexists(self.join('current')):
        os.unlink(self.join('current'))
    os.symlink(new_current, self.join('current'))
    self.current = new_current
Change the current default prefix, for internal usage Args: new_current(str): Name of the new current prefix, it must already exist Returns: None Raises: PrefixNotFound: if the given prefix name does not exist in the workdir
def _call(self, x, out=None): if out is None: out = self.range.element() out.lincomb(self.a, x[0], self.b, x[1]) return out
Linearly combine ``x`` and write to ``out`` if given.
def _call(self, path, method, body=None, headers=None):
    """Wrapper around http.do_call that transforms some HTTPError into
    our own exceptions.

    :raises PermissionError: when the server answers 401; any other
        HTTPError is re-raised unchanged.
    """
    try:
        resp = self.http.do_call(path, method, body, headers)
    except http.HTTPError as err:
        if err.status == 401:
            raise PermissionError('Insufficient permissions to query ' +
                                  '%s with user %s :%s' % (path, self.user, err))
        raise
    return resp
Wrapper around http.do_call that transforms some HTTPError into our own exceptions
def convert_instancenorm(node, **kwargs):
    """Map MXNet's InstanceNorm operator attributes onto ONNX's
    InstanceNormalization operator and return the created node in a list.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)
    # MXNet defaults eps to 0.001 when unset.
    epsilon_value = float(attrs.get("eps", 0.001))
    instance_norm_node = onnx.helper.make_node(
        'InstanceNormalization',
        inputs=input_nodes,
        outputs=[name],
        name=name,
        epsilon=epsilon_value,
    )
    return [instance_norm_node]
Map MXNet's InstanceNorm operator attributes to onnx's InstanceNormalization operator based on the input node's attributes and return the created node.
def cmd_devid(self, args):
    """Decode device IDs stored in COMPASS_DEV_ID* and INS_*_ID parameters."""
    for p in self.mav_param.keys():
        if p.startswith('COMPASS_DEV_ID'):
            mp_util.decode_devid(self.mav_param[p], p)
        if p.startswith('INS_') and p.endswith('_ID'):
            mp_util.decode_devid(self.mav_param[p], p)
decode device IDs from parameters
def getInfo(sign, lon):
    """Return the complete essential dignities for a sign and longitude.

    :param sign: zodiac sign identifier understood by the dignity tables
    :param lon: longitude, used only for the term and face sub-dignities
    :return: dict with one entry per essential dignity
    """
    return {
        'ruler': ruler(sign),
        'exalt': exalt(sign),
        'dayTrip': dayTrip(sign),
        'nightTrip': nightTrip(sign),
        'partTrip': partTrip(sign),
        'term': term(sign, lon),
        'face': face(sign, lon),
        'exile': exile(sign),
        'fall': fall(sign)
    }
Returns the complete essential dignities for a sign and longitude.
def get_attr_filters(self):
    """Yield a ValueFilter for each configured resource attribute filter.

    Only keys listed in ``self.multi_attrs`` are considered; dict values
    get their key injected under 'key', scalar values are wrapped into a
    ``{key: value}`` mapping.
    """
    for f in self.data.keys():
        if f not in self.multi_attrs:
            continue
        fv = self.data[f]
        if isinstance(fv, dict):
            fv['key'] = f
        else:
            fv = {f: fv}
        vf = ValueFilter(fv)
        # Annotation is disabled for attribute filters.
        vf.annotate = False
        yield vf
Return an iterator resource attribute filters configured.
def get_link_text_from_selector(selector):
    """Extract the link text from a ``link=`` / ``link_text=`` selector.

    Selectors without either prefix are returned unchanged.
    """
    for prefix in ('link=', 'link_text='):
        if selector.startswith(prefix):
            return selector.split(prefix)[1]
    return selector
A basic method to get the link text from a link text selector.
def strip_html(text):
    """Clean up raw tweet text.

    Rewrites leading @mentions into a "Replying to ..." prefix, strips
    remaining '@' characters and drops tokens containing http(s) links.

    :param text: raw tweet text
    :return: cleaned text
    """
    def reply_to(text):
        """Turn leading @mentions into an English 'Replying to ...' prefix."""
        replying_to = []
        # Default to no trailing message; fixes an UnboundLocalError when
        # the tweet is empty or consists solely of @mentions.
        message = []
        split_text = text.split()
        for index, token in enumerate(split_text):
            if token.startswith('@'):
                replying_to.append(token[1:])
            else:
                message = split_text[index:]
                break
        rply_msg = ""
        if len(replying_to) > 0:
            rply_msg = "Replying to "
            for token in replying_to[:-1]:
                rply_msg += token + ","
            if len(replying_to) > 1:
                rply_msg += 'and '
            rply_msg += replying_to[-1] + ". "
        return rply_msg + " ".join(message)

    text = reply_to(text)
    text = text.replace('@', ' ')
    # Drop any token that embeds a link.
    return " ".join([token for token in text.split()
                     if ('http:' not in token) and ('https:' not in token)])
Get rid of ugly twitter html
def select_slice(self, row_slc, col_slc, add_to_selected=False):
    """Select a slice of grid cells.

    Parameters
    ----------
    row_slc: Slice
    \tRows to be selected
    col_slc: Slice
    \tColumns to be selected
    add_to_selected: Bool, defaults to False
    \tOld selections are cleared if False
    """
    if not add_to_selected:
        self.grid.ClearSelection()
    # Bug fix: the original compared row_slc with itself
    # (row_slc == row_slc == slice(None, None, None)), so the SelectAll
    # branch only ever looked at the rows.
    if row_slc == col_slc == slice(None, None, None):
        self.grid.SelectAll()
    elif row_slc.stop is not None and col_slc.stop is not None and \
            row_slc.step in (None, 1) and col_slc.step in (None, 1):
        # Contiguous rectangle: one block selection is enough.  (The
        # original guard tested ``stop is None`` and then computed
        # ``stop - 1``, which always raised TypeError when taken.)
        self.grid.SelectBlock(row_slc.start, col_slc.start,
                              row_slc.stop - 1, col_slc.stop - 1)
    else:
        # Stepped slice: fall back to selecting the cells one by one.
        for row in xrange(row_slc.start, row_slc.stop, row_slc.step):
            for col in xrange(col_slc.start, col_slc.stop, col_slc.step):
                self.select_cell(row, col, add_to_selected=True)
Selects a slice of cells Parameters ---------- * row_slc: Integer or Slice \tRows to be selected * col_slc: Integer or Slice \tColumns to be selected * add_to_selected: Bool, defaults to False \tOld selections are cleared if False
def fact(name, puppet=False):
    """Run facter for a specific fact.

    :param name: fact name to query
    :param puppet: also load Puppet-provided facts when True
    :raises CommandExecutionError: when facter exits non-zero

    CLI Example:

    .. code-block:: bash

        salt '*' puppet.fact kernel
    """
    opt_puppet = '--puppet' if puppet else ''
    ret = __salt__['cmd.run_all'](
        'facter {0} {1}'.format(opt_puppet, name),
        python_shell=False)
    if ret['retcode'] != 0:
        raise CommandExecutionError(ret['stderr'])
    if not ret['stdout']:
        # Unknown fact: facter prints nothing.
        return ''
    return ret['stdout']
Run facter for a specific fact CLI Example: .. code-block:: bash salt '*' puppet.fact kernel
def addIDs(self, asfield=False):
    """Generate point and cell ids.

    :param bool asfield: flag to control whether to generate scalar or
        field data.
    :return: the updated mesh carrying the id arrays.
    """
    ids = vtk.vtkIdFilter()
    ids.SetInputData(self.poly)
    ids.PointIdsOn()
    ids.CellIdsOn()
    if asfield:
        ids.FieldDataOn()
    else:
        ids.FieldDataOff()
    ids.Update()
    return self.updateMesh(ids.GetOutput())
Generate point and cell ids. :param bool asfield: flag to control whether to generate scalar or field data.
def _dump_multilinestring(obj, decimals):
    """Dump a GeoJSON-like MultiLineString object to WKT.

    Input parameters and return value are the MULTILINESTRING equivalent
    to :func:`_dump_point`.
    """
    linestring_texts = []
    for linestring in obj['coordinates']:
        point_texts = (' '.join(_round_and_pad(coord, decimals) for coord in point)
                       for point in linestring)
        linestring_texts.append('(%s)' % ', '.join(point_texts))
    return 'MULTILINESTRING (%s)' % ', '.join(linestring_texts)
Dump a GeoJSON-like MultiLineString object to WKT. Input parameters and return value are the MULTILINESTRING equivalent to :func:`_dump_point`.
def _update(self):
    """Emit dataChanged over every cell (top-left to bottom-right)."""
    # NOTE(review): Qt's bottom-right index is usually
    # (rows - 1, cols - 1); confirm the off-by-one here is intended.
    self.dataChanged.emit(self.createIndex(0, 0), self.createIndex(
        len(self.collection), len(self.header)))
Emit dataChanged signal on all cells
def open(self, new=False):
    """Initialise the database, creating it when *new* is True, then run
    the initialisation queries."""
    if new:
        self._db.new()
    else:
        self._db.open()
    self._run_init_queries()
Init the database, if required.
def interrupt(self):
    """Interrupt the current database from processing.

    The stored handle and thread id are reset afterwards; the
    AttributeError guard tolerates backends without ``interrupt``.
    """
    database = self._database
    thread_id = self._databaseThreadId
    if database and thread_id:
        try:
            database.interrupt(thread_id)
        except AttributeError:
            pass
    self._database = None
    self._databaseThreadId = 0
Interrupts the current database from processing.
def _get_parsers(self, name):
    """Return the appropriate parser(s) asked by the user.

    :param name: parser class name, or None to try every known parser.
    :return: tuple ``(guess, parserlist)`` — ``guess`` is True when a
        specific parser name was given.
    :raises ValueError: if ``name`` matches no BaseParser subclass.
    :raises TypeError: if ``name`` is neither a string type nor None.

    Todo:
        Change `Ontology._get_parsers` behaviour to look for parsers
        through a setuptools entrypoint instead of mere subclasses.
    """
    parserlist = BaseParser.__subclasses__()
    forced = name is None
    if isinstance(name, (six.text_type, six.binary_type)):
        parserlist = [p for p in parserlist if p.__name__ == name]
        if not parserlist:
            raise ValueError("could not find parser: {}".format(name))
    elif name is not None:
        # Bug fix: the error message referenced an undefined variable
        # ``parser`` (NameError); report the type of ``name`` instead.
        raise TypeError("parser must be {types} or None, not {actual}".format(
            types=" or ".join([six.text_type.__name__, six.binary_type.__name__]),
            actual=type(name).__name__,
        ))
    return not forced, parserlist
Return the appropriate parser asked by the user. Todo: Change `Ontology._get_parsers` behaviour to look for parsers through a setuptools entrypoint instead of mere subclasses.
def onPollCreated(
    self,
    mid=None,
    poll=None,
    author_id=None,
    thread_id=None,
    thread_type=None,
    ts=None,
    metadata=None,
    msg=None,
):
    """Called when the client is listening and somebody creates a group poll.

    :param mid: The action ID
    :param poll: Created poll (models.Poll)
    :param author_id: The ID of the person who created the poll
    :param thread_id: Thread ID that the action was sent to
    :param thread_type: Type of thread that the action was sent to
        (models.ThreadType)
    :param ts: A timestamp of the action
    :param metadata: Extra metadata about the action
    :param msg: A full set of the data received
    """
    log.info(
        "{} created poll {} in {} ({})".format(
            author_id, poll, thread_id, thread_type.name
        )
    )
Called when the client is listening, and somebody creates a group poll :param mid: The action ID :param poll: Created poll :param author_id: The ID of the person who created the poll :param thread_id: Thread ID that the action was sent to. See :ref:`intro_threads` :param thread_type: Type of thread that the action was sent to. See :ref:`intro_threads` :param ts: A timestamp of the action :param metadata: Extra metadata about the action :param msg: A full set of the data received :type poll: models.Poll :type thread_type: models.ThreadType
def get_trainer(name):
    """Return a stable 8-digit id for a trainer name.

    The id is derived from the md5 digest of the lowercased name, so it
    is deterministic and case-insensitive.
    """
    digest = hashlib.md5(name.lower().encode('utf-8')).hexdigest()
    return int(digest, 16) % 10**8
return the unique id for a trainer, determined by the md5 sum
def state_view_for_block(block_wrapper, state_view_factory):
    """Return the state view for an arbitrary block.

    Args:
        block_wrapper (BlockWrapper): block whose state view is wanted,
            or None for the default (no state root hash) view
        state_view_factory (StateViewFactory): factory used to create
            the StateView object

    Returns:
        StateView object associated with the block
    """
    if block_wrapper is None:
        state_root_hash = None
    else:
        state_root_hash = block_wrapper.state_root_hash
    return state_view_factory.create_view(state_root_hash)
Returns the state view for an arbitrary block. Args: block_wrapper (BlockWrapper): The block for which a state view is to be returned state_view_factory (StateViewFactory): The state view factory used to create the StateView object Returns: StateView object associated with the block
def sort(expr, field = None, keytype=None, ascending=True):
    """Sorts the vector.

    If the field parameter is provided then the sort operates on a
    vector of structs where the sort key is the given struct field.

    Args:
        expr (WeldObject)
        field (Int)
        keytype: Weld type used to negate the key for descending sort
        ascending (bool)

    NOTE(review): ``weld_template`` is read before it is ever assigned
    (``weld_template = weld_obj.weld_code = weld_template % ...``), so
    this raises NameError as written — the template string literal
    appears to have been lost; restore it before use.
    """
    weld_obj = WeldObject(encoder_, decoder_)
    expr_var = weld_obj.update(expr)
    if isinstance(expr, WeldObject):
        expr_var = expr.obj_id
        weld_obj.dependencies[expr_var] = expr
    if field is not None:
        key_str = "x.$%s" % field
    else:
        key_str = "x"
    if not ascending:
        # Negate the key to invert the ordering.
        key_str = key_str + "* %s(-1)" % keytype
    weld_template = weld_obj.weld_code = weld_template % {"expr": expr_var, "key": key_str}
    return weld_obj
Sorts the vector. If the field parameter is provided then the sort operators on a vector of structs where the sort key is the field of the struct. Args: expr (WeldObject) field (Int)
def _match_item(item, any_all=any, ignore_case=False, normalize_values=False, **kwargs):
    """Match items by metadata.

    Note:
        Metadata values are lowercased when ``normalize_values`` is
        ``True``, so ``ignore_case`` is effectively implied.

    Parameters:
        item: Item dict or filepath whose tags are extracted.
        any_all (callable): :obj:`any` (default) or :obj:`all`; decides
            whether one or every field/pattern pair must match.
        ignore_case (bool): Perform case-insensitive matching.
        normalize_values (bool): Normalize metadata values to remove
            common differences between sources.
        kwargs: Lists of patterns per metadata field.

    Returns:
        bool: True if matched, False if not.
    """
    it = get_item_tags(item)
    return any_all(
        _match_field(
            get_field(it, field), pattern,
            ignore_case=ignore_case, normalize_values=normalize_values
        )
        for field, patterns in kwargs.items()
        for pattern in patterns
    )
Match items by metadata. Note: Metadata values are lowercased when ``normalize_values`` is ``True``, so ``ignore_case`` is automatically set to ``True``. Parameters: item (~collections.abc.Mapping, str, os.PathLike): Item dict or filepath. any_all (callable): A callable to determine if any or all filters must match to match item. Expected values :obj:`any` (default) or :obj:`all`. ignore_case (bool): Perform case-insensitive matching. Default: ``False`` normalize_values (bool): Normalize metadata values to remove common differences between sources. Default: ``False`` kwargs (list): Lists of values to match the given metadata field. Returns: bool: True if matched, False if not.
def to_json(self):
    """Convert the graph to a JSON string.

    Serialises vertices (id, annotation) and edges (id, annotation,
    head, tail); non-ASCII text is preserved (``ensure_ascii=False``).
    """
    obj = {
        "vertices": [
            {
                "id": vertex.id,
                "annotation": vertex.annotation,
            }
            for vertex in self.vertices
        ],
        "edges": [
            {
                "id": edge.id,
                "annotation": edge.annotation,
                "head": edge.head,
                "tail": edge.tail,
            }
            for edge in self._edges
        ],
    }
    return six.text_type(json.dumps(obj, ensure_ascii=False))
Convert to a JSON string.
def to_wea(self, file_path, hoys=None):
    """Write a wea file from the epw file.

    WEA carries radiation values from epw. Gendaymtx uses these values
    to generate the sky. For an annual analysis it is identical to using
    epw2wea.

    args:
        file_path: Full file path for output file.
        hoys: List of hours of the year. Default is 0-8759.
    :return: the path of the written file.
    """
    hoys = hoys or xrange(len(self.direct_normal_radiation.datetimes))
    if not file_path.lower().endswith('.wea'):
        file_path += '.wea'
    originally_ip = False
    # WEA values must be SI; convert back afterwards if needed.
    if self.is_ip is True:
        self.convert_to_si()
        originally_ip = True
    lines = [self._get_wea_header()]
    datetimes = self.direct_normal_radiation.datetimes
    for hoy in hoys:
        dir_rad = self.direct_normal_radiation[hoy]
        dif_rad = self.diffuse_horizontal_radiation[hoy]
        # The half-hour offset marks the middle of the hourly interval.
        line = "%d %d %.3f %d %d\n" \
            % (datetimes[hoy].month,
               datetimes[hoy].day,
               datetimes[hoy].hour + 0.5,
               dir_rad, dif_rad)
        lines.append(line)
    file_data = ''.join(lines)
    write_to_file(file_path, file_data, True)
    if originally_ip is True:
        self.convert_to_ip()
    return file_path
Write an wea file from the epw file. WEA carries radiation values from epw. Gendaymtx uses these values to generate the sky. For an annual analysis it is identical to using epw2wea. args: file_path: Full file path for output file. hoys: List of hours of the year. Default is 0-8759.
def get_owner(obj_name, obj_type='file'):
    r'''
    Get the owner of the passed object.

    Args:
        obj_name (str): The path for which to obtain owner information;
            the format depends on ``obj_type``.
        obj_type (str): The type of object to query: file, service,
            printer, registry, registry32 or share.

    Returns:
        str: The owner (group or user)

    Usage:

    .. code-block:: python

        salt.utils.win_dacl.get_owner('c:\\file')
    '''
    try:
        obj_type_flag = flags().obj_type[obj_type.lower()]
    except KeyError:
        raise SaltInvocationError(
            'Invalid "obj_type" passed: {0}'.format(obj_type))
    if obj_type in ['registry', 'registry32']:
        # Registry paths must be normalised to the API's hive notation.
        obj_name = dacl().get_reg_name(obj_name)
    try:
        security_descriptor = win32security.GetNamedSecurityInfo(
            obj_name, obj_type_flag, win32security.OWNER_SECURITY_INFORMATION)
        owner_sid = security_descriptor.GetSecurityDescriptorOwner()
    except MemoryError:
        # Fall back to the null SID when the query blows up — presumably
        # certain objects trigger this; confirm against upstream salt.
        owner_sid = 'S-1-0-0'
    except pywintypes.error as exc:
        # winerror 1 / 50: operation not supported for this object type.
        if exc.winerror == 1 or exc.winerror == 50:
            owner_sid = 'S-1-0-0'
        else:
            log.exception('Failed to get the owner: %s', obj_name)
            raise CommandExecutionError(
                'Failed to get owner: {0}'.format(obj_name), exc.strerror)
    return get_name(owner_sid)
r''' Gets the owner of the passed object Args: obj_name (str): The path for which to obtain owner information. The format of this parameter is different depending on the ``obj_type`` obj_type (str): The type of object to query. This value changes the format of the ``obj_name`` parameter as follows: - file: indicates a file or directory - a relative path, such as ``FileName.txt`` or ``..\FileName`` - an absolute path, such as ``C:\DirName\FileName.txt`` - A UNC name, such as ``\\ServerName\ShareName\FileName.txt`` - service: indicates the name of a Windows service - printer: indicates the name of a printer - registry: indicates a registry key - Uses the following literal strings to denote the hive: - HKEY_LOCAL_MACHINE - MACHINE - HKLM - HKEY_USERS - USERS - HKU - HKEY_CURRENT_USER - CURRENT_USER - HKCU - HKEY_CLASSES_ROOT - CLASSES_ROOT - HKCR - Should be in the format of ``HIVE\Path\To\Key``. For example, ``HKLM\SOFTWARE\Windows`` - registry32: indicates a registry key under WOW64. Formatting is the same as it is for ``registry`` - share: indicates a network share Returns: str: The owner (group or user) Usage: .. code-block:: python salt.utils.win_dacl.get_owner('c:\\file')
def install_package(package, wheels_path, venv=None, requirement_files=None,
                    upgrade=False, install_args=None):
    """Install a Python package from a local wheels directory.

    Can specify a specific version, a prerelease, a venv to install in,
    requirement files, an upgrade, and extra pip install arguments.

    :raises WagonError: if the given venv does not exist or pip exits
        non-zero.
    """
    requirement_files = requirement_files or []
    logger.info('Installing %s...', package)
    if venv and not os.path.isdir(venv):
        raise WagonError('virtualenv {0} does not exist'.format(venv))
    pip_command = _construct_pip_command(
        package, wheels_path, venv, requirement_files, upgrade, install_args)
    if IS_VIRTUALENV and not venv:
        logger.info('Installing within current virtualenv')
    result = _run(pip_command)
    if not result.returncode == 0:
        raise WagonError('Could not install package: {0} ({1})'.format(
            package, result.aggr_stderr))
Install a Python package. Can specify a specific version. Can specify a prerelease. Can specify a venv to install in. Can specify a list of paths or urls to requirement txt files. Can specify a local wheels_path to use for offline installation. Can request an upgrade.
def remove_if_exists(filename):
    """Remove *filename*, tolerating its absence.

    Like :func:`os.remove` (or :func:`os.unlink`), except that no error
    is raised if the file does not exist; other OS-level failures still
    propagate.
    """
    try:
        os.unlink(filename)
    except FileNotFoundError:
        pass
Remove file. This is like :func:`os.remove` (or :func:`os.unlink`), except that no error is raised if the file does not exist.
def get_project_groups_roles(request, project):
    """Get the group roles in a given project.

    :param request: the request entity containing the login user
        information
    :param project: the project to filter the group roles; accepts both
        a project object resource or a project ID
    :returns: dict mapping group id -> list of role ids in the project
    """
    groups_roles = collections.defaultdict(list)
    project_role_assignments = role_assignments_list(request, project=project)
    for role_assignment in project_role_assignments:
        # User (non-group) assignments are skipped.
        if not hasattr(role_assignment, 'group'):
            continue
        group_id = role_assignment.group['id']
        role_id = role_assignment.role['id']
        # Only count assignments scoped to this exact project.
        if ('project' in role_assignment.scope and
                role_assignment.scope['project']['id'] == project):
            groups_roles[group_id].append(role_id)
    return groups_roles
Gets the groups roles in a given project. :param request: the request entity containing the login user information :param project: the project to filter the groups roles. It accepts both project object resource or project ID :returns group_roles: a dictionary mapping the groups and their roles in given project
def _ScanFileSystemForWindowsDirectory(self, path_resolver): result = False for windows_path in self._WINDOWS_DIRECTORIES: windows_path_spec = path_resolver.ResolvePath(windows_path) result = windows_path_spec is not None if result: self._windows_directory = windows_path break return result
Scans a file system for a known Windows directory. Args: path_resolver (WindowsPathResolver): Windows path resolver. Returns: bool: True if a known Windows directory was found.
def get_trust_id(self):
    """Gets the ``Trust`` ``Id`` for this authorization.

    return: (osid.id.Id) - the trust ``Id``
    raise: IllegalState - ``has_trust()`` is ``false``
    *compliance: mandatory -- This method must be implemented.*
    """
    if not bool(self._my_map['trustId']):
        raise errors.IllegalState('this Authorization has no trust')
    else:
        return Id(self._my_map['trustId'])
Gets the ``Trust`` ``Id`` for this authorization. return: (osid.id.Id) - the trust ``Id`` raise: IllegalState - ``has_trust()`` is ``false`` *compliance: mandatory -- This method must be implemented.*
def get_postgresql_args(db_config, extra_args=None):
    """Build the argument list passed to a ``psql``/``pg_dump`` process.

    :param db_config: Django-style database settings dict (NAME, USER,
        HOST, PORT).
    :param extra_args: optional string of extra CLI arguments, split
        shell-style.
    :return: list of argument strings, with the database name last.
    """
    db = db_config['NAME']
    # Only options whose value is set end up on the command line.
    mapping = [('--username={0}', db_config.get('USER')),
               ('--host={0}', db_config.get('HOST')),
               ('--port={0}', db_config.get('PORT'))]
    args = apply_arg_values(mapping)
    if extra_args is not None:
        args.extend(shlex.split(extra_args))
    args.append(db)
    return args
Returns an array of argument values that will be passed to a `psql` or `pg_dump` process when it is started based on the given database configuration.
def setControl(
        self, request_type, request, value, index, buffer_or_len,
        callback=None, user_data=None, timeout=0):
    """Setup transfer for control use.

    request_type, request, value, index
        See USBDeviceHandle.controlWrite. request_type defines the
        transfer direction (see ENDPOINT_OUT and ENDPOINT_IN).
    buffer_or_len
        Either a string (when sending data), or expected data length
        (when receiving data).
    callback
        Callback function to be invoked on transfer completion.  Called
        with the transfer as parameter, return value ignored.
    user_data
        User data to pass to the callback function.
    timeout
        Transfer timeout in milliseconds.  0 to disable.
    """
    if self.__submitted:
        raise ValueError('Cannot alter a submitted transfer')
    if self.__doomed:
        raise DoomedTransferError('Cannot reuse a doomed transfer')
    if isinstance(buffer_or_len, (int, long)):
        # Receive mode: allocate room for the setup packet plus payload.
        length = buffer_or_len
        string_buffer, transfer_py_buffer = create_binary_buffer(
            length + CONTROL_SETUP_SIZE,
        )
    else:
        # Send mode: prepend the control setup packet to the payload.
        length = len(buffer_or_len)
        string_buffer, transfer_py_buffer = create_binary_buffer(
            CONTROL_SETUP + buffer_or_len,
        )
    self.__initialized = False
    self.__transfer_buffer = string_buffer
    # Expose only the payload portion (setup header stripped) to Python.
    self.__transfer_py_buffer = integer_memoryview(
        transfer_py_buffer,
    )[CONTROL_SETUP_SIZE:]
    self.__user_data = user_data
    libusb1.libusb_fill_control_setup(
        string_buffer, request_type, request, value, index, length)
    libusb1.libusb_fill_control_transfer(
        self.__transfer, self.__handle, string_buffer,
        self.__ctypesCallbackWrapper, None, timeout)
    self.__callback = callback
    self.__initialized = True
Setup transfer for control use. request_type, request, value, index See USBDeviceHandle.controlWrite. request_type defines transfer direction (see ENDPOINT_OUT and ENDPOINT_IN)). buffer_or_len Either a string (when sending data), or expected data length (when receiving data). callback Callback function to be invoked on transfer completion. Called with transfer as parameter, return value ignored. user_data User data to pass to callback function. timeout Transfer timeout in milliseconds. 0 to disable.
def _send_solr_command(self, core_url, json_command):
    """Send a JSON command string to the Solr core's update endpoint.

    :return: True on success.
    :raises SolrException: when the HTTP request fails.
    """
    url = _get_url(core_url, "update")
    headers = {'Content-Type': 'application/json'}
    try:
        response = self.req_session.post(url, data=json_command, headers=headers)
        response.raise_for_status()
    except requests.RequestException as e:
        logger.error("Failed to send update to Solr endpoint [%s]: %s",
                     core_url, e, exc_info=True)
        raise SolrException("Failed to send command to Solr [%s]: %s" % (core_url, e,))
    return True
Sends JSON string to Solr instance
def arch(self):
    """Return a more standard representation of the architecture."""
    machine = self.machine
    if machine in ("x86_64", "AMD64", "i686"):
        # A 32-bit userland on a 64-bit machine still reports i386.
        return "i386" if self.architecture == "32bit" else "amd64"
    if machine == "x86":
        return "i386"
    return machine
Return a more standard representation of the architecture.
def to_array(self, *args, **kwargs):
    """Convert this tree into a NumPy structured array.

    All arguments are forwarded to :func:`root_numpy.tree2array`.
    """
    # Imported lazily so root_numpy is only required when this is used.
    from root_numpy import tree2array
    return tree2array(self, *args, **kwargs)
Convert this tree into a NumPy structured array
def post(self, command, data=None):
    """Post data to the API, refreshing the OAuth token when expired.

    :param command: API path appended to ``self.api``.
    :param data: optional payload dict.
    :return: result of the API call.
    """
    now = calendar.timegm(datetime.datetime.now().timetuple())
    if now > self.expiration:
        # Token expired: re-authenticate and refresh the auth header.
        auth = self.__open("/oauth/token", data=self.oauth)
        self.__sethead(auth['access_token'])
    return self.__open("%s%s" % (self.api, command), headers=self.head, data=data)
Post data to API.
def analyze(problem, X, Y, second_order=False, print_to_console=False, seed=None):
    """Perform a fractional factorial analysis.

    Returns a dictionary with keys 'ME' (main effect) and, when
    ``second_order`` is True, 'IE' (interaction effect).  The technique
    bulks out the number of parameters with dummy parameters to the
    nearest 2**n; results involving dummy parameters could indicate a
    problem with the model runs.

    Arguments
    ---------
    problem : dict
        The problem definition
    X : numpy.matrix
        The NumPy matrix containing the model inputs
    Y : numpy.array
        The NumPy array containing the model outputs
    second_order : bool, default=False
        Include interaction effects
    print_to_console : bool, default=False
        Print results directly to console
    seed : int, optional
        Seed for NumPy's random generator

    Returns
    -------
    Si : dict
        A dictionary of sensitivity indices, including main effects
        ``ME``, and interaction effects ``IE`` (if ``second_order`` is
        True)
    """
    if seed:
        np.random.seed(seed)
    problem = extend_bounds(problem)
    num_vars = problem['num_vars']
    # NOTE(review): the X argument is immediately overwritten here;
    # confirm whether the passed-in inputs were meant to be used.
    X = generate_contrast(problem)
    main_effect = (1. / (2 * num_vars)) * np.dot(Y, X)
    Si = ResultDict((k, [None] * num_vars) for k in ['names', 'ME'])
    Si['ME'] = main_effect
    Si['names'] = problem['names']
    if print_to_console:
        print("Parameter ME")
        for j in range(num_vars):
            print("%s %f" % (problem['names'][j], Si['ME'][j]))
    if second_order:
        interaction_names, interaction_effects = interactions(problem, Y, print_to_console)
        Si['interaction_names'] = interaction_names
        Si['IE'] = interaction_effects
    Si.to_df = MethodType(to_df, Si)
    return Si
Perform a fractional factorial analysis Returns a dictionary with keys 'ME' (main effect) and 'IE' (interaction effect). The techniques bulks out the number of parameters with dummy parameters to the nearest 2**n. Any results involving dummy parameters could indicate a problem with the model runs. Arguments --------- problem: dict The problem definition X: numpy.matrix The NumPy matrix containing the model inputs Y: numpy.array The NumPy array containing the model outputs second_order: bool, default=False Include interaction effects print_to_console: bool, default=False Print results directly to console Returns ------- Si: dict A dictionary of sensitivity indices, including main effects ``ME``, and interaction effects ``IE`` (if ``second_order`` is True) Examples -------- >>> X = sample(problem) >>> Y = X[:, 0] + (0.1 * X[:, 1]) + ((1.2 * X[:, 2]) * (0.2 + X[:, 0])) >>> analyze(problem, X, Y, second_order=True, print_to_console=True)
def _buffer(self, event: Message):
    """Append the event's payload to the matching internal buffer, if any."""
    if isinstance(event, BytesMessage):
        target = self._byte_buffer
    elif isinstance(event, TextMessage):
        target = self._string_buffer
    else:
        # Other message types are not buffered.
        return
    target.write(event.data)
Buffers an event, if applicable.
def included_length(self):
    """Surveyed length, not including "excluded" shots."""
    total = 0
    for shot in self.shots:
        if shot.is_included:
            total += shot.length
    return total
Surveyed length, not including "excluded" shots
def _maybe_cast_slice_bound(self, label, side, kind):
    """Adapted from pandas.tseries.index.DatetimeIndex._maybe_cast_slice_bound.

    Turn a partial ISO-8601 string label into a concrete slice bound;
    non-string labels pass through unchanged.
    """
    if isinstance(label, str):
        parsed, resolution = _parse_iso8601_with_reso(self.date_type, label)
        start, end = _parsed_string_to_bounds(self.date_type, resolution, parsed)
        if self.is_monotonic_decreasing and len(self) > 1:
            # Descending index: bounds are swapped relative to ascending.
            return end if side == 'left' else start
        return start if side == 'left' else end
    else:
        return label
Adapted from pandas.tseries.index.DatetimeIndex._maybe_cast_slice_bound
def class_parameters(decorator):
    """Build a class decorator that wraps every callable attribute of the
    decorated class with *decorator*.

    :param decorator: function decorator applied to each callable
        attribute found in the class's own ``__dict__``.
    :raises TypeError: when applied to something that is not a class.
    """
    def decorate(the_class):
        if not isclass(the_class):
            raise TypeError(
                'class_parameters(the_class=%s) you must pass a class' % (
                    the_class
                )
            )
        for attr_name in the_class.__dict__:
            attr_value = getattr(the_class, attr_name)
            if callable(attr_value):
                setattr(the_class, attr_name, decorator(attr_value))
        return the_class
    return decorate
To wrap all class methods with static_parameters decorator
def calc_exp(skydir, ltc, event_class, event_types, egy, cth_bins, npts=None):
    """Calculate the exposure on a 2D grid of energy and incidence angle.

    Parameters
    ----------
    npts : int
        Number of points by which to sample the response in each
        incidence angle bin.  If None then npts will be automatically
        set such that incidence angle is sampled on intervals of < 0.025
        in Cos(Theta).

    Returns
    -------
    exp : `~numpy.ndarray`
        2D Array of exposures vs. energy and incidence angle.
    """
    if npts is None:
        npts = int(np.ceil(np.max(cth_bins[1:] - cth_bins[:-1]) / 0.025))
    exp = np.zeros((len(egy), len(cth_bins) - 1))
    cth_bins = utils.split_bin_edges(cth_bins, npts)
    cth = edge_to_center(cth_bins)
    # Livetime per sub-bin, reshaped so that each coarse bin's npts
    # sub-bins can be summed out below.
    ltw = ltc.get_skydir_lthist(skydir, cth_bins).reshape(-1, npts)
    for et in event_types:
        aeff = create_aeff(event_class, et, egy, cth)
        aeff = aeff.reshape(exp.shape + (npts,))
        # Exposure = sum over sub-bins of effective area * livetime.
        exp += np.sum(aeff * ltw[np.newaxis, :, :], axis=-1)
    return exp
Calculate the exposure on a 2D grid of energy and incidence angle. Parameters ---------- npts : int Number of points by which to sample the response in each incidence angle bin. If None then npts will be automatically set such that incidence angle is sampled on intervals of < 0.05 in Cos(Theta). Returns ------- exp : `~numpy.ndarray` 2D Array of exposures vs. energy and incidence angle.
def _is_in_restart(self, x, y): x1, y1, x2, y2 = self._new_game return x1 <= x < x2 and y1 <= y < y2
Checks if the game is to be restarted by request.
def commit(self, f):
    """Move the temporary file to the target location.

    Replaces the target atomically when overwriting is allowed,
    otherwise moves atomically (failing if the target exists).
    """
    if self._overwrite:
        replace_atomic(f.name, self._path)
    else:
        move_atomic(f.name, self._path)
Move the temporary file to the target location.
def get_thermostability(self, at_temp):
    """Run the thermostability calculator (Dill or Oobatake methods).

    Stores the calculated (dG, Keq) tuple in the ``annotations``
    attribute, under the key ``thermostability_<TEMP>_C-<method>``.

    See :func:`ssbio.protein.sequence.properties.thermostability.get_dG_at_T`
    for instructions and details.
    """
    # Imported lazily so the heavy dependency is only loaded on use.
    import ssbio.protein.sequence.properties.thermostability as ts
    dG = ts.get_dG_at_T(seq=self, temp=at_temp)
    # dG unpacks as (value, Keq, method name); the method name is folded
    # into the annotation key.
    self.annotations['thermostability_{}_C-{}'.format(at_temp, dG[2].lower())] = (dG[0], dG[1])
Run the thermostability calculator using either the Dill or Oobatake methods. Stores calculated (dG, Keq) tuple in the ``annotations`` attribute, under the key `thermostability_<TEMP>-<METHOD_USED>`. See :func:`ssbio.protein.sequence.properties.thermostability.get_dG_at_T` for instructions and details.
def deploy_snmp(snmp, host=None, admin_username=None, admin_password=None, module=None):
    """Change the QuickDeploy SNMPv2 read-only community string, used
    for switches as well.

    CLI Example:

    .. code-block:: bash

        salt dell dracr.deploy_snmp SNMP_STRING host=<remote DRAC or CMC>
            admin_username=<DRAC user> admin_password=<DRAC PW>
    """
    return __execute_cmd('deploy -v SNMPv2 {0} ro'.format(snmp),
                         host=host,
                         admin_username=admin_username,
                         admin_password=admin_password,
                         module=module)
Change the QuickDeploy SNMP community string, used for switches as well CLI Example: .. code-block:: bash salt dell dracr.deploy_snmp SNMP_STRING host=<remote DRAC or CMC> admin_username=<DRAC user> admin_password=<DRAC PW> salt dell dracr.deploy_password diana secret
def loadJson(d):
    """Load and return the JSON payload stored for *d*.

    The file path is resolved via ``jsonFn`` and read as UTF-8.
    """
    with codecs.open(jsonFn(d), 'r', 'utf-8') as handle:
        return json.load(handle)
Return JSON data.
def extract_keywords(cls, line, items):
    """Parse a ``key=value [key=value ...]`` string into the *items* dict.

    Splits on '=', validates each key against ``cls.allowed`` and peels
    trailing option names off values so adjacent pairs separate cleanly.

    :param line: keyword string to parse
    :param items: dict updated in place with the parsed options
    :return: the updated *items* dict
    :raises SyntaxError: on unknown keywords or unevaluable values
    """
    unprocessed = list(reversed(line.split('=')))
    while unprocessed:
        chunk = unprocessed.pop()
        key = None
        if chunk.strip() in cls.allowed:
            key = chunk.strip()
        else:
            raise SyntaxError("Invalid keyword: %s" % chunk.strip())
        value = unprocessed.pop().strip()
        if len(unprocessed) != 0:
            # The tail of this chunk is actually the next keyword; split
            # it off and push it back for the next iteration.
            for option in cls.allowed:
                if value.endswith(option):
                    value = value[:-len(option)].strip()
                    unprocessed.append(option)
                    break
            else:
                raise SyntaxError("Invalid keyword: %s" % value.split()[-1])
        keyword = '%s=%s' % (key, value)
        try:
            # SECURITY NOTE(review): eval on the raw keyword string —
            # only safe if `line` is trusted input.
            items.update(eval('dict(%s)' % keyword))
        except:
            raise SyntaxError("Could not evaluate keyword: %s" % keyword)
    return items
Given the keyword string, parse a dictionary of options.
def _send_and_wait(self, **kwargs):
    """Send a frame and block until the matching response arrives.

    A fresh frame id is attached to the outgoing frame; the response is
    matched by that id in ``self._rx_frames``.

    :raises exceptions.ZigBeeResponseTimeout: if no response arrives
        within ``const.RX_TIMEOUT``.
    """
    frame_id = self.next_frame_id
    kwargs.update(dict(frame_id=frame_id))
    self._send(**kwargs)
    timeout = datetime.now() + const.RX_TIMEOUT
    while datetime.now() < timeout:
        try:
            # Responses are filed by frame id by the receive path.
            frame = self._rx_frames.pop(frame_id)
            raise_if_error(frame)
            return frame
        except KeyError:
            # Not there yet -- poll again after a short pause.
            sleep(0.1)
            continue
    _LOGGER.exception(
        "Did not receive response within configured timeout period.")
    raise exceptions.ZigBeeResponseTimeout()
Send a frame to either the local ZigBee or a remote device and wait for a pre-defined amount of time for its response.
def get(self, url, ignore_access_time=False):
    """Try to retrieve *url* from the cache if available.

    :param url: url to retrieve
    :param ignore_access_time: ignore expiry of the cached entry
    :return: ``(data, CacheInfo)``;
        ``(None, None)`` -> not found (or cache read failure),
        ``(None, CacheInfo)`` -> found but expired,
        ``(data, CacheInfo)`` -> found in cache
    """
    # NOTE(review): hashlib.md5 requires bytes on Python 3 -- assumes
    # callers pass a byte string (or Python 2 str) here; confirm.
    key = hashlib.md5(url).hexdigest()
    accessed = self._cache_meta_get(key)
    if not accessed:
        self.debug("From inet {}".format(url))
        return None, None
    # Metadata may be stored as a full dict or as a bare access time.
    if isinstance(accessed, dict):
        cached = CacheInfo.from_dict(accessed)
    else:
        cached = CacheInfo(accessed)
    now = now_utc()
    if now - cached.access_time > self.duration and not ignore_access_time:
        self.debug("From inet (expired) {}".format(url))
        return None, cached
    try:
        res = self._cache_get(key)
    # Narrowed from a bare "except:" so SystemExit/KeyboardInterrupt
    # propagate; any cache read failure still falls back to the network.
    except Exception:
        self.exception("Failed to read cache")
        self.debug("From inet (failure) {}".format(url))
        return None, None
    self.debug("From cache {}".format(url))
    return res, cached
Try to retrieve url from cache if available :param url: Url to retrieve :type url: str | unicode :param ignore_access_time: Should ignore the access time :type ignore_access_time: bool :return: (data, CacheInfo) None, None -> not found in cache None, CacheInfo -> found, but is expired data, CacheInfo -> found in cache :rtype: (None | str | unicode, None | floscraper.models.CacheInfo)
def django_url(step, url=None):
    """Return the URL for a page on the live test server.

    :param step: a Gherkin step (carries the running test instance)
    :param url: relative URL to append; if falsy, the base URL is returned
    """
    base = step.test.live_server_url
    return urljoin(base, url) if url else base
The URL for a page from the test server. :param step: A Gherkin step :param url: If specified, the relative URL to append.
def read_be_array(fmt, count, fp):
    """Read *count* big-endian elements of typecode *fmt* from file *fp*.

    Returns the array with byte order corrected for the host.
    """
    result = array.array(str(fmt))
    raw = fp.read(count * result.itemsize)
    if hasattr(result, 'frombytes'):
        result.frombytes(raw)
    else:
        # Python 2 spelling of frombytes.
        result.fromstring(raw)
    return fix_byteorder(result)
Reads an array from a file with big-endian data.
def _is_viable_phone_number(number):
    """Check whether *number* could possibly be a phone number.

    Requires at least ``_MIN_LENGTH_FOR_NSN`` characters and a full match
    of ``_VALID_PHONE_NUMBER_PATTERN``.  Assumes leading non-number
    symbols have already been stripped by the caller.
    """
    return (len(number) >= _MIN_LENGTH_FOR_NSN
            and fullmatch(_VALID_PHONE_NUMBER_PATTERN, number) is not None)
Checks to see if a string could possibly be a phone number. At the moment, checks to see that the string begins with at least 2 digits, ignoring any punctuation commonly found in phone numbers. This method does not require the number to be normalized in advance - but does assume that leading non-number symbols have been removed, such as by the method _extract_possible_number. Arguments: number -- string to be checked for viability as a phone number Returns True if the number could be a phone number of some sort, otherwise False
def attached_socket(self, *args, **kwargs):
    """Open a raw socket for a ``with`` block to write events to Splunk.

    Arguments are identical to :meth:`attach`.  The socket is shut down
    and closed when the block exits, even on exception.

    Fix: ``self.attach`` is now called BEFORE the try/finally.  Previously
    a failure inside ``attach`` left ``sock`` unbound, so the ``finally``
    clause raised UnboundLocalError and masked the real error.
    """
    sock = self.attach(*args, **kwargs)
    try:
        yield sock
    finally:
        sock.shutdown(socket.SHUT_RDWR)
        sock.close()
Opens a raw socket in a ``with`` block to write data to Splunk. The arguments are identical to those for :meth:`attach`. The socket is automatically closed at the end of the ``with`` block, even if an exception is raised in the block. :param host: The host value for events written to the stream. :type host: ``string`` :param source: The source value for events written to the stream. :type source: ``string`` :param sourcetype: The sourcetype value for events written to the stream. :type sourcetype: ``string`` :returns: Nothing. **Example**:: import splunklib.client as client s = client.connect(...) index = s.indexes['some_index'] with index.attached_socket(sourcetype='test') as sock: sock.send('Test event\\r\\n')
def _get(self, *args, **kwargs): all_messages = [] for storage in self.storages: messages, all_retrieved = storage._get() if messages is None: break if messages: self._used_storages.add(storage) all_messages.extend(messages) if all_retrieved: break return all_messages, all_retrieved
Gets a single list of messages from all storage backends.
def send(self, obj_id):
    """Send email to the assigned lists.

    :param obj_id: id of the object to send
    :return: processed API response (dict or str)
    """
    url = '{url}/{id}/send'.format(url=self.endpoint_url, id=obj_id)
    response = self._client.session.post(url)
    return self.process_response(response)
Send email to the assigned lists :param obj_id: int :return: dict|str
def fill_in_arguments(config, modules, args):
    """Fill in configuration fields from command-line arguments.

    Walks *modules* and, for each module's ``runtime_keys`` mapping,
    copies any non-None value from *args* into *config* (in place).

    :param dict config: configuration tree to update
    :param modules: modules or Configurable instances to consult
    :param args: dict or argparse-style namespace of command-line values
    :return: config
    """
    if not isinstance(args, collections.Mapping):
        args = vars(args)

    def apply_args(config, module, name):
        # Map each module attribute name to its config key.
        runtime = getattr(module, 'runtime_keys', {})
        for (attr, config_name) in iteritems(runtime):
            value = args.get(attr, None)
            if value is not None:
                config[config_name] = value

    return _walk_config(config, modules, apply_args)
Fill in configuration fields from command-line arguments. `config` is a dictionary holding the initial configuration, probably the result of :func:`assemble_default_config`. It reads through `modules`, and for each, fills in any configuration values that are provided in `args`. `config` is modified in place. `args` may be either a dictionary or an object (as the result of :mod:`argparse`). :param dict config: configuration tree to update :param modules: modules or Configurable instances to use :type modules: iterable of :class:`~yakonfig.configurable.Configurable` :param args: command-line objects :paramtype args: dict or object :return: config
def purge_bad_timestamp_files(file_list):
    """Given a list of image files, find bad frames, delete them from disk
    and return the trimmed file list.

    Only an initial run of bad frames is tolerated: if any bad timestamp
    appears at index >= 15 an exception is raised.
    """
    MAX_INITIAL_BAD_FRAMES = 15
    bad_ts = Kinect.detect_bad_timestamps(Kinect.timestamps_from_file_list(file_list))
    if not bad_ts:
        # Nothing to purge.
        return file_list
    last_bad = max(bad_ts)
    if last_bad >= MAX_INITIAL_BAD_FRAMES:
        raise Exception('Only 15 initial bad frames are allowed, but last bad frame is %d' % last_bad)
    # Delete every file up to and including the last bad frame.
    for i in range(last_bad + 1):
        os.remove(file_list[i])
    file_list = file_list[last_bad+1:]
    return file_list
Given a list of image files, find bad frames, remove them and modify file_list
def layer_from_element(element, style_function=None):
    """Return a Leaflet GeoJSON layer for the given telluric element.

    :param element: GeoVector, GeoFeature or BaseCollection to plot
    :param style_function: optional callable applied to each feature
    """
    from telluric.collections import BaseCollection
    if isinstance(element, BaseCollection):
        styled = element.map(lambda feat: style_element(feat, style_function))
    else:
        styled = style_element(element, style_function)
    return GeoJSON(data=mapping(styled), name='GeoJSON')
Return Leaflet layer from shape. Parameters ---------- element : telluric.vectors.GeoVector, telluric.features.GeoFeature, telluric.collections.BaseCollection Data to plot.
def add_special(self, name):
    """Register a special name like ``loop``: mark it declared and drop
    it from the undeclared set."""
    self.declared.add(name)
    self.undeclared.discard(name)
Register a special name like `loop`.
def request_goto(self, tc=None):
    """Request a go-to-assignment.

    :param tc: Text cursor containing the text whose assignment we must
        look for; ``None`` means use the word under the editor's cursor.
    :type tc: QtGui.QTextCursor
    """
    if not tc:
        tc = TextHelper(self.editor).word_under_cursor(
            select_whole_word=True)
    # Select the word when no definition is cached yet, or when triggered
    # from a menu action (the sender is a QAction).
    if not self._definition or isinstance(self.sender(), QAction):
        self.select_word(tc)
    if self._definition is not None:
        # Defer the jump slightly so the selection is applied first.
        # NOTE(review): the 100 ms delay looks like a UI-settling hack --
        # confirm before changing.
        QTimer.singleShot(100, self._goto_def)
Request a go to assignment. :param tc: Text cursor which contains the text that we must look for its assignment. Can be None to go to the text that is under the text cursor. :type tc: QtGui.QTextCursor
def _generate_encryption_data_dict(kek, cek, iv):
    """Generate and return the encryption metadata as an ordered dict.

    :param object kek: key encryption key (provides wrap_key/get_kid/
        get_key_wrap_algorithm)
    :param bytes cek: content encryption key
    :param bytes iv: initialization vector
    :rtype: dict
    """
    wrapped_cek = kek.wrap_key(cek)
    wrapped_content_key = OrderedDict([
        ('KeyId', kek.get_kid()),
        ('EncryptedKey', _encode_base64(wrapped_cek)),
        ('Algorithm', kek.get_key_wrap_algorithm()),
    ])
    encryption_agent = OrderedDict([
        ('Protocol', _ENCRYPTION_PROTOCOL_V1),
        ('EncryptionAlgorithm', _EncryptionAlgorithm.AES_CBC_256),
    ])
    return OrderedDict([
        ('WrappedContentKey', wrapped_content_key),
        ('EncryptionAgent', encryption_agent),
        ('ContentEncryptionIV', _encode_base64(iv)),
        ('KeyWrappingMetadata', {'EncryptionLibrary': 'Python ' + __version__}),
    ])
Generates and returns the encryption metadata as a dict. :param object kek: The key encryption key. See calling functions for more information. :param bytes cek: The content encryption key. :param bytes iv: The initialization vector. :return: A dict containing all the encryption metadata. :rtype: dict
def commit(self):
    """Apply the queued injections, overwriting any previous injections
    in the target files, then clear the files queued for clearing."""
    self.logger.debug("Starting injections...")
    self.logger.debug("Injections dict is:")
    self.logger.debug(self.inject_dict)
    self.logger.debug("Clear list is:")
    self.logger.debug(self.clear_set)
    for target, payload in self.inject_dict.items():
        payload = _unicode(payload)
        self.logger.debug("Injecting values into %s..." % target)
        self.destructive_inject(target, payload)
    for target in self.clear_set:
        self.logger.debug("Clearing injection from %s..." % target)
        self.destructive_clear(target)
commit the injections desired, overwriting any previous injections in the file.
def get_compute(self, compute=None, **kwargs):
    """Filter in the 'compute' context.

    :param str compute: name of the compute options (optional)
    :param kwargs: any other tags for the filter
    :return: :class:`phoebe.parameters.parameters.ParameterSet`
    """
    kwargs['context'] = 'compute'
    if compute is not None:
        kwargs['compute'] = compute
    return self.filter(**kwargs)
Filter in the 'compute' context :parameter str compute: name of the compute options (optional) :parameter **kwargs: any other tags to do the filter (except compute or context) :return: :class:`phoebe.parameters.parameters.ParameterSet`
def _create_axes(filenames, file_dict): try: f = iter(f for tup in file_dict.itervalues() for f in tup if f is not None).next() except StopIteration as e: raise (ValueError("No FITS files were found. " "Searched filenames: '{f}'." .format( f=filenames.values())), None, sys.exc_info()[2]) axes = FitsAxes(f[0].header) for i, u in enumerate(axes.cunit): if u == 'DEG': axes.cunit[i] = 'RAD' axes.set_axis_scale(i, np.pi/180.0) return axes
Create a FitsAxes object
def simulate(self, T):
    """Simulate the state and observation processes.

    :param T: number of time steps (simulated from time 0 to T-1)
    :return: ``(x, y)`` -- lists of length T of states and observations
    """
    states = []
    for t in range(T):
        # Initial distribution at t == 0, transition kernel afterwards.
        dist = self.PX0() if t == 0 else self.PX(t, states[-1])
        states.append(dist.rvs(size=1))
    observations = self.simulate_given_x(states)
    return states, observations
Simulate state and observation processes. Parameters ---------- T: int processes are simulated from time 0 to time T-1 Returns ------- x, y: lists lists of length T
def similar(self, address_line, max_results=None):
    """Get valid addresses similar to *address_line*.

    Useful for matching invalid addresses to valid ones.

    :param address_line: address search term
    :param max_results: cap on results; falls back to ``self.max_results``
    """
    params = {
        "term": address_line,
        "max_results": max_results or self.max_results,
    }
    return self._make_request('/address/getSimilar', params)
Gets a list of valid addresses that are similar to the given term; it can be used to match invalid addresses to valid addresses.
def delete_record(self, identifier=None, rtype=None, name=None, content=None, **kwargs):
    """Delete an existing record; do nothing if it does not exist.

    Uses *identifier* when given, otherwise looks the record up by
    type, name and content.  The legacy ``type`` keyword is still
    honoured but emits a DeprecationWarning.
    """
    legacy_type = kwargs.get('type')
    if not rtype and legacy_type:
        warnings.warn('Parameter "type" is deprecated, use "rtype" instead.',
                      DeprecationWarning)
        rtype = legacy_type
    return self._delete_record(identifier=identifier, rtype=rtype,
                               name=name, content=content)
Delete an existing record. If record does not exist, do nothing. If an identifier is specified, use it, otherwise do a lookup using type, name and content.
def listFieldsFromSource(self, template_source):
    """Return every attribute the template source needs for rendering.

    :param template_source: template content as a string
    :return: set of undeclared variable names
    :rtype: set
    """
    parsed_ast = self.environment.parse(template_source)
    return jinja2.meta.find_undeclared_variables(parsed_ast)
List all the attributes to be rendered directly from template source :param template_source: the template source (usually represents the template content in string format) :return: a :class:`set` contains all the needed attributes :rtype: set
def _mark_target(type, item): if type not in ('input', 'output'): msg = 'Error (7D74X): Type is not valid: {0}'.format(type) raise ValueError(msg) orig_item = item if isinstance(item, list): item_s = item else: item_s = [item] for item in item_s: if isinstance(item, str) and os.path.isabs(item): msg = ( 'Error (5VWOZ): Given path is not relative path: {0}.' ).format(item) raise ValueError(msg) return _ItemWrapper(type=type, item=orig_item)
Wrap given item as input or output target that should be added to task. Wrapper object will be handled specially in \ :paramref:`create_cmd_task.parts`. :param type: Target type. Allowed values: - 'input' - 'output' :param item: Item to mark as input or output target. Allowed values: - Relative path relative to top directory. - Node object. - List of these. :return: Wrapper object.
def _get_select_commands(self, source, tables):
    """Create select queries for all tables of a source database.

    :param source: source database name (used in the progress bar label)
    :param tables: iterable of table names
    :return: flat list of ``(table, command)`` pairs
    """
    label = 'Getting {0} select queries'.format(source)
    queries = {}
    for tbl in tqdm(tables, total=len(tables), desc=label):
        queries[tbl] = self.select_all(tbl, execute=False)
    # Normalise single-command strings to one-element lists.
    for tbl, command in queries.items():
        if isinstance(command, str):
            queries[tbl] = [command]
    return [(tbl, cmd) for tbl, cmds in queries.items() for cmd in cmds]
Create select queries for all of the tables from a source database. :param source: Source database name :param tables: Iterable of table names :return: Dictionary of table keys, command values
def lexical_parent(self):
    """Return (and lazily cache) the lexical parent for this cursor."""
    try:
        return self._lexical_parent
    except AttributeError:
        self._lexical_parent = conf.lib.clang_getCursorLexicalParent(self)
        return self._lexical_parent
Return the lexical parent for this cursor.
def user_has_access(self, user):
    """Check if *user* may view information for this account.

    Admins always have access; otherwise the account must be enabled
    and either have no required roles or share a role with the user.

    :param user: User object to check
    :return: True if the user has access, else False
    """
    if ROLE_ADMIN in user.roles:
        return True
    if not self.enabled:
        return False
    if not self.required_roles:
        return True
    return any(role in user.roles for role in self.required_roles)
Check if a user has access to view information for the account Args: user (:obj:`User`): User object to check Returns: True if user has access to the account, else false
def _linear_predictor(self, X=None, modelmat=None, b=None, term=-1): if modelmat is None: modelmat = self._modelmat(X, term=term) if b is None: b = self.coef_[self.terms.get_coef_indices(term)] return modelmat.dot(b).flatten()
linear predictor compute the linear predictor portion of the model ie multiply the model matrix by the spline basis coefficients Parameters --------- at least 1 of (X, modelmat) and at least 1 of (b, feature) X : array-like of shape (n_samples, m_features) or None, optional containing the input dataset if None, will attempt to use modelmat modelmat : array-like or None, optional contains the spline basis for each feature evaluated at the input values for each feature, ie model matrix if None, will attempt to construct the model matrix from X b : array-like or None, optional contains the spline coefficients if None, will use current model coefficients feature : int, optional feature for which to compute the linear prediction if -1, will compute for all features Returns ------- lp : np.array of shape (n_samples,)
def set_file(name, source, template=None, context=None, defaults=None, **kwargs):
    """Set debconf selections from a file or a template.

    source: location of the file containing the package selections.
    template: optional templating engine (jinja, mako, wempy) used to
        render the selections file before applying it.
    context: overrides default context variables passed to the template.
    defaults: default context passed to the template.
    Returns a standard state return dict (name/changes/result/comment).
    """
    ret = {'name': name,
           'changes': {},
           'result': True,
           'comment': ''}
    if context is None:
        context = {}
    elif not isinstance(context, dict):
        ret['result'] = False
        ret['comment'] = 'Context must be formed as a dict'
        return ret
    if defaults is None:
        defaults = {}
    elif not isinstance(defaults, dict):
        ret['result'] = False
        ret['comment'] = 'Defaults must be formed as a dict'
        return ret
    # Dry-run mode: report what would happen without touching debconf.
    if __opts__['test']:
        ret['result'] = None
        ret['comment'] = 'Debconf selections would have been set.'
        return ret
    if template:
        result = __salt__['debconf.set_template'](source, template, context,
                                                  defaults, **kwargs)
    else:
        result = __salt__['debconf.set_file'](source, **kwargs)
    if result:
        ret['comment'] = 'Debconf selections were set.'
    else:
        ret['result'] = False
        ret['comment'] = 'Unable to set debconf selections from file.'
    return ret
Set debconf selections from a file or a template .. code-block:: yaml <state_id>: debconf.set_file: - source: salt://pathto/pkg.selections <state_id>: debconf.set_file: - source: salt://pathto/pkg.selections?saltenv=myenvironment <state_id>: debconf.set_file: - source: salt://pathto/pkg.selections.jinja2 - template: jinja - context: some_value: "false" source: The location of the file containing the package selections template If this setting is applied then the named templating engine will be used to render the package selections file, currently jinja, mako, and wempy are supported context Overrides default context variables passed to the template. defaults Default context passed to the template.
def set_attr(self, name, val, dval=None, dtype=None, reset=False):
    """Set attribute *name* on this object.

    *dval* is used when *val* is None, allowing an options entry to be
    passed as *val* with a fallback default.  When *dtype* is given the
    value is converted (plain type, or an object exposing ``.type``).
    Unless *reset* is True the attribute is only set if it is missing or
    currently None, so derived-class values take preference.
    """
    if val is None and dval is not None:
        val = dval
    if val is not None and dtype is not None:
        val = dtype(val) if isinstance(dtype, type) else dtype.type(val)
    # Missing attribute and attribute-set-to-None are treated the same.
    if reset or getattr(self, name, None) is None:
        setattr(self, name, val)
Set an object attribute by its name. The attribute value can be specified as a primary value `val`, and as default value 'dval` that will be used if the primary value is None. This arrangement allows an attribute to be set from an entry in an options object, passed as `val`, while specifying a default value to use, passed as `dval` in the event that the options entry is None. Unless `reset` is True, the attribute is only set if it doesn't exist, or if it exists with value None. This arrangement allows for attributes to be set in both base and derived class initialisers, with the derived class value taking preference. Parameters ---------- name : string Attribute name val : any Primary attribute value dval : any Default attribute value in case `val` is None dtype : data-type, optional (default None) If the `dtype` parameter is not None, the attribute `name` is set to `val` (which is assumed to be of numeric type) after conversion to the specified type. reset : bool, optional (default False) Flag indicating whether attribute assignment should be conditional on the attribute not existing or having value None. If False, an attribute value other than None will not be overwritten.
def get_verse(self, v=1):
    """Return verse *v* (1-based), or None when out of range.

    Fix: the original only checked the upper bound, so ``v=0`` (or any
    negative *v*) silently wrapped through Python's negative indexing
    and returned a verse from the END of the list.
    """
    if 1 <= v <= len(self.verses):
        return self.verses[v - 1]
    return None
Get a specific verse.
def search(session, query):
    """Naive ILIKE search of the database for *query*.

    Artists are also matched against the query with whitespace removed.

    :return: dict with keys 'artists', 'albums', 'tracks', each a list
        of the respective ORM type.
    """
    flat_query = "".join(query.split())
    artist_rows = session.query(Artist).filter(
        or_(Artist.name.ilike(f"%%{query}%%"),
            Artist.name.ilike(f"%%{flat_query}%%"))
    ).all()
    album_rows = session.query(Album).filter(
        Album.title.ilike(f"%%{query}%%")).all()
    track_rows = session.query(Track).filter(
        Track.title.ilike(f"%%{query}%%")).all()
    return dict(artists=artist_rows, albums=album_rows, tracks=track_rows)
Naive search of the database for `query`. :return: A dict with keys 'artists', 'albums', and 'tracks'. Each containing a list of the respective ORM type.
def timeout_queue_add(self, item, cache_time=0):
    """Queue *item* to run at a future time.

    Wakes the scheduler when there is no pending deadline yet, or when
    this item is due before the current one.
    """
    self.timeout_add_queue.append((item, cache_time))
    due = self.timeout_due
    if due is None or cache_time < due:
        self.update_request.set()
Add a item to be run at a future time. This must be a Module, I3statusModule or a Task
def raw_data_engine(**kwargs):
    """Engine to extract raw data.  Not implemented yet.

    Fixes: the original logged "cycles_engine" (copy-paste from another
    engine) and carried unreachable farm/barn-building code AFTER the
    ``raise NotImplementedError`` -- that dead code is removed here.
    """
    logger.debug("raw_data_engine")
    raise NotImplementedError
engine to extract raw data
def lambda_A_calc(classes, table, P, POP):
    """Calculate Goodman and Kruskal's lambda A.

    :param classes: confusion matrix classes (list)
    :param table: confusion matrix table (dict of dicts)
    :param P: condition positive counts per class (dict)
    :param POP: population size (int)
    :return: lambda A as float, or the string "None" on failure
    """
    try:
        best_reference = max(P.values())
        # Sum of the per-column maxima of the confusion matrix.
        column_maxima = 0
        for cls in classes:
            column_maxima += max(row[cls] for row in table.values())
        return (column_maxima - best_reference) / (POP - best_reference)
    except Exception:
        return "None"
Calculate Goodman and Kruskal's lambda A. :param classes: confusion matrix classes :type classes : list :param table: confusion matrix table :type table : dict :param P: condition positive :type P : dict :param POP: population :type POP : int :return: Goodman and Kruskal's lambda A as float
def fix_germline_samplename(in_file, sample_name, data):
    """Replace germline sample names, originally from the normal BAM file.

    Rewrites the VCF header sample column to *sample_name* with
    ``bcftools reheader``, then returns the bgzipped and indexed output
    path.  The reheader step is skipped when the output already exists.
    """
    out_file = "%s-fixnames%s" % utils.splitext_plus(in_file)
    if not utils.file_exists(out_file):
        with file_transaction(data, out_file) as tx_out_file:
            # bcftools reheader reads the new sample names from a text
            # file, one name per line.
            sample_file = "%s-samples.txt" % utils.splitext_plus(tx_out_file)[0]
            with open(sample_file, "w") as out_handle:
                out_handle.write("%s\n" % sample_name)
            cmd = ("bcftools reheader -s {sample_file} {in_file} -o {tx_out_file}")
            do.run(cmd.format(**locals()), "Fix germline samplename: %s" % sample_name)
    return vcfutils.bgzip_and_index(out_file, data["config"])
Replace germline sample names, originally from normal BAM file.
def write(gmt, out_path):
    """Write a GMT (list of set dicts) to a tab-separated text file.

    Each line is: set id, set description, then the set members.

    Fix: dropped the pointless ``for _, each_dict in enumerate(gmt)``
    (the index was never used) and assembles each row with a single
    join/format instead of piecemeal writes.

    :param gmt: GMT object, a list of dicts
    :param out_path: output file path
    :return: None
    """
    with open(out_path, 'w') as f:
        for each_dict in gmt:
            members = '\t'.join(str(entry) for entry in each_dict[SET_MEMBERS_FIELD])
            f.write('{}\t{}\t{}\n'.format(each_dict[SET_IDENTIFIER_FIELD],
                                          each_dict[SET_DESC_FIELD],
                                          members))
Write a GMT to a text file. Args: gmt (GMT object): list of dicts out_path (string): output path Returns: None
def check_required_keys(self, required_keys):
    """Raise InsufficientGraftMPackageException if this package is
    missing any of *required_keys*; otherwise return None."""
    contents = self._contents_hash
    for required in required_keys:
        if required in contents:
            continue
        raise InsufficientGraftMPackageException("Package missing key %s" % required)
raise InsufficientGraftMPackageException if this package does not conform to the standard of the given package