code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def _update_version(connection, version):
    """Persist the schema ``version`` in the database.

    SQLite stores it in ``PRAGMA user_version``; PostgreSQL keeps it in a
    dedicated ``user_version`` table (created on demand along with the
    required schemas).

    Args:
        connection: SQLAlchemy connection used to write the version.
        version (int): migration version to record.

    Raises:
        DatabaseMissingError: if the engine is neither sqlite nor postgresql.
    """
    engine_name = connection.engine.name
    if engine_name == 'sqlite':
        connection.execute('PRAGMA user_version = {}'.format(version))
    elif engine_name == 'postgresql':
        for schema in (POSTGRES_SCHEMA_NAME, POSTGRES_PARTITION_SCHEMA_NAME):
            connection.execute(DDL('CREATE SCHEMA IF NOT EXISTS {};'.format(schema)))
        connection.execute(
            'CREATE TABLE IF NOT EXISTS {}.user_version(version INTEGER NOT NULL);'
            .format(POSTGRES_SCHEMA_NAME))
        existing = connection.execute(
            'SELECT * FROM {}.user_version;'.format(POSTGRES_SCHEMA_NAME)).fetchone()
        if existing:
            connection.execute('UPDATE {}.user_version SET version = {};'
                               .format(POSTGRES_SCHEMA_NAME, version))
        else:
            connection.execute('INSERT INTO {}.user_version (version) VALUES ({})'
                               .format(POSTGRES_SCHEMA_NAME, version))
    else:
        raise DatabaseMissingError('Do not know how to migrate {} engine.'
                                   .format(connection.engine.driver))
Updates version in the db to the given version. Args: connection (sqlalchemy connection): sqlalchemy session where to update version. version (int): version of the migration.
def create_criteria(cls, query):
    """Build a list of criteria dictionaries from a query mapping.

    ``query`` maps field names to values; a list value is expanded
    (recursively) into one criterion per element.

    Returns:
        list of criteria dicts, or None when the query is empty.
    """
    criteria = []
    for field, value in query.items():
        if isinstance(value, list):
            # Expand list values into one criterion per element.
            for item in value:
                criteria.extend(cls.create_criteria({field: item}))
        else:
            criteria.append({'criteria': {'field': field, 'value': value}})
    return criteria or None
Return a criteria from a dictionary containing a query. Query should be a dictionary, keyed by field name. If the value is a list, it will be divided into multiple criteria as required.
def score(self, periods=None):
    """Compute the periodogram for the given period or periods.

    Parameters
    ----------
    periods : float or array_like
        Period(s) at which to evaluate the periodogram.

    Returns
    -------
    np.ndarray
        Normalized powers, same shape as ``periods``.
    """
    period_arr = np.asarray(periods)
    flat_scores = self._score(period_arr.ravel())
    return flat_scores.reshape(period_arr.shape)
Compute the periodogram for the given period or periods Parameters ---------- periods : float or array_like Array of periods at which to compute the periodogram. Returns ------- scores : np.ndarray Array of normalized powers (between 0 and 1) for each period. Shape of scores matches the shape of the provided periods.
def to_data_rows(self, brains):
    """Map each brain to its data record.

    Returns a lazy ``map`` of ``get_data_record(brain, fields)`` results,
    one per brain, using this instance's field names.
    """
    field_names = self.get_field_names()
    return map(lambda b: self.get_data_record(b, field_names), brains)
Returns a list of dictionaries representing the values of each brain
def _most_restrictive(date_elems):
    """Return the most restrictive date element found in ``date_elems``.

    ``DATE_ELEMENTS`` is assumed ordered from most to least restrictive, so
    the element with the smallest index wins.

    Raises:
        KeyError: if none of ``date_elems`` is a known date element.
    """
    most_index = len(DATE_ELEMENTS)
    for date_elem in date_elems:
        if date_elem in DATE_ELEMENTS:
            most_index = min(most_index, DATE_ELEMENTS.index(date_elem))
    if most_index < len(DATE_ELEMENTS):
        return DATE_ELEMENTS[most_index]
    # BUG FIX: the message previously said "least restrictive", contradicting
    # this function's documented purpose (it finds the MOST restrictive).
    raise KeyError('No most restrictive date element found')
Return the date_elem that has the most restrictive range from date_elems
def sensor(self, name, config=None, inactive_sensor_expiration_time_seconds=sys.maxsize, parents=None):
    """Get or create a sensor with the given unique name.

    Uses double-checked locking: a lock-free lookup first, then a second
    lookup under the lock before creating, so concurrent callers all get
    the same Sensor instance.

    Arguments:
        name (str): unique sensor name.
        config (MetricConfig, optional): default config for metrics on this
            sensor that have no config of their own.
        inactive_sensor_expiration_time_seconds (int, optional): idle time
            after which the sensor is eligible for removal.
        parents (list of Sensor): parent sensors; every value recorded on
            this sensor is forwarded to them.

    Returns:
        Sensor: the existing or newly created sensor.
    """
    existing = self.get_sensor(name)
    if existing:
        return existing
    with self._lock:
        # Re-check under the lock: another thread may have created it.
        existing = self.get_sensor(name)
        if existing:
            return existing
        created = Sensor(self, name, parents, config or self.config,
                         inactive_sensor_expiration_time_seconds)
        self._sensors[name] = created
        for parent in (parents or []):
            siblings = self._children_sensors.get(parent)
            if not siblings:
                siblings = []
                self._children_sensors[parent] = siblings
            siblings.append(created)
        logger.debug('Added sensor with name %s', name)
        return created
Get or create a sensor with the given unique name and zero or more parent sensors. All parent sensors will receive every value recorded with this sensor. Arguments: name (str): The name of the sensor config (MetricConfig, optional): A default configuration to use for this sensor for metrics that don't have their own config inactive_sensor_expiration_time_seconds (int, optional): If no value if recorded on the Sensor for this duration of time, it is eligible for removal parents (list of Sensor): The parent sensors Returns: Sensor: The sensor that is created
def polls_slug_get(self, slug, **kwargs):
    """Fetch a single Poll by slug.

    Synchronous by default; pass a ``callback`` kwarg to run asynchronously,
    in which case the request thread is returned instead of the data.

    :param str slug: unique poll identifier, e.g. ``gallup-26892``.
    :return: Poll data, or the request thread when a callback is given.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Async path: *_with_http_info returns the request thread.
        return self.polls_slug_get_with_http_info(slug, **kwargs)
    data = self.polls_slug_get_with_http_info(slug, **kwargs)
    return data
Poll A Poll on Pollster is a collection of questions and responses published by a reputable survey house. This endpoint provides raw data from the survey house, plus Pollster-provided metadata about each question. Pollster editors don't include every question when they enter Polls, and they don't necessarily enter every subpopulation for the responses they _do_ enter. They make editorial decisions about which questions belong in the database. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.polls_slug_get(slug, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str slug: Unique Poll identifier. For example: `gallup-26892`. (required) :return: Poll If the method is called asynchronously, returns the request thread.
def encrypt(self, plaintext_data_key, encryption_context):
    """Encrypt a data key using a direct wrapping key.

    Asymmetric wrapping encrypts with the public half of the key (derived
    from a private key when needed) and returns raw ciphertext without an
    IV or tag.  Symmetric wrapping serializes the encryption context as
    associated data and encrypts with a fresh random IV.

    :param bytes plaintext_data_key: data key to encrypt
    :param dict encryption_context: encryption context for the operation
    :returns: EncryptedData containing the encrypted key
    """
    if self.wrapping_algorithm.encryption_type is EncryptionType.ASYMMETRIC:
        if self.wrapping_key_type is EncryptionKeyType.PRIVATE:
            public_key = self._wrapping_key.public_key()
        else:
            public_key = self._wrapping_key
        ciphertext = public_key.encrypt(
            plaintext=plaintext_data_key, padding=self.wrapping_algorithm.padding
        )
        return EncryptedData(iv=None, ciphertext=ciphertext, tag=None)
    associated_data = serialize_encryption_context(encryption_context=encryption_context)
    random_iv = os.urandom(self.wrapping_algorithm.algorithm.iv_len)
    return encrypt(
        algorithm=self.wrapping_algorithm.algorithm,
        key=self._derived_wrapping_key,
        plaintext=plaintext_data_key,
        associated_data=associated_data,
        iv=random_iv,
    )
Encrypts a data key using a direct wrapping key. :param bytes plaintext_data_key: Data key to encrypt :param dict encryption_context: Encryption context to use in encryption :returns: Deserialized object containing encrypted key :rtype: aws_encryption_sdk.internal.structures.EncryptedData
def post(self, request, *args, **kwargs):
    """Handle POST: substitute submitted data into the page's attributes.

    Overwrites the object's ``content`` and ``title`` from the POST
    payload, marks HTML fields as safe, and renders the page as usual.
    """
    self.object = self.get_object()
    self.object.content = request.POST['content']
    self.object.title = request.POST['title']
    self.object = self._mark_html_fields_as_safe(self.object)
    rendering_context = self.get_context_data(object=self.object)
    return self.render_to_response(rendering_context,
                                   content_type=self.get_mimetype())
Accepts POST requests, and substitute the data in for the page's attributes.
def update_application_metadata(template, application_id, sar_client=None):
    """Update a SAR application's metadata from a packaged SAM template.

    :param template: content of a packaged YAML or JSON SAM template
    :type template: str_or_dict
    :param application_id: ARN of the application to update
    :type application_id: str
    :param sar_client: boto3 client used to access SAR; created on demand
    :type sar_client: boto3.client
    :raises ValueError: if template or application_id is missing
    """
    if not template or not application_id:
        raise ValueError('Require SAM template and application ID to update application metadata')
    sar_client = sar_client or boto3.client('serverlessrepo')
    metadata = get_app_metadata(_get_template_dict(template))
    request = _update_application_request(metadata, application_id)
    sar_client.update_application(**request)
Update the application metadata. :param template: Content of a packaged YAML or JSON SAM template :type template: str_or_dict :param application_id: The Amazon Resource Name (ARN) of the application :type application_id: str :param sar_client: The boto3 client used to access SAR :type sar_client: boto3.client :raises ValueError
def _create_logger(name='did', level=None):
    """Create the did logger with colored output and custom levels.

    BUG FIX: the level-constant loop reused the name ``level``, shadowing
    and discarding the parameter, which was consequently never applied.
    The parameter now sets the logger level when provided.

    Args:
        name: logger name.
        level: optional logging level to set on the logger.
    """
    logger = logging.getLogger(name)
    handler = logging.StreamHandler()
    handler.setFormatter(Logging.ColoredFormatter())
    logger.addHandler(handler)
    # Expose the standard level constants as attributes on the logger.
    for level_name in Logging.LEVELS:
        setattr(logger, level_name, getattr(logging, level_name))
    logger.DATA = LOG_DATA
    logger.DETAILS = LOG_DETAILS
    logger.ALL = LOG_ALL
    logger.details = lambda message: logger.log(LOG_DETAILS, message)
    logger.data = lambda message: logger.log(LOG_DATA, message)
    logger.all = lambda message: logger.log(LOG_ALL, message)
    if level is not None:
        logger.setLevel(level)
    return logger
Create did logger
def by_user_and_perm(cls, user_id, perm_name, db_session=None):
    """Fetch a single permission row by user id and permission name.

    :param user_id: id of the user
    :param perm_name: name of the permission
    :param db_session: optional session; the default one is used if omitted
    :return: first matching model instance, or None
    """
    session = get_db_session(db_session)
    return (session.query(cls.model)
            .filter(cls.model.user_id == user_id)
            .filter(cls.model.perm_name == perm_name)
            .first())
return by user and permission name :param user_id: :param perm_name: :param db_session: :return:
def ensure_header(self, header: BlockHeader=None) -> BlockHeader:
    """Return ``header`` unchanged, or build one from the canonical head.

    When ``header`` is None a new header is created from the current
    canonical head; otherwise the given header is returned as-is.
    """
    if header is not None:
        return header
    return self.create_header_from_parent(self.get_canonical_head())
Return ``header`` if it is not ``None``, otherwise return the header of the canonical head.
def set_agent(self, agent):
    """Link this behaviour with its owner agent.

    Creates the behaviour's mailbox queue and mirrors the agent's presence
    and web facades.

    Args:
        agent (spade.agent.Agent): the agent who owns the behaviour
    """
    self.agent = agent
    # BUG FIX: asyncio.Queue's ``loop`` argument was deprecated in Python 3.8
    # and removed in 3.10.  The queue now binds to the running loop on first
    # use, which is the agent's loop when the behaviour executes.
    self.queue = asyncio.Queue()
    self.presence = agent.presence
    self.web = agent.web
Links behaviour with its owner agent Args: agent (spade.agent.Agent): the agent who owns the behaviour
def change_db_user_password(username, password):
    """Change a db user's password.

    NOTE(review): ``username`` is interpolated as a SQL identifier and
    cannot be escaped generically here; it must come from a trusted source.
    """
    # SECURITY FIX: escape single quotes in the password (standard SQL ''
    # doubling) so a quote cannot break the statement or inject SQL.
    safe_password = password.replace("'", "''")
    sql = "ALTER USER %s WITH PASSWORD '%s'" % (username, safe_password)
    excute_query(sql, use_sudo=True)
Change a db user's password.
def project_dev_requirements():
    """Print the combined requirements of all configured peltak commands.

    The list is dynamic: it depends on the commands configured in the
    project's pelconf.yaml.
    """
    from peltak.core import conf
    from peltak.core import shell

    for requirement in sorted(conf.requirements):
        shell.cprint(requirement)
List requirements for peltak commands configured for the project. This list is dynamic and depends on the commands you have configured in your project's pelconf.yaml. This will be the combined list of packages needed to be installed in order for all the configured commands to work.
def setup_function(self):
    """Run prior to the global main function.

    Configures logging verbosity, resolves (and creates) the build root
    directory, and applies cache/hardlink command-line flags.
    """
    log.options.LogOptions.set_stderr_log_level('google:INFO')
    if app.get_options().debug:
        log.options.LogOptions.set_stderr_log_level('google:DEBUG')
    if not app.get_options().build_root:
        # Default the build root under the butcher base directory.
        app.set_option('build_root', os.path.join(
            app.get_options().butcher_basedir, 'build'))
    # Re-read options: set_option above may have changed build_root.
    self.buildroot = app.get_options().build_root
    if not os.path.exists(self.buildroot):
        os.makedirs(self.buildroot)
    if app.get_options().disable_cache_fetch:
        self.options['cache_fetch'] = False
    if app.get_options().disable_hardlinks:
        # Class-level switch: disables hardlinking for all builders.
        base.BaseBuilder.linkfiles = False
Runs prior to the global main function.
def fwd_chunk(self):
    """Return the chunk following this one in the list of free chunks.

    Abstract: subclasses must override.  Raises NotImplementedError naming
    the method and the concrete class.
    """
    message = "%s not implemented for %s" % (self.fwd_chunk.__func__.__name__,
                                             self.__class__.__name__)
    raise NotImplementedError(message)
Returns the chunk following this chunk in the list of free chunks.
def relabel(self, qubits: Qubits) -> 'Channel':
    """Return a copy of this channel with its vector relabeled to ``qubits``."""
    relabeled = copy(self)
    relabeled.vec = relabeled.vec.relabel(qubits)
    return relabeled
Return a copy of this channel with new qubits
def _filter_headers(self): headers = {} for user in self.usernames: headers["fedora_messaging_user_{}".format(user)] = True for package in self.packages: headers["fedora_messaging_rpm_{}".format(package)] = True for container in self.containers: headers["fedora_messaging_container_{}".format(container)] = True for module in self.modules: headers["fedora_messaging_module_{}".format(module)] = True for flatpak in self.flatpaks: headers["fedora_messaging_flatpak_{}".format(flatpak)] = True return headers
Add headers designed for filtering messages based on objects. Returns: dict: Filter-related headers to be combined with the existing headers
def getShape3D(self, includeJunctions=False):
    """Return the lane shape as a list of (x, y, z) tuples.

    With ``includeJunctions=True`` (and a non-special edge) the from-node
    and to-node coordinates are added around the shape; that extended shape
    is computed lazily and cached.
    """
    if includeJunctions and not self._edge.isSpecial():
        if self._shapeWithJunctions3D is None:
            # Cache the junction-extended shape on first request.
            self._shapeWithJunctions3D = addJunctionPos(
                self._shape3D,
                self._edge.getFromNode().getCoord3D(),
                self._edge.getToNode().getCoord3D())
        return self._shapeWithJunctions3D
    return self._shape3D
Returns the shape of the lane in 3d. This function returns the shape of the lane, as defined in the net.xml file. The returned shape is a list containing numerical 3-tuples representing the x,y,z coordinates of the shape points where z defaults to zero. For includeJunctions=True the returned list will additionally contain the coords (x,y,z) of the fromNode of the corresponding edge as the first element and the coords (x,y,z) of the toNode as the last element. For internal lanes, includeJunctions is ignored and the unaltered shape of the lane is returned.
def offline(f):
    """Decorator providing ``ctx.bitshares`` as an offline BitShares instance.

    Sets ``offline=True`` in the click context object before instantiating
    BitShares and registers it as the shared instance.
    """
    @click.pass_context
    @verbose
    def new_func(ctx, *args, **kwargs):
        ctx.obj["offline"] = True
        ctx.bitshares = BitShares(**ctx.obj)
        ctx.blockchain = ctx.bitshares
        ctx.bitshares.set_shared_instance()
        return ctx.invoke(f, *args, **kwargs)
    # Preserve the wrapped function's metadata (name, docstring).
    return update_wrapper(new_func, f)
This decorator allows you to access ``ctx.bitshares`` which is an instance of BitShares with ``offline=True``.
def managepy(cmd, extra=None):
    """Run manage.py using this component's specific Django settings.

    ``extra`` is a whitespace-separated string of additional arguments.
    """
    extra_args = extra.split() if extra else []
    run_django_cli(['invoke', cmd] + extra_args)
Run manage.py using this component's specific Django settings
def _format_extname(self, ext): if ext is None: outs = ext else: outs = '{0},{1}'.format(ext[0], ext[1]) return outs
Pretty print given extension name and number tuple.
def main(argv):
    """Play matches between two neural nets.

    argv: (program name, black model path, white model path).
    """
    _, black_model, white_model = argv
    utils.ensure_dir_exists(FLAGS.eval_sgf_dir)
    play_match(black_model, white_model, FLAGS.num_evaluation_games, FLAGS.eval_sgf_dir)
Play matches between two neural nets.
def from_blob(cls, s):
    """Construct a molecular graph from its blob representation.

    The blob is "<numbers> <edges>" where numbers are comma-separated atomic
    numbers and each edge token is "i_j_order".
    """
    atom_blob, edge_blob = s.split()
    numbers = np.array([int(tok) for tok in atom_blob.split(",")])
    edges = []
    orders = []
    for edge_tok in edge_blob.split(","):
        i, j, order = (int(part) for part in edge_tok.split("_"))
        edges.append((i, j))
        orders.append(order)
    return cls(edges, numbers, np.array(orders))
Construct a molecular graph from the blob representation
def _can_be_double(x): return ((np.issubdtype(x.dtype, np.floating) and x.dtype.itemsize <= np.dtype(float).itemsize) or (np.issubdtype(x.dtype, np.signedinteger) and np.can_cast(x, float)))
Return if the array can be safely converted to double. That happens when the dtype is a float with the same size of a double or narrower, or when is an integer that can be safely converted to double (if the roundtrip conversion works).
def _collect_layer_output_min_max(mod, data, include_layer=None, max_num_examples=None, logger=None):
    """Collect min and max values from layer outputs.

    Returns:
        tuple: (dict mapping layer name -> (min, max), number of examples used)
    """
    collector = _LayerOutputMinMaxCollector(include_layer=include_layer, logger=logger)
    num_examples = _collect_layer_statistics(mod, data, collector, max_num_examples, logger)
    return collector.min_max_dict, num_examples
Collect min and max values from layer outputs and save them in a dictionary mapped by layer names.
def cors(*args, **kwargs):
    """Class-aware wrapper around flask-cors ``cross_origin``.

    Applied to a function it behaves like ``cross_origin``; applied to a
    class it wraps every member instead.  Note that a response must exist
    before CORS headers can be attached, so use decorators that produce a
    response (json/xml) or return ``self.render()``.
    """
    def decorator(fn):
        cors_fn = flask_cors.cross_origin(automatic_options=False, *args, **kwargs)
        if inspect.isclass(fn):
            # Wrap every member of the class in place; return the class.
            apply_function_to_members(fn, cors_fn)
            return fn
        return cors_fn(fn)
    return decorator
A wrapper around flask-cors cross_origin, to also act on classes **An extra note about cors, a response must be available before the cors is applied. Dynamic return is applied after the fact, so use the decorators, json, xml, or return self.render() for txt/html ie: @cors() class Index(Mocha): def index(self): return self.render() @json def json(self): return {} class Index2(Mocha): def index(self): return self.render() @cors() @json def json(self): return {} :return:
def _file_local_or_remote(f, get_retriever): if os.path.exists(f): return f integration, config = get_retriever.integration_and_config(f) if integration: return integration.file_exists(f, config)
Check for presence of a local or remote file.
def convert_to_shape(x):
    """Convert the input to a Shape.

    Args:
        x: Shape, str, or None.

    Returns:
        Shape, or None when x is None.
    """
    if x is None:
        return None
    if isinstance(x, Shape):
        return x
    if isinstance(x, str):
        # Parse "name:size;..." style strings into (name, size) pairs first.
        x = _parse_string_to_list_of_pairs(x, seconds_to_int=True)
    return Shape(x)
Converts input to a Shape. Args: x: Shape, str, or None. Returns: Shape or None. Raises: ValueError: If x cannot be converted to a Shape.
def recov_date(self, return_date=False):
    """Drawdown recovery date.

    The date at which the series recovered to its previous high-water mark
    after the maximum drawdown.  Returns pd.NaT when recovery has not
    occurred.

    Parameters
    ----------
    return_date : bool, default False
        If True, return a ``datetime.date``; otherwise a pandas Timestamp.
    """
    dd = self.drawdown_idx()
    # Mask everything before the max-drawdown trough, then find the first
    # point at or after the trough where the drawdown returns to zero.
    before_trough = nancumprod(dd != nanmin(dd)).astype(bool)
    recovered = dd.mask(before_trough) == 0
    recov = recovered.idxmax() if recovered.any() else pd.NaT
    return recov.date() if return_date else recov
Drawdown recovery date. Date at which `self` recovered to the previous high-water mark. Parameters ---------- return_date : bool, default False If True, return a `datetime.date` object. If False, return a Pandas Timestamp object. Returns ------- {datetime.date, pandas._libs.tslib.Timestamp, pd.NaT} Returns NaT if recovery has not occurred.
def from_tushare(dataframe, dtype='day'):
    """Wrap a tushare dataframe in the matching QA_DataStruct.

    Arguments:
        dataframe: raw tushare dataframe.
        dtype: 'day' for daily bars, 'min' for minute bars.
    """
    if dtype in ['day']:
        indexed = (dataframe
                   .assign(date=pd.to_datetime(dataframe.date))
                   .set_index(['date', 'code'], drop=False))
        return QA_DataStruct_Stock_day(indexed, dtype='stock_day')
    elif dtype in ['min']:
        indexed = (dataframe
                   .assign(datetime=pd.to_datetime(dataframe.datetime))
                   .set_index(['datetime', 'code'], drop=False))
        return QA_DataStruct_Stock_min(indexed, dtype='stock_min')
dataframe from tushare Arguments: dataframe {[type]} -- [description] Returns: [type] -- [description]
def translate_connect_args(self, names=None, **kw):
    r"""Translate url attributes into a dictionary of connection arguments.

    Returns attributes of this url (`host`, `database`, `username`,
    `password`, `port`) as a plain dictionary; unset or false attributes
    are omitted.  Keys default to the attribute names and may be renamed
    via ``\**kw`` or the deprecated positional ``names`` list.

    BUG FIX: ``names`` previously defaulted to a mutable ``[]`` that the
    function popped from, corrupting the shared default (and any
    caller-supplied list) across calls.  A private copy is consumed now.
    """
    translated = {}
    # Consume a copy so neither the default nor the caller's list mutates.
    pending_names = list(names) if names else []
    attribute_names = ["host", "database", "username", "password", "port"]
    for sname in attribute_names:
        if pending_names:
            name = pending_names.pop(0)
        elif sname in kw:
            name = kw[sname]
        else:
            name = sname
        if name is not None and getattr(self, sname, False):
            translated[name] = getattr(self, sname)
    return translated
Translate url attributes into a dictionary of connection arguments. Returns attributes of this url (`host`, `database`, `username`, `password`, `port`) as a plain dictionary. The attribute names are used as the keys by default. Unset or false attributes are omitted from the final dictionary. :param \**kw: Optional, alternate key names for url attributes. :param names: Deprecated. Same purpose as the keyword-based alternate names, but correlates the name to the original positionally.
def _init_user_stub(self, **stub_kwargs): task_args = stub_kwargs.copy() self.testbed.setup_env(overwrite=True, USER_ID=task_args.pop('USER_ID', 'testuser'), USER_EMAIL=task_args.pop('USER_EMAIL', 'testuser@example.org'), USER_IS_ADMIN=task_args.pop('USER_IS_ADMIN', '1')) self.testbed.init_user_stub(**task_args)
Initializes the user stub using nosegae config magic
def cast(self, dtype):
    """Cast this Block (all children and parameters) to another data type.

    Parameters
    ----------
    dtype : str or numpy.dtype
        The new data type.
    """
    for child in self._children.values():
        child.cast(dtype)
    for _name, param in self.params.items():
        param.cast(dtype)
Cast this Block to use another data type. Parameters ---------- dtype : str or numpy.dtype The new data type.
def update_cluster(cluster_ref, cluster_spec):
    """Updates a cluster in a datacenter.

    cluster_ref
        The cluster reference.

    cluster_spec
        The cluster spec (vim.ClusterConfigSpecEx).

    vim/vmodl faults are translated into salt VMware exceptions; the update
    task is waited on before returning.
    """
    cluster_name = get_managed_object_name(cluster_ref)
    log.trace('Updating cluster \'%s\'', cluster_name)
    try:
        task = cluster_ref.ReconfigureComputeResource_Task(cluster_spec, modify=True)
    except vim.fault.NoPermission as exc:
        log.exception(exc)
        raise salt.exceptions.VMwareApiError(
            'Not enough permissions. Required privilege: '
            '{}'.format(exc.privilegeId))
    except vim.fault.VimFault as exc:
        log.exception(exc)
        raise salt.exceptions.VMwareApiError(exc.msg)
    except vmodl.RuntimeFault as exc:
        log.exception(exc)
        raise salt.exceptions.VMwareRuntimeError(exc.msg)
    # Block until the reconfigure task finishes (raises on task failure).
    wait_for_task(task, cluster_name, 'ClusterUpdateTask')
Updates a cluster in a datacenter. cluster_ref The cluster reference. cluster_spec The cluster spec (vim.ClusterConfigSpecEx). Defaults to None.
def parse_yaml(self, y):
    """Parse a YAML specification of a preceding condition into this object.

    Reads timeout (default 0), sendingTiming (default 'ASYNC') and any
    preceding components from the condition block.
    """
    super(Preceding, self).parse_yaml(y)
    cond = y['condition']['preceding']
    self.timeout = int(cond['timeout']) if 'timeout' in cond else 0
    self.sending_timing = cond['sendingTiming'] if 'sendingTiming' in cond else 'ASYNC'
    self._preceding_components = []
    for comp in cond.get('precedingComponents', []):
        self._preceding_components.append(
            TargetExecutionContext().parse_yaml(comp))
    return self
Parse a YAML specification of a preceding condition into this object.
def cli(env, billing_id, datacenter):
    """Adds a load balancer given the id returned from create-options."""
    mgr = SoftLayer.LoadBalancerManager(env.client)
    # Billable action: require explicit confirmation before proceeding.
    if not formatting.confirm("This action will incur charges on your "
                              "account. Continue?"):
        raise exceptions.CLIAbort('Aborted.')
    mgr.add_local_lb(billing_id, datacenter=datacenter)
    env.fout("Load balancer is being created!")
Adds a load balancer given the id returned from create-options.
def _make_postfixes_1( analysis ):
    """Normalize negation labels ('neg' -> trailing 'Neg') in the form field.

    Expects ``analysis[FORM]`` to exist; after rewriting, 'Neg' may only
    appear at the end of the form string.
    """
    assert FORM in analysis, '(!) The input analysis does not contain "'+FORM+'" key.'
    if 'neg' in analysis[FORM]:
        # Move a leading 'neg' marker to a trailing 'Neg' and collapse
        # duplicated / affirmative+negative combinations.
        analysis[FORM] = re.sub(r'^\s*neg ([^,]*)$', '\\1 Neg', analysis[FORM])
        analysis[FORM] = re.sub(r' Neg Neg$', ' Neg', analysis[FORM])
        analysis[FORM] = re.sub(r' Aff Neg$', ' Neg', analysis[FORM])
        analysis[FORM] = re.sub(r'neg', 'Neg', analysis[FORM])
        analysis[FORM] = analysis[FORM].strip()
        assert 'neg' not in analysis[FORM], \
            '(!) The label "neg" should be removed by now.'
        assert 'Neg' not in analysis[FORM] or ('Neg' in analysis[FORM] and analysis[FORM].endswith('Neg')), \
            '(!) The label "Neg" should end the analysis line: '+str(analysis[FORM])
    return analysis
Provides some post-fixes.
def setTxPower(self, tx_power, peername=None):
    """Set the transmit power on one or all readers.

    When ``peername`` is not given, every connected reader is updated;
    otherwise only the reader whose peer host matches.
    """
    if peername:
        targets = [p for p in self.protocols if p.peername[0] == peername]
    else:
        targets = self.protocols
    for proto in targets:
        proto.setTxPower(tx_power)
Set the transmit power on one or all readers If peername is None, set the transmit power for all readers. Otherwise, set it for that specific reader.
def __auth_descriptor(self, api_info): if api_info.auth is None: return None auth_descriptor = {} if api_info.auth.allow_cookie_auth is not None: auth_descriptor['allowCookieAuth'] = api_info.auth.allow_cookie_auth if api_info.auth.blocked_regions: auth_descriptor['blockedRegions'] = api_info.auth.blocked_regions return auth_descriptor
Builds an auth descriptor from API info. Args: api_info: An _ApiInfo object. Returns: A dictionary with 'allowCookieAuth' and/or 'blockedRegions' keys.
def addService(self, service, parentService=None):
    """Add a service to this job.

    The service's ``start`` method runs after this job's run method but
    before any successors; its ``stop`` method runs once the successors
    have finished.

    :raises toil.job.JobException: if the service already has a parent, or
        if ``parentService`` is not a service of this job.
    :param service: Service to add.
    :param parentService: optional service started before ``service``;
        allows trees of services and must already belong to this job.
    :return: a promise replaced with the return value of ``service.start``
        in any successor of the job.
    """
    if parentService is not None:
        # Check that parentService belongs to this job, searching the
        # child-service trees recursively.
        def check(services):
            for jS in services:
                if jS.service == parentService or check(jS.service._childServices):
                    return True
            return False
        if not check(self._services):
            raise JobException("Parent service is not a service of the given job")
        return parentService._addChild(service)
    else:
        if service._hasParent:
            raise JobException("The service already has a parent service")
        # Mark as owned so the same service cannot be added twice.
        service._hasParent = True
        jobService = ServiceJob(service)
        self._services.append(jobService)
        return jobService.rv()
Add a service. The :func:`toil.job.Job.Service.start` method of the service will be called after the run method has completed but before any successors are run. The service's :func:`toil.job.Job.Service.stop` method will be called once the successors of the job have been run. Services allow things like databases and servers to be started and accessed by jobs in a workflow. :raises toil.job.JobException: If service has already been made the child of a job or another service. :param toil.job.Job.Service service: Service to add. :param toil.job.Job.Service parentService: Service that will be started before 'service' is started. Allows trees of services to be established. parentService must be a service of this job. :return: a promise that will be replaced with the return value from :func:`toil.job.Job.Service.start` of service in any successor of the job. :rtype: toil.job.Promise
def valueRepr(self):
    """Return the value, transformed by the behavior when one is attached."""
    if self.behavior:
        return self.behavior.valueRepr(self)
    return self.value
Transform the representation of the value according to the behavior, if any.
def _CheckPacketSize(cursor):
    """Checks that MySQL packet size is big enough for expected query size.

    Raises:
        Error: when the configured max_allowed_packet is below
            MAX_PACKET_SIZE.
    """
    cur_packet_size = int(_ReadVariable("max_allowed_packet", cursor))
    if cur_packet_size < MAX_PACKET_SIZE:
        raise Error(
            "MySQL max_allowed_packet of {0} is required, got {1}. "
            "Please set max_allowed_packet={0} in your MySQL config.".format(
                MAX_PACKET_SIZE, cur_packet_size))
Checks that MySQL packet size is big enough for expected query size.
def remove(self, label):
    """Remove a label.

    Args:
        label (gkeepapi.node.Label): The Label object.
    """
    if label.id in self._labels:
        # Entry is set to None rather than deleted — presumably a tombstone
        # so the removal propagates on sync; confirm against the sync code.
        self._labels[label.id] = None
        self._dirty = True
Remove a label. Args: label (gkeepapi.node.Label): The Label object.
def predict_encoding(file_path, n_lines=20):
    """Get the file encoding of a text file.

    Only the first ``n_lines`` lines are sampled for detection.
    """
    import chardet
    with open(file_path, 'rb') as fh:
        sample = b''.join(fh.readline() for _ in range(n_lines))
    return chardet.detect(sample)['encoding']
Get file encoding of a text file
def save(self, url, storage_options=None):
    """Output this catalog to a file as YAML.

    Parameters
    ----------
    url : str
        Location to save to, perhaps remote.
    storage_options : dict
        Extra arguments for the file-system.
    """
    from dask.bytes import open_files
    files = open_files([url], **(storage_options or {}), mode='wt')
    with files[0] as f:
        f.write(self.serialize())
Output this catalog to a file as YAML Parameters ---------- url : str Location to save to, perhaps remote storage_options : dict Extra arguments for the file-system
def add_section(self, id_, parent_id, section_type, points):
    """Add a section.

    Args:
        id_(int): identifying number of the section; must be unique.
        parent_id(int): identifying number of the parent of this section.
        section_type(int): the section type as defined by POINT_TYPE.
        points: an array of [X, Y, Z, R] rows.
    """
    assert id_ not in self.sections, 'id %s already exists in sections' % id_
    self.sections[id_] = BlockNeuronBuilder.BlockSection(parent_id, section_type, points)
add a section Args: id_(int): identifying number of the section parent_id(int): identifying number of the parent of this section section_type(int): the section type as defined by POINT_TYPE points is an array of [X, Y, Z, R]
def get_value(self):
    """Return the value of the constant.

    Lazily evaluates the provider; returns None while the value is still
    not computed.
    """
    if self.value is not_computed:
        self.value = self.value_provider()
    return None if self.value is not_computed else self.value
Returns the value of the constant.
def sentinels(self, name):
    """Returns a list of sentinels for ``name``."""
    fut = self.execute(b'SENTINELS', name, encoding='utf-8')
    # Parse the raw reply into structured sentinel records asynchronously.
    return wait_convert(fut, parse_sentinel_slaves_and_sentinels)
Returns a list of sentinels for ``name``.
def _exclude_ipv4_networks(self, networks, networks_to_exclude): for network_to_exclude in networks_to_exclude: def _exclude_ipv4_network(network): try: return list(network.address_exclude(network_to_exclude)) except ValueError: if network.overlaps(network_to_exclude): return [] else: return [network] networks = list(map(_exclude_ipv4_network, networks)) networks = [ item for nested in networks for item in nested ] return networks
Exclude the list of networks from another list of networks and return a flat list of new networks. :param networks: List of IPv4 networks to exclude from :param networks_to_exclude: List of IPv4 networks to exclude :returns: Flat list of IPv4 networks
def start_cmd(cmd, descr, data):
    """Retain details about starting a command, returning a command identifier.

    NOTE(review): this body looks truncated in this source — ``entity_id``
    is assigned but never used and no identifier is returned; confirm
    against the original upstream source before relying on it.
    """
    if data and "provenance" in data:
        entity_id = tz.get_in(["provenance", "entity"], data)
Retain details about starting a command, returning a command identifier.
def _start(self):
    """Poll the mouse and fire the callback while the widget has focus.

    Runs until ``self._focus`` becomes falsy; the callback fires only when
    the value changed and at most once per ``self.interval`` milliseconds.
    """
    last_call = 42  # epoch seconds far in the past, so the first change fires
    while self._focus:
        sleep(1 / 100)
        mouse_pos = pygame.mouse.get_pos()
        previous = self.get()
        self.value_px = mouse_pos[0]
        if self.get() == previous:
            continue
        if last_call + self.interval / 1000 < time():
            last_call = time()
            self.func(self.get())
Starts checking if the SB is shifted
def _eval_variables(self):
    """Evaluate callable _variables in place, replacing each with its result."""
    for key, value in listitems(self._variables):
        self._variables[key] = value() if callable(value) else value
evaluates callable _variables
def post_ticket(self, title, body):
    """Post a ticket to the uservoice helpdesk.

    Parameters
    ==========
    title: the title (subject) of the issue
    body: the message to send
    """
    payload = {
        'email': self.email,
        'ticket': {'subject': title, 'message': body},
    }
    response = self.client.post("/api/v1/tickets.json", payload)['ticket']
    bot.info(response['url'])
post_ticket will post a ticket to the uservoice helpdesk Parameters ========== title: the title (subject) of the issue body: the message to send
def enable(self, size, block_size=None, store=None, store_sync_interval=None):
    """Enables shared queue of the given size.

    :param int size: Queue size.
    :param int block_size: Block size in bytes. Default: 8 KiB.
    :param str|unicode store: Persist the queue into file.
    :param int store_sync_interval: Store sync interval in master cycles
        (usually seconds).
    """
    options = (
        ('queue', size),
        ('queue-blocksize', block_size),
        ('queue-store', store),
        ('queue-store-sync', store_sync_interval),
    )
    for key, value in options:
        self._set(key, value)
    return self._section
Enables shared queue of the given size. :param int size: Queue size. :param int block_size: Block size in bytes. Default: 8 KiB. :param str|unicode store: Persist the queue into file. :param int store_sync_interval: Store sync interval in master cycles (usually seconds).
def cross_variance(self, x, z, sigmas_f, sigmas_h):
    """Compute cross variance of the state ``x`` and measurement ``z``.

    Weighted sum of outer products of the sigma-point residuals.
    """
    Pxz = zeros((sigmas_f.shape[1], sigmas_h.shape[1]))
    for i, (sf, sh) in enumerate(zip(sigmas_f, sigmas_h)):
        dx = self.residual_x(sf, x)
        dz = self.residual_z(sh, z)
        Pxz += self.Wc[i] * outer(dx, dz)
    return Pxz
Compute cross variance of the state `x` and measurement `z`.
def rot13_app(parser, cmd, args):
    """rot13 encrypt a value.

    The value is read from stdin when the positional argument is omitted.
    """
    parser.add_argument('value', help='the value to rot13, read from stdin if omitted', nargs='?')
    args = parser.parse_args(args)
    return rot13(pwnypack.main.string_value_or_stdin(args.value))
rot13 encrypt a value.
def authorize_client_credentials(self, client_id, client_secret=None,
                                 scope="private_agent"):
    """Authorize to the platform with client credentials.

    Use this when you possess a client_id/client_secret pair generated by
    the platform.
    """
    self.auth_data = {
        "grant_type": "client_credentials",
        "scope": [scope],
        "client_id": client_id,
        "client_secret": client_secret,
    }
    self._do_authorize()
Authorize to platform with client credentials This should be used if you posses client_id/client_secret pair generated by platform.
def _findLocation(self, reference_name, start, end): try: return self._locationMap['hg19'][reference_name][start][end] except: return None
Return a location key from the locationMap, or None if it is not present.
def fermion_avg(efermi, norm_hopping, func):
    """Calculate for every slave its average over the desired observable.

    ``func`` may be a callable, or one of the strings 'ekin'/'ocupation'
    selecting the corresponding zero-temperature Bethe-lattice function.
    """
    if func == 'ekin':
        func = bethe_ekin_zeroT
    elif func == 'ocupation':
        func = bethe_filling_zeroT
    pairs = zip(efermi, norm_hopping)
    return np.asarray([func(ef, hop) for ef, hop in pairs])
Calculates for every slave its average over the desired observable.
def connect(uri, factory=pymongo.MongoClient):
    """Use the factory to establish a connection to uri.

    Deprecated: call MongoClient directly instead.
    """
    warnings.warn(
        "do not use. Just call MongoClient directly.", DeprecationWarning)
    return factory(uri)
Use the factory to establish a connection to uri.
def bandwidth(self):
    """Computes the 'bandwidth' of a graph: the maximum absolute
    difference within any pair returned by ``self.pairs()``."""
    pair_diffs = np.diff(self.pairs(), axis=1)
    return np.abs(pair_diffs).max()
Computes the 'bandwidth' of a graph.
def getFileName(self, suffix=None, extension="jar"):
    """Returns the basename of the artifact's file, using Maven's conventions.

    In particular, it will be:
    <artifactId>-<version>[-<suffix>][.<extension>]
    """
    assert (self._artifactId is not None)
    assert (self._version is not None)
    suffix_part = "-" + suffix.lstrip("-") if suffix is not None else ""
    ext_part = "." + extension.lstrip(".") if extension is not None else ""
    return "{0}-{1}{2}{3}".format(self._artifactId,
                                  self._version.getRawString(),
                                  suffix_part, ext_part)
Returns the basename of the artifact's file, using Maven's conventions. In particular, it will be: <artifactId>-<version>[-<suffix>][.<extension>]
def responderForName(self, instance, commandName):
    """Look up a responder method by wire-level command name.

    @param instance: an instance of a class with methods exposed via this
        exposer's L{_AMPExposer.expose} method.
    @param commandName: the C{commandName} attribute of a L{Command}
        exposed on the given instance.
    @return: a bound method with a C{command} attribute.
    """
    method = super(_AMPExposer, self).get(instance, commandName)
    return method
When resolving a command to a method from the wire, the information available is the command's name; look up a command. @param instance: an instance of a class who has methods exposed via this exposer's L{_AMPExposer.expose} method. @param commandName: the C{commandName} attribute of a L{Command} exposed on the given instance. @return: a bound method with a C{command} attribute.
def _one_diagonal_capture_square(self, capture_square, position):
    """Yield capture move(s) for the given diagonal square, if capturable.

    A capture that lands on the promotion rank expands into one move per
    promotion piece.
    """
    if not self.contains_opposite_color_piece(capture_square, position):
        return
    if self.would_move_be_promotion():
        yield from self.create_promotion_moves(
            status=notation_const.CAPTURE_AND_PROMOTE,
            location=capture_square)
    else:
        yield self.create_move(end_loc=capture_square,
                               status=notation_const.CAPTURE)
Adds specified diagonal as a capture move if it is one
def acgt_match(string):
    """Return True if the string consists only of "A", "C", "G", "T"."""
    return re.fullmatch(r'[ACGT]*', string) is not None
Returns True if the string consists only of the characters "A", "C", "G", "T".
def delete_organization(session, organization):
    """Remove an organization from the session.

    Touches ``last_modified`` on the unique identities behind the
    organization's enrollments, then deletes the organization (related
    data such as domains or enrollments cascades) and flushes.

    :param session: database session
    :param organization: organization to remove
    """
    now = datetime.datetime.utcnow()
    for enrollment in organization.enrollments:
        enrollment.uidentity.last_modified = now
    session.delete(organization)
    session.flush()
Remove an organization from the session. Function that removes from the session the organization given in `organization`. Data related such as domains or enrollments are also removed. :param session: database session :param organization: organization to remove
def classify_single_recording(raw_data_json, model_folder, verbose=False):
    """Get the classification as a list of tuples.

    The first value is the LaTeX code, the second value is the probability;
    results are sorted by descending probability.
    """
    evaluation_file = evaluate_model(raw_data_json, model_folder, verbose)
    with open(os.path.join(model_folder, "info.yml")) as ymlfile:
        # SECURITY FIX: yaml.load without an explicit Loader can construct
        # arbitrary Python objects; the model description only needs plain
        # YAML, so use safe_load.
        model_description = yaml.safe_load(ymlfile)
    index2latex = get_index2latex(model_description)
    with open(evaluation_file) as f:
        probabilities = [float(p) for p in f.read().split(" ")]
    results = [(index2latex[index], probability)
               for index, probability in enumerate(probabilities)]
    return sorted(results, key=lambda item: item[1], reverse=True)
Get the classification as a list of tuples. The first value is the LaTeX code, the second value is the probability.
def getScriptLocation():
    """Helper function to get the directory containing this Python file.

    BUG FIX: the original searched for "/" by hand, which breaks on
    Windows-style paths; os.path handles both separators and always
    yields an absolute directory.
    """
    return os.path.dirname(os.path.abspath(__file__))
Helper function to get the location of a Python file.
def combine_keys(pks: Iterable[Ed25519PublicPoint]) -> Ed25519PublicPoint:
    """Combine a list of Ed25519 points into a "global" CoSi key."""
    points = (_ed25519.decodepoint(pk) for pk in pks)
    combined = reduce(_ed25519.edwards_add, points)
    return Ed25519PublicPoint(_ed25519.encodepoint(combined))
Combine a list of Ed25519 points into a "global" CoSi key.
def write_path(self, path_value):
    """Write ``path_value.value`` to ``self.path``.

    Creates the parent directory tree when missing. Any file already
    present at ``self.path`` is overwritten - be careful.

    :param path_value: object whose ``value`` attribute holds the text
        to write.
    """
    parent_dir = os.path.dirname(self.path)
    if parent_dir:
        # exist_ok=True replaces the old ``except OSError: pass``,
        # which also silently swallowed real errors (e.g. permissions).
        os.makedirs(parent_dir, exist_ok=True)
    with open(self.path, "w") as fph:
        fph.write(path_value.value)
Write the given value to ``self.path``, creating parent directories as needed. This will overwrite the destination path - be careful.
def get_environ(self, key, default=None, cast=None):
    """Get value from environment variable using os.environ.get

    :param key: The name of the setting value, will always be upper case
    :param default: In case of not found it will be returned
    :param cast: Should cast in to @int, @float, @bool or @json ?
                 or cast must be true to use cast inference
    :return: The value if found, default or None
    """
    value = self.environ.get(key.upper(), default)
    if not value:
        # Falsy values (missing, empty string) are returned untouched.
        return value
    if cast in converters:
        value = converters[cast](value)
    if cast is True:
        value = parse_conf_data(value, tomlfy=True)
    return value
Get value from environment variable using os.environ.get :param key: The name of the setting value, will always be upper case :param default: In case of not found it will be returned :param cast: Should cast in to @int, @float, @bool or @json ? or cast must be true to use cast inference :return: The value if found, default or None
def _add_res(line):
    """Parse a local-resource line of ``drbdadm status`` output.

    Flushes the previously collected ``resource`` dict (if any) into the
    module-level ``ret`` list, then starts a fresh ``resource`` record
    from the fields of this line.
    """
    global resource
    fields = line.strip().split()
    if resource:
        ret.append(resource)
    resource = {
        "resource name": fields[0],
        # e.g. "role:Primary" -> "Primary"
        "local role": fields[1].split(":")[1],
        "local volumes": [],
        "peer nodes": [],
    }
Parse a local-resource line from the output of ``drbdadm status``.
def get_projected_fields(self, req):
    """Returns the projected fields from request.

    Reads the ``projections`` entry of ``req.args`` (a JSON-encoded
    list of field names) and joins it into a comma-separated string.
    Returns ``None`` when the request has no usable projections.
    """
    try:
        projections = getattr(req, 'args', {}).get('projections')
        return ','.join(json.loads(projections))
    except (AttributeError, TypeError):
        # Missing/odd ``args`` object or absent ``projections`` key.
        return None
Returns the projected fields from request.
def decode_json(cls, dct):
    """Custom JSON decoder for Events.

    Can be used as the ``object_hook`` argument of ``json.load`` or
    ``json.loads``. Dictionaries without both ``event_name`` and
    ``event_values`` keys pass through unchanged.

    Raises:
        ValueError, if the event_name is unknown
    """
    if 'event_name' not in dct or 'event_values' not in dct:
        return dct
    name = dct['event_name']
    if name not in _CONCRETE_EVENT_CLASSES:
        raise ValueError("Could not find appropriate Event class for event_name: %r" % name)
    values = dct['event_values']
    # model_id travels separately; it is stripped before constructing
    # the event and re-attached afterwards.
    model_id = values.pop('model_id')
    event = _CONCRETE_EVENT_CLASSES[name](model=None, **values)
    event._model_id = model_id
    return event
Custom JSON decoder for Events. Can be used as the ``object_hook`` argument of ``json.load`` or ``json.loads``. Args: dct (dict) : a JSON dictionary to decode The dictionary should have keys ``event_name`` and ``event_values`` Raises: ValueError, if the event_name is unknown Examples: .. code-block:: python >>> import json >>> from bokeh.events import Event >>> data = '{"event_name": "pan", "event_values" : {"model_id": 1, "x": 10, "y": 20, "sx": 200, "sy": 37}}' >>> json.loads(data, object_hook=Event.decode_json) <bokeh.events.Pan object at 0x1040f84a8>
def _ordered_node_addrs(self, function_address):
    """
    For a given function, return all nodes in an optimal traversal order.
    If the function does not exist, return an empty list. Results are
    memoized per function address in ``self._function_node_addrs``.

    :param int function_address: Address of the function.
    :return:                     A ordered list of the nodes.
    :rtype:                      list
    """
    try:
        function = self.kb.functions[function_address]
    except KeyError:
        return []
    cached = self._function_node_addrs.get(function_address)
    if cached is None:
        ordered = CFGUtils.quasi_topological_sort_nodes(function.graph)
        cached = [node.addr for node in ordered]
        self._function_node_addrs[function_address] = cached
    return cached
For a given function, return all nodes in an optimal traversal order. If the function does not exist, return an empty list. :param int function_address: Address of the function. :return: A ordered list of the nodes. :rtype: list
def payload_register(ptype, klass, pid):
    """Let Juju know that a payload has been started.

    Used while a hook is running; invokes the ``payload-register`` hook
    tool with the payload type, class and pid.
    """
    subprocess.check_call(['payload-register', ptype, klass, pid])
Used while a hook is running to let Juju know that a payload has been started.
def format_response(self, response):
    """Normalize a boolean-like response string to ``'yes'`` or ``'no'``.

    Looks the response up in the shell configuration's BOOLEAN_STATES
    mapping. Raises ValueError when the response is not a recognized
    boolean state.
    """
    states = self.shell_ctx.config.BOOLEAN_STATES
    if response not in states:
        raise ValueError('Invalid response: input should equate to true or false')
    return 'yes' if states[response] else 'no'
Formats a boolean-like response as "yes" or "no"; raises ValueError when the input does not equate to true or false.
def asyncStarCmap(asyncCallable, iterable):
    """itertools.starmap for deferred callables using cooperative multitasking.

    NOTE(review): this is a Twisted inlineCallbacks-style generator -
    ``coopStar`` presumably applies ``asyncCallable`` to each item of
    ``iterable`` cooperatively, delivering each result to
    ``results.append``; the final list is returned via ``returnValue``.
    Verify against the definition of ``coopStar``.
    """
    results = []
    yield coopStar(asyncCallable, results.append, iterable)
    returnValue(results)
itertools.starmap for deferred callables using cooperative multitasking
def _Comparator(self, operator): if operator == "=": return lambda x, y: x == y elif operator == ">=": return lambda x, y: x >= y elif operator == ">": return lambda x, y: x > y elif operator == "<=": return lambda x, y: x <= y elif operator == "<": return lambda x, y: x < y elif operator == "!": return lambda x, y: x != y raise DefinitionError("Invalid comparison operator %s" % operator)
Generate lambdas for uid and gid comparison.
def wr_xlsx(self, fout_xlsx, goea_results, **kws):
    """Write GOEA results into an xlsx file.

    :param fout_xlsx: output xlsx filename.
    :param goea_results: GOEA result records to export.
    :param kws: optional 'prt_flds' (fields/columns to write) and
        'fld2col_widths' (column widths); defaults are filled in when
        absent, and ``kws`` is forwarded to the writer.
    """
    objprt = PrtFmt()
    # Columns to print: caller-provided or the default set for these results.
    prt_flds = kws.get('prt_flds', self.get_prtflds_default(goea_results))
    xlsx_data = MgrNtGOEAs(goea_results).get_goea_nts_prt(prt_flds, **kws)
    # Default each column width from the formatter, falling back to 8.
    if 'fld2col_widths' not in kws:
        kws['fld2col_widths'] = {f:objprt.default_fld2col_widths.get(f, 8) for f in prt_flds}
    RPT.wr_xlsx(fout_xlsx, xlsx_data, **kws)
Write a xlsx file.
def _attach_fields(obj):
    """Attach all the marshmallow fields classes to ``obj``, including
    Flask-Marshmallow's custom fields.

    Base marshmallow fields never overwrite an existing attribute on
    ``obj``; the custom fields always do.
    """
    for name in base_fields.__all__:
        if hasattr(obj, name):
            continue
        setattr(obj, name, getattr(base_fields, name))
    for name in fields.__all__:
        setattr(obj, name, getattr(fields, name))
Attach all the marshmallow fields classes to ``obj``, including Flask-Marshmallow's custom fields.
def cipher(self):
    """Generate AES-cipher

    :return: Crypto.Cipher.AES.AESCipher
    """
    # Note: ``self.mode()`` is intentionally called twice, once per
    # argument set, mirroring the original construction order.
    args = self.mode().aes_args()
    kwargs = self.mode().aes_kwargs()
    return WAES.WAESCipher(Cipher(*args, **kwargs))
Generate AES-cipher :return: Crypto.Cipher.AES.AESCipher
def get_object_or_none(model, *args, **kwargs):
    """Like get_object_or_404, but doesn't throw an exception.

    Allows querying for an object that might not exist; returns ``None``
    instead of raising ``model.DoesNotExist``.
    """
    manager = model._default_manager
    try:
        return manager.get(*args, **kwargs)
    except model.DoesNotExist:
        return None
Like get_object_or_404, but doesn't throw an exception. Allows querying for an object that might not exist without triggering an exception.
def date(self, year: Number, month: Number, day: Number) -> Date:
    """Takes three numbers and returns a ``Date`` object whose year,
    month, and day are the three numbers in that order."""
    return Date(year, month, day)
Takes three numbers and returns a ``Date`` object whose year, month, and day are the three numbers in that order.
def workflow_states_column(self, obj):
    """Return text description of workflow states assigned to object.

    Queries ``WorkflowState`` rows attached to ``obj`` via its content
    type and primary key, and joins their text representations with
    ", ".

    NOTE(review): uses the ``unicode`` builtin, so this code targets
    Python 2 - confirm before porting.
    """
    workflow_states = models.WorkflowState.objects.filter(
        content_type=self._get_obj_ct(obj),
        object_id=obj.pk,
    )
    return ', '.join([unicode(wfs) for wfs in workflow_states])
Return text description of workflow states assigned to object
def project_create(auth=None, **kwargs):
    """Create a project

    CLI Example:

    .. code-block:: bash

        salt '*' keystoneng.project_create name=project1
        salt '*' keystoneng.project_create name=project2 domain_id=b62e76fbeeff4e8fb77073f591cf211e
        salt '*' keystoneng.project_create name=project3 enabled=False description='my project3'
    """
    conn = get_openstack_cloud(auth)
    params = _clean_kwargs(keep_name=True, **kwargs)
    return conn.create_project(**params)
Create a project CLI Example: .. code-block:: bash salt '*' keystoneng.project_create name=project1 salt '*' keystoneng.project_create name=project2 domain_id=b62e76fbeeff4e8fb77073f591cf211e salt '*' keystoneng.project_create name=project3 enabled=False description='my project3'
def is_symmetric(self, symprec=0.1):
    """Checks if slab is symmetric, i.e., contains inversion symmetry.

    Args:
        symprec (float): Symmetry precision used for SpaceGroup analyzer.

    Returns:
        (bool) Whether slab contains inversion symmetry.
    """
    # Laue groups are exactly the centrosymmetric point groups, so
    # "is Laue" is equivalent to "contains inversion symmetry".
    sg = SpacegroupAnalyzer(self, symprec=symprec)
    return sg.is_laue()
Checks if slab is symmetric, i.e., contains inversion symmetry. Args: symprec (float): Symmetry precision used for SpaceGroup analyzer. Returns: (bool) Whether slab contains inversion symmetry.
def TimeField(formatter=types.DEFAULT_TIME_FORMAT, default=NOTHING, required=True, repr=True, cmp=True, key=None):
    """Create new time field on a model.

    :param formatter: time formatter string (default: "%H:%M:%S")
    :param default: any time or string that can be converted to a time value
    :param bool required: whether or not the object is invalid if not provided.
    :param bool repr: include this field should appear in object's repr.
    :param bool cmp: include this field in generated comparison.
    :param string key: override name of the value when converted to dict.
    """
    # Resolve the effective default/validator/converter, then build an
    # attrs attribute carrying the formatter and key in its metadata.
    default = _init_fields.init_default(required, default, None)
    validator = _init_fields.init_validator(required, time)
    converter = converters.to_time_field(formatter)
    return attrib(default=default, converter=converter, validator=validator, repr=repr, cmp=cmp, metadata=dict(formatter=formatter, key=key))
Create new time field on a model. :param formatter: time formatter string (default: "%H:%M:%S") :param default: any time or string that can be converted to a time value :param bool required: whether or not the object is invalid if not provided. :param bool repr: include this field should appear in object's repr. :param bool cmp: include this field in generated comparison. :param string key: override name of the value when converted to dict.
def rename(self, new_name, *args, **kwargs):
    """Rename this multireddit.

    This function is a handy shortcut to :meth:`rename_multireddit` of
    the reddit_session; this instance's state is replaced with the
    renamed multireddit's, and ``self`` is returned.
    """
    renamed = self.reddit_session.rename_multireddit(self.name, new_name,
                                                     *args, **kwargs)
    self.__dict__ = renamed.__dict__
    return self
Rename this multireddit. This function is a handy shortcut to :meth:`rename_multireddit` of the reddit_session.
def parse_authority(cls, authority):
    """Parse the authority part and return userinfo and host.

    Splits at the first ``@``; when no ``@`` is present the whole
    authority is the host and userinfo is empty.
    """
    at = authority.find('@')
    if at == -1:
        return '', authority
    return authority[:at], authority[at + 1:]
Parse the authority part and return userinfo and host.
def set_seat_logical_name(self, seat):
    """Change the logical seat associated with this device.

    This removes the device and re-adds it as a member of the new seat,
    as if it were physically re-plugged. See the libinput docs for the
    events generated and their lifetime implications.

    Args:
        seat (str): The new logical seat name.
    Raises:
        AssertionError
    """
    rc = self._libinput.libinput_device_set_seat_logical_name(
        self._handle, seat.encode())
    # Raise explicitly rather than using the ``assert`` statement, which
    # is stripped (and the failure silently ignored) under ``python -O``.
    if rc != 0:
        raise AssertionError('Cannot assign device to {}'.format(seat))
Change the logical seat associated with this device by removing the device and adding it to the new seat. This command is identical to physically unplugging the device, then re-plugging it as a member of the new seat. libinput will generate a :attr:`~libinput.constant.EventType.DEVICE_REMOVED` event and this :class:`Device` is considered removed from the context; it will not generate further events. A :attr:`~libinput.constant.EventType.DEVICE_ADDED` event is generated with a new :class:`Device`. It is the caller's responsibility to update references to the new device accordingly. If the logical seat name already exists in the device's physical seat, the device is added to this seat. Otherwise, a new seat is created. Note: This change applies to this device until removal or :meth:`~libinput.LibInput.suspend`, whichever happens earlier. Args: seat (str): The new logical seat name. Raises: AssertionError
def kwds(self):
    """Return all attributes as keywords dict.

    Forwards every sizing attribute of this instance to the module-level
    ``_kwds`` helper unchanged.
    """
    return _kwds(base=self.base, item=self.item, leng=self.leng, refs=self.refs, both=self.both, kind=self.kind, type=self.type)
Return all attributes as keywords dict.
def _downloaded_filename(self):
    """Download the package's archive if necessary, and return its filename.

    --no-deps is implied, as we have reimplemented the bits that would
    ordinarily do dependency resolution.

    Raises:
        UnsupportedRequirementError: when the link is a directory, has
            an unhashable scheme, or no download link can be found.
    """
    # Prefer an explicit link; otherwise ask the finder where to get it.
    link = self._link() or self._finder.find_requirement(self._req, upgrade=False)
    if link:
        lower_scheme = link.scheme.lower()
        if lower_scheme == 'http' or lower_scheme == 'https':
            # Remote archive: fetch it and hand back the local basename.
            file_path = self._download(link)
            return basename(file_path)
        elif lower_scheme == 'file':
            # Local file: must be a regular file (hashable), not a directory.
            link_path = url_to_path(link.url_without_fragment)
            if isdir(link_path):
                raise UnsupportedRequirementError(
                    "%s: %s is a directory. So that it can compute "
                    "a hash, peep supports only filesystem paths which "
                    "point to files" %
                    (self._req, link.url_without_fragment))
            else:
                # Copy into the working dir so later hashing sees it there.
                copy(link_path, self._temp_path)
                return basename(link_path)
        else:
            raise UnsupportedRequirementError(
                "%s: The download link, %s, would not result in a file "
                "that can be hashed. Peep supports only == requirements, "
                "file:// URLs pointing to files (not folders), and "
                "http:// and https:// URLs pointing to tarballs, zips, "
                "etc." % (self._req, link.url))
    else:
        raise UnsupportedRequirementError(
            "%s: couldn't determine where to download this requirement from."
            % (self._req,))
Download the package's archive if necessary, and return its filename. --no-deps is implied, as we have reimplemented the bits that would ordinarily do dependency resolution.
def rndbytes(size=16, alphabet=""):
    """Return the result of ``rndstr`` always as a binary type.

    String results are UTF-8 encoded; byte results pass through as-is.
    """
    value = rndstr(size, alphabet)
    if isinstance(value, six.string_types):
        value = value.encode('utf-8')
    return value
Returns rndstr always as a binary type
def _on_login(self, user):
    """Callback called whenever the login or sign up process completes.

    Caches whether the account is a bot and its input peer, and marks
    the client as authorized.

    Returns the input user parameter.
    """
    self._bot = bool(user.bot)
    # allow_self=False: store the real InputPeerUser, not InputPeerSelf.
    self._self_input_peer = utils.get_input_peer(user, allow_self=False)
    self._authorized = True
    return user
Callback called whenever the login or sign up process completes. Returns the input user parameter.
def collect_github_config():
    """Try load Github configuration such as usernames from the local or
    global git config.

    Missing git, missing keys, and git errors are all tolerated; absent
    fields are simply left out of the returned dict.
    """
    config = {}
    for field in ("user", "token"):
        key = "github.{}".format(field)
        try:
            raw = subprocess.check_output(["git", "config", key])
        except (OSError, subprocess.CalledProcessError):
            continue
        config[field] = raw.decode('utf-8').strip()
    return config
Try load Github configuration such as usernames from the local or global git config
def addPolylineAnnot(self, points):
    """Add a 'Polyline' annotation for a sequence of points.

    Returns the new annotation object, or the falsy result of the
    underlying fitz call when creation fails.
    """
    CheckParent(self)
    val = _fitz.Page_addPolylineAnnot(self, points)
    if not val:
        return
    # Take ownership of the C object and keep only a weak back-reference
    # to the page so the annotation does not keep the page alive.
    val.thisown = True
    val.parent = weakref.proxy(self)
    self._annot_refs[id(val)] = val
    return val
Add a 'Polyline' annotation for a sequence of points.