code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def is_older_than(before, seconds):
    """Return True if `before` is older than `seconds` seconds ago.

    `before` may be a time string (parsed with ``parse_strtime``) or a
    datetime; any tzinfo is dropped before comparing against ``utcnow()``.
    """
    if isinstance(before, six.string_types):
        before = parse_strtime(before)
    before = before.replace(tzinfo=None)
    age = utcnow() - before
    return age > datetime.timedelta(seconds=seconds)
Return True if before is older than seconds.
def create_store(self):
    """Create and return the store object for this pump.

    Delegates to ``store_class.load``; raises NotImplementedError when no
    store class has been configured.
    """
    store_cls = self.store_class
    if store_cls is None:
        raise NotImplementedError(
            "You need to specify PyPump.store_class or override "
            "PyPump.create_store method.")
    return store_cls.load(self.client.webfinger, self)
Creates store object
def _add_route(self, method, path, middleware=None): if middleware is not None: self.add(method, path, middleware) return self else: return lambda func: ( self.add(method, path, func), func )[1]
The implementation of adding a route
def tag_details(tag, nodenames): details = {} details['type'] = tag.name details['ordinal'] = tag_ordinal(tag) if tag_details_sibling_ordinal(tag): details['sibling_ordinal'] = tag_details_sibling_ordinal(tag) if tag_details_asset(tag): details['asset'] = tag_details_asset(tag) o...
Used in media and graphics to extract data from their parent tags
def crop_to_sheet(self, sheet_coord_system):
    """Crop the slice to the SheetCoordinateSystem's bounds.

    Clamps the row range (indices 0-1) to [0, maxrow] and the column
    range (indices 2-3) to [0, maxcol], mutating self in place.
    """
    maxrow, maxcol = sheet_coord_system.shape
    self[0] = max(self[0], 0)
    self[1] = min(self[1], maxrow)
    self[2] = max(self[2], 0)
    self[3] = min(self[3], maxcol)
Crop the slice to the SheetCoordinateSystem's bounds.
def authenticate_redirect(self, callback_uri=None, ask_for=["name", "email", "language", "username"]): callback_uri = callback_uri or request.url args = self._openid_args(callback_uri, ax_attrs=ask_for) return redirect(self._OPENID_ENDPOINT + ...
Performs a redirect to the authentication URL for this service. After authentication, the service will redirect back to the given callback URI. We request the given attributes for the authenticated user by default (name, email, language, and username). If you don't need all tho...
def initialize(self):
    """Initialize instance attributes; may be overridden in subclasses."""
    self.main_pid = os.getpid()
    # Service processes are registered before the tornado workers,
    # matching the order the helpers are invoked.
    service_procs = self.init_service_processes()
    self.processes.extend(service_procs)
    worker_procs = self.init_tornado_workers()
    self.processes.extend(worker_procs)
Initialize instance attributes. You can override this method in the subclasses.
def _make_axes_dict(self, axes): if type(axes) is dict: axdict = axes elif type(axes) is Axis: ax = axes axdict = {ax.axis_type: ax} elif axes is None: axdict = {'empty': None} else: raise ValueError('axes needs to be Axis objec...
Makes an axes dictionary. .. note:: In case the input is ``None``, the dictionary :code:`{'empty': None}` is returned. **Function-call argument** \n :param axes: axes input :type axes: dict or single instance of :class:`~climlab....
def expand_filename_pattern(pattern, base_dir):
    """Expand a file name pattern containing wildcards, env variables etc.

    @param pattern: The pattern string to expand.
    @param base_dir: The directory relative paths are based on.
    @return: A list of file names (possibly empty).
    """
    absolute = os.path.normpath(os.path.join(base_dir, pattern))
    expanded = os.path.expandvars(os.path.expanduser(absolute))
    return glob.glob(expanded)
Expand a file name pattern containing wildcards, environment variables etc. @param pattern: The pattern string to expand. @param base_dir: The directory where relative paths are based on. @return: A list of file names (possibly empty).
def put(self, url_path, data): request_url = "%s%s" % (self._url, url_path) data = ElementTree.tostring(data) response = self.session.put(request_url, data) if response.status_code == 201 and self.verbose: print "PUT %s: Success." % request_url elif response.status_co...
Update an existing object on the JSS. In general, it is better to use a higher level interface for updating objects, namely, making changes to a JSSObject subclass and then using its save method. Args: url_path: String API endpoint path to PUT, with ID (e.g. ...
def addToTimeInv(self, *params):
    """Add any number of parameter names to time_inv for this instance.

    Parameters
    ----------
    params : string
        Attribute names to append to ``time_inv`` (duplicates skipped).

    Returns
    -------
    None
    """
    for name in params:
        if name in self.time_inv:
            continue
        self.time_inv.append(name)
Adds any number of parameters to time_inv for this instance. Parameters ---------- params : string Any number of strings naming attributes to be added to time_inv Returns ------- None
def trace_module(no_print=True): with pexdoc.ExDocCxt() as exdoc_obj: try: docs.support.my_module.func("John") obj = docs.support.my_module.MyClass() obj.value = 5 obj.value except: raise RuntimeError("Tracing did not complete successfully"...
Trace my_module_original exceptions.
def changePermissionsRecursively(path, uid, gid): os.chown(path, uid, gid) for item in os.listdir(path): itempath = os.path.join(path, item) if os.path.isfile(itempath): try: os.chown(itempath, uid, gid) except Exception as e: pass ...
Function to recursively change the user id and group id. It sets 700 permissions.
def publish_avatar_set(self, avatar_set): id_ = avatar_set.png_id done = False with (yield from self._publish_lock): if (yield from self._pep.available()): yield from self._pep.publish( namespaces.xep0084_data, avatar_xso.Data(a...
Make `avatar_set` the current avatar of the jid associated with this connection. If :attr:`synchronize_vcard` is true and PEP is available the vCard is only synchronized if the PEP update is successful. This means publishing the ``image/png`` avatar data and the avatar metadata...
def add(self, record):
    """Add a reference to the provided record.

    Validates the record against the field, stores it keyed by its id,
    then re-synchronizes the field.
    """
    self._field.validate_value(record)
    self._elements[record.id] = record
    self._sync_field()
Add a reference to the provided record
def claim_token(self, **params): self._json_params.update(params) success, self.user = self.Model.authenticate_by_password( self._json_params) if success: headers = remember(self.request, self.user.username) return JHTTPOk('Token claimed', headers=headers) ...
Claim current token by POSTing 'login' and 'password'. User's `Authorization` header value is returned in `WWW-Authenticate` header.
def _update_params(self, constants): for k, v in constants.items(): self.params[k]['value'] *= v influence = self._calculate_influence(self.params['infl']['value']) return influence * self.params['lr']['value']
Update params and return new influence.
def get_targets(self):
    """Return the named targets specified in the config.

    Returns:
        list: a list of Target objects, built lazily on first call and
        cached on ``self._targets`` afterwards.
    """
    if hasattr(self, "_targets"):
        return self._targets
    self._targets = [Target(defn) for defn in (self.config.targets or [])]
    return self._targets
Returns the named targets that are specified in the config. Returns: list: a list of :class:`stacker.target.Target` objects
def assert_in(first, second, msg_fmt="{msg}"):
    """Fail if `first` is not contained in collection `second`.

    msg_fmt placeholders: msg (default message), first, second.
    """
    if first in second:
        return
    message = "{!r} not in {!r}".format(first, second)
    fail(msg_fmt.format(msg=message, first=first, second=second))
Fail if first is not in collection second. >>> assert_in("foo", [4, "foo", {}]) >>> assert_in("bar", [4, "foo", {}]) Traceback (most recent call last): ... AssertionError: 'bar' not in [4, 'foo', {}] The following msg_fmt arguments are supported: * msg - the default error message *...
def get_auth_server(domain, allow_http=False):
    """Retrieve the AUTH_SERVER config from a domain's stellar.toml.

    :param str domain: The domain the .toml file is hosted at.
    :param bool allow_http: Allow plain HTTP instead of HTTPS.
    :return: The AUTH_SERVER url, or None if the toml could not be read.
    """
    toml_data = get_stellar_toml(domain, allow_http)
    return toml_data.get('AUTH_SERVER') if toml_data else None
Retrieve the AUTH_SERVER config from a domain's stellar.toml. :param str domain: The domain the .toml file is hosted at. :param bool allow_http: Specifies whether the request should go over plain HTTP vs HTTPS. Note it is recommend that you *always* use HTTPS. :return str: The AUTH_SERVER url.
def value(self):
    """Return this type's value (unwrapping enums and bitmasks)."""
    if self.isenum():
        inner = self._value
        # Unwrap only when the stored value really is the enum type.
        if isinstance(inner, self.enum_ref):
            return inner.value
        return inner
    if self.is_bitmask():
        return self._value.bitmask
    return self._value
Return this type's value. Returns: object: The value of an enum, bitmask, etc.
def get_insns(cls=None):
    """Collect all generatable Instruction classes below `cls`.

    Walks the subclass tree (defaulting to ``Instruction``) and keeps
    only classes that define ``_mnemonic`` in their own dict, i.e. that
    can actually be generated.
    """
    if cls is None:
        cls = Instruction
    found = [cls] if "_mnemonic" in vars(cls) else []
    for child in cls.__subclasses__():
        found += get_insns(child)
    return found
Get all Instructions. This is based on all known subclasses of `cls`. If non is given, all Instructions are returned. Only such instructions are returned that can be generated, i.e., that have a mnemonic, opcode, etc. So other classes in the hierarchy are not matched. :param cls: Base class to get list ...
def register(self, name, content, description=None):
    """Register a new document with the application.

    :param name: Unique name of the document.
    :param content: Content of the document (str).
    :param description: Optional short description.
    """
    documents = self.__app.documents
    return documents.register(name, content, self._plugin, description)
Register a new document. :param content: Content of this document. Jinja and rst are supported. :type content: str :param name: Unique name of the document for documentation purposes. :param description: Short description of this document
def auto_flexdock(self, binding_residues, radius, ligand_path=None, force_rerun=False): log.debug('\n{}: running DOCK6...\n' '\tBinding residues: {}\n' '\tBinding residues radius: {}\n' '\tLigand to dock: {}\n'.format(self.id, binding_residues, radius, op.basen...
Run DOCK6 on a PDB file, given its binding residues and a radius around them. Provide a path to a ligand to dock a ligand to it. If no ligand is provided, DOCK6 preparations will be run on that structure file. Args: binding_residues (str): Comma separated string of residues (eg: '1...
def _convert_pooling_param(param): param_string = "pooling_convention='full', " if param.global_pooling: param_string += "global_pool=True, kernel=(1,1)" else: param_string += "pad=(%d,%d), kernel=(%d,%d), stride=(%d,%d)" % ( param.pad, param.pad, param.kernel_size, param.kernel_...
Convert the pooling layer parameter
def indentation(logical_line, previous_logical, indent_char, indent_level, previous_indent_level): r c = 0 if logical_line else 3 tmpl = "E11%d %s" if logical_line else "E11%d %s (comment)" if indent_level % 4: yield 0, tmpl % (1 + c, "indentation is not a multiple of four") ...
r"""Use 4 spaces per indentation level. For really old code that you don't want to mess up, you can continue to use 8-space tabs. Okay: a = 1 Okay: if a == 0:\n a = 1 E111: a = 1 E114: # a = 1 Okay: for item in items:\n pass E112: for item in items:\npass E115: for item ...
def recycle_view(self, position):
    """Tell the view to render the item at the given position.

    Positions past the end reset the declaration to index -1 / item None.
    (NOTE(review): negative positions index from the end via Python list
    semantics — confirm callers never pass them.)
    """
    d = self.declaration
    items = d.parent.items
    if position < len(items):
        d.index = position
        d.item = items[position]
    else:
        d.index = -1
        d.item = None
Tell the view to render the item at the given position
def set_ontime(self, ontime):
    """Set the duration the switch stays on when toggled.

    Returns False when `ontime` cannot be converted to float.
    """
    try:
        seconds = float(ontime)
    except Exception as err:
        LOG.debug("SwitchPowermeter.set_ontime: Exception %s" % (err,))
        return False
    self.actionNodeData("ON_TIME", seconds)
Set the duration the switch stays on when toggled.
def unindex_model_on_delete(sender, document, **kwargs):
    """Unindex a Mongo document on post_delete (when AUTO_INDEX is set)."""
    auto_index = current_app.config.get('AUTO_INDEX')
    if auto_index:
        unindex.delay(document)
Unindex Mongo document on post_delete
def get_themes(urls): length = len(urls) counter = 1 widgets = ['Fetching themes:', Percentage(), ' ', Bar(marker='-'), ' ', ETA()] pbar = ProgressBar( widgets=widgets, maxval=length ).start() for i in urls.keys(): href = 'http://dotshare.it/dots/%s/0/raw/' % urls[i] t...
takes in dict of names and urls, downloads and saves files
def removeProperty(self, prop: str) -> str:
    """Remove the css property and return its previous value.

    Returns None when the property was absent (or stored as None).
    """
    previous = self.get(prop)
    if previous is None:
        return previous
    del self[prop]
    return previous
Remove the css property.
def add_verified_read(self):
    """Add ``read`` perm for all verified subjects.

    Public ``read`` is removed if present, so access becomes restricted
    to verified subjects only.
    """
    self.remove_perm(d1_common.const.SUBJECT_PUBLIC, 'read')
    self.add_perm(d1_common.const.SUBJECT_VERIFIED, 'read')
Add ``read`` perm for all verified subj. Public ``read`` is removed if present.
def create_group_dampening(self, group_id, dampening):
    """Create a new group dampening.

    :param group_id: Group Trigger id attached to the dampening
    :param dampening: Dampening definition to be created.
    :return: the created group Dampening
    """
    payload = self._serialize_object(dampening)
    url = self._service_url(['triggers', 'groups', group_id, 'dampenings'])
    response = self._post(url, payload)
    return Dampening(response)
Create a new group dampening :param group_id: Group Trigger id attached to dampening :param dampening: Dampening definition to be created. :type dampening: Dampening :return: Group Dampening created
def _add_to(self, db, index, item, default=OOSet):
    """Add `item` to `db` under `index`, creating the container if needed.

    Args:
        db: dict-like database connection object.
        index (str): key to look up in `db`.
        item: persistent object to store.
        default: factory for a new container (defaults to OOSet).
    """
    container = db.get(index, None)
    if container is None:
        container = default()
        db[index] = container
    container.add(item)
Add `item` to `db` under `index`. If `index` is not yet in `db`, create it using `default`. Args: db (dict-obj): Dict-like object used to connect to database. index (str): Index used to look in `db`. item (obj): Persistent object, which may be stored in DB. ...
def get_event(self, name, default=_sentinel):
    """Look up an event by name.

    Creates the event on access when ``create_events_on_access`` is set;
    otherwise returns `default` when given, or raises KeyError.
    """
    if name in self.events:
        return self.events[name]
    if self.create_events_on_access:
        self.add_event(name)
    elif default is not _sentinel:
        return default
    # Raises KeyError when the event still does not exist.
    return self.events[name]
Lookup an event by name. :param str item: Event name :return Event: Event instance under key
def get_angles(self) -> Tuple[List[float], List[float]]: stacked_params = np.hstack((self.betas, self.gammas)) vqe = VQE(self.minimizer, minimizer_args=self.minimizer_args, minimizer_kwargs=self.minimizer_kwargs) cost_ham = reduce(lambda x, y: x + y, self.cost_ham) para...
Finds optimal angles with the quantum variational eigensolver method. Stored VQE result :returns: A tuple of the beta angles and the gamma angles for the optimal solution.
def parse_format(self):
    """Check the format parameter.

    Format values in the IIIF specification are lowercase alphanumeric
    values commonly used as file extensions; only a limited sanity check
    on characters and length is performed here.

    :raises IIIFRequestError: if the format fails the pattern check.
    """
    # NOTE(review): `r` appears to be a module-level regex pattern
    # defined elsewhere in the original file -- confirm its definition.
    if (self.format is not None and
            not re.match(r, self.format)):
        raise IIIFRequestError(
            parameter='format',
            text='Bad format parameter')
Check format parameter. All formats values listed in the specification are lowercase alphanumeric value commonly used as file extensions. To leave opportunity for extension here just do a limited sanity check on characters and length.
def _parse_expiry(response_data): expires_in = response_data.get('expires_in', None) if expires_in is not None: return _helpers.utcnow() + datetime.timedelta( seconds=expires_in) else: return None
Parses the expiry field from a response into a datetime. Args: response_data (Mapping): The JSON-parsed response data. Returns: Optional[datetime]: The expiration or ``None`` if no expiration was specified.
def api_url(self):
    """Return the API URL of this bin.

    Built from the shared ``Bin.path``, this bin's name, and the owning
    service's base URL via the project's ``pathjoin`` helper.
    """
    return pathjoin(Bin.path, self.name, url=self.service.url)
Return the API URL of self.
def process_pool(self, limited_run=False): from multiprocessing import cpu_count from ambry.bundle.concurrent import Pool, init_library if self.processes: cpus = self.processes else: cpus = cpu_count() self.logger.info('Starting MP pool with {} processors'...
Return a pool for multiprocess operations, sized either to the number of CPUS, or a configured value
async def stop(self):
    """Stop discarding media: cancel all track tasks and clear them."""
    for task in self.__tracks.values():
        if task is None:
            continue
        task.cancel()
    self.__tracks = {}
Stop discarding media.
def storage_record2pairwise_info(storec: StorageRecord) -> PairwiseInfo:
    """Convert a pairwise storage record to a PairwiseInfo.

    :param storec: (non-secret) storage record to convert
    :return: PairwiseInfo on record DIDs, verkeys, metadata (a single
        leading '~' is stripped from each tag name)
    """
    tags = storec.tags
    metadata = {
        (name[1:] if name.startswith('~') else name): tags[name]
        for name in (tags or {})
    }
    return PairwiseInfo(
        storec.id,
        storec.value,
        tags['~my_did'],
        tags['~my_verkey'],
        metadata)
Given indy-sdk non_secrets implementation of pairwise storage record dict, return corresponding PairwiseInfo. :param storec: (non-secret) storage record to convert to PairwiseInfo :return: PairwiseInfo on record DIDs, verkeys, metadata
def intersection(self, other):
    """Return the intersection of the taxon label sets of two trees."""
    return self.labels & other.labels
Returns the intersection of the taxon sets of two Trees
def _diff_disk_lists(old, new): targets = [] prefixes = ['fd', 'hd', 'vd', 'sd', 'xvd', 'ubd'] for disk in new: target_node = disk.find('target') target = target_node.get('dev') prefix = [item for item in prefixes if target.startswith(item)][0] new_target = ['{0}{1}'.format(p...
Compare disk definitions to extract the changes and fix target devices :param old: list of ElementTree nodes representing the old disks :param new: list of ElementTree nodes representing the new disks
def setFileSecurity(
    self,
    fileName,
    securityInformation,
    securityDescriptor,
    lengthSecurityDescriptorBuffer,
    dokanFileInfo,
):
    """Set security attributes of a file.

    :param fileName: name of the file to set security for
    :param securityInformation: new security information
    :param securityDescriptor: new security descriptor
    :param lengthSecurityDescriptorBuffer: length of the descriptor buffer
    :param dokanFileInfo: Dokan file info structure

    Note: only ``fileName`` is forwarded to the operations handler; the
    remaining arguments are currently ignored by this dispatch.
    """
    return self.operations('setFileSecurity', fileName)
Set security attributes of a file. :param fileName: name of file to set security for :type fileName: ctypes.c_wchar_p :param securityInformation: new security information :type securityInformation: PSECURITY_INFORMATION :param securityDescriptor: newsecurity descriptor :...
def _validate(self, field, list_attribute): if not self.scripts_added: self._generate_validation_scripts() self.id_generator.generate_id(field) self.script_list_fields_with_validation.append_text( 'hatemileValidationList.' + list_attribute + '.push...
Validate the field when its value change. :param field: The field. :param list_attribute: The list attribute of field with validation.
def set_brightness(self, brightness):
    """Set the brightness value of an rgb bulb.

    Args:
        brightness (int): brightness value, must be within 25-255.

    Raises:
        ValueError: when the value is out of range.
    """
    if not 25 <= brightness <= 255:
        raise ValueError("The brightness needs to be between 25 and 255.")
    payload = self.generate_payload(SET, {self.DPS_INDEX_BRIGHTNESS: brightness})
    return self._send_receive(payload)
Set the brightness value of an rgb bulb. Args: brightness(int): Value for the brightness (25-255).
def _asyncio_open_serial_windows(path):
    """Open a Windows named pipe.

    Waits (generator-style coroutine) for the named pipe to be created
    before wrapping it.

    :param path: path of the named pipe
    :returns: an IO-like ``WindowsPipe`` object
    :raises NodeError: if the pipe file does not appear before timeout
    """
    try:
        yield from wait_for_named_pipe_creation(path)
    except asyncio.TimeoutError:
        raise NodeError('Pipe file "{}" is missing'.format(path))
    return WindowsPipe(path)
Open a windows named pipe :returns: An IO like object
def _blocked(self, args): reason = args.read_shortstr() if self.on_blocked: return self.on_blocked(reason)
RabbitMQ Extension.
def lrem(self, key, count, value):
    """Remove the first `count` occurrences of `value` from list at `key`.

    :raises TypeError: if count is not an int
    """
    if isinstance(count, int):
        return self.execute(b'LREM', key, count, value)
    raise TypeError("count argument must be int")
Removes the first count occurrences of elements equal to value from the list stored at key. :raises TypeError: if count is not int
def is_holiday(self, day, extra_holidays=None):
    """Return True if `day` is a holiday.

    Dates passed in ``extra_holidays`` count as holidays even when they
    are not in the regular calendar for that year.
    """
    day = cleaned_date(day)
    if extra_holidays:
        extras = tuple(cleaned_date(item) for item in extra_holidays)
        if day in extras:
            return True
    return day in self.holidays_set(day.year)
Return True if it's a holiday. In addition to the regular holidays, you can add exceptions. By providing ``extra_holidays``, you'll state that these dates **are** holidays, even if not in the regular calendar holidays (or weekends).
def _combine_to_jointcaller(processed): by_vrn_file = collections.OrderedDict() for data in (x[0] for x in processed): key = (tz.get_in(("config", "algorithm", "jointcaller"), data), data["vrn_file"]) if key not in by_vrn_file: by_vrn_file[key] = [] by_vrn_file[key].append(da...
Add joint calling information to variants, while collapsing independent regions.
def service_account_path(cls, project, service_account):
    """Return a fully-qualified service_account resource string.

    Expands ``projects/{project}/serviceAccounts/{service_account}``
    with the given identifiers.
    """
    return google.api_core.path_template.expand(
        "projects/{project}/serviceAccounts/{service_account}",
        project=project,
        service_account=service_account,
    )
Return a fully-qualified service_account string.
def list_group_users(self, group_id, **kwargs):
    """List users of a group.

    :param str group_id: The group ID (Required)
    :param int limit: The number of users to retrieve
    :param str order: Ordering direction, ascending (asc) or descending (desc)
    :param str after: Get users after/starting at the given user ID
    :returns: a paginated response of User objects
    """
    kwargs["group_id"] = group_id
    # Normalize the sort/order options before handing them to the API layer.
    kwargs = self._verify_sort_options(kwargs)
    api = self._get_api(iam.AccountAdminApi)
    return PaginatedResponse(api.get_users_of_group, lwrap_type=User, **kwargs)
List users of a group. :param str group_id: The group ID (Required) :param int limit: The number of users to retrieve :param str order: The ordering direction, ascending (asc) or descending (desc) :param str after: Get API keys after/starting at given user ID :returns: a list of...
def get_guild_member_by_id(self, guild_id: int, member_id: int) -> Dict[str, Any]:
    """Get a guild member by their id.

    Args:
        guild_id: snowflake id of the guild
        member_id: snowflake id of the member

    Returns:
        Dictionary data for the guild member.
    """
    endpoint = f'guilds/{guild_id}/members/{member_id}'
    return self._query(endpoint, 'GET')
Get a guild member by their id Args: guild_id: snowflake id of the guild member_id: snowflake id of the member Returns: Dictionary data for the guild member. Example: { "id": "41771983423143937", "...
def create_partitions(self, new_partitions, **kwargs): f, futmap = AdminClient._make_futures([x.topic for x in new_partitions], None, AdminClient._make_topics_result) super(AdminClient, self).create_partitions(ne...
Create additional partitions for the given topics. The future result() value is None. :param list(NewPartitions) new_partitions: New partitions to be created. :param float operation_timeout: Set broker's operation timeout in seconds, controlling how long the CreatePartitions ...
def hit(self, to_hit):
    """Add a new set to hit to the hitting set solver.

    Translates the objects into Boolean variable ids, adds the set as a
    hard clause, and adds a soft unit clause (weight 1) for each newly
    seen variable.

    :param to_hit: a new set to hit (iterable of objects)
    """
    variable_ids = [self.idpool.id(obj) for obj in to_hit]
    # Identify fresh variables BEFORE the clause is registered, since
    # add_clause may extend the oracle's variable map.
    fresh = [vid for vid in variable_ids if vid not in self.oracle.vmap.e2i]
    self.oracle.add_clause(variable_ids)
    for vid in fresh:
        self.oracle.add_clause([-vid], 1)
This method adds a new set to hit to the hitting set solver. This is done by translating the input iterable of objects into a list of Boolean variables in the MaxSAT problem formulation. :param to_hit: a new set to hit :type to_hit: iterable(obj)
def message_handler(type_, from_): def decorator(f): if asyncio.iscoroutinefunction(f): raise TypeError("message_handler must not be a coroutine function") aioxmpp.service.add_handler_spec( f, aioxmpp.service.HandlerSpec( (_apply_message_handler, (...
Register the decorated function as message handler. :param type_: Message type to listen for :type type_: :class:`~.MessageType` :param from_: Sender JIDs to listen for :type from_: :class:`aioxmpp.JID` or :data:`None` :raise TypeError: if the decorated object is a coroutine function .. seeals...
def iter(self, obj=None): 'Iterate through all keys considering context of obj. If obj is None, uses the context of the top sheet.' if obj is None and vd: obj = vd.sheet for o in self._mappings(obj): for k in self.keys(): for o2 in self[k]: ...
Iterate through all keys considering context of obj. If obj is None, uses the context of the top sheet.
async def jsk_hide(self, ctx: commands.Context):
    """Hide Jishaku from the help command."""
    bot_cog = self.jsk
    if bot_cog.hidden:
        return await ctx.send("Jishaku is already hidden.")
    bot_cog.hidden = True
    await ctx.send("Jishaku is now hidden.")
Hides Jishaku from the help command.
def _TerminateProcessByPid(self, pid): self._RaiseIfNotRegistered(pid) process = self._processes_per_pid[pid] self._TerminateProcess(process) self._StopMonitoringProcess(process)
Terminate a process that's monitored by the engine. Args: pid (int): process identifier (PID). Raises: KeyError: if the process is not registered with and monitored by the engine.
def invert(self, points): X = points if not points.ndim == 1 else points.reshape((points.size, 1)) wx, wy = self.wc rn = np.sqrt((X[0,:] - wx)**2 + (X[1,:] - wy)**2) phi = np.arctan2(X[1,:] - wy, X[0,:]-wx) r = np.tan(rn * self.lgamma) / self.lgamma; Y = np.ones(X.shape) ...
Invert the distortion Parameters ------------------ points : ndarray Input image points Returns ----------------- ndarray Undistorted points
def get_lower_bound(self):
    """Compute the lower bound to integrate cumulative density.

    Returns:
        float: minimum non-null percent-point value (at mean/10000)
        across all distributions.
    """
    candidates = [
        dist.percent_point(dist.mean / 10000)
        for dist in self.distribs.values()
    ]
    return min(bound for bound in candidates if not pd.isnull(bound))
Compute the lower bound to integrate cumulative density. Returns: float: lower bound for cumulative density integral.
def _extract_rows(self, rows): if rows is not None: rows = numpy.array(rows, ndmin=1, copy=False, dtype='i8') rows = numpy.unique(rows) maxrow = self._info['nrows']-1 if rows[0] < 0 or rows[-1] > maxrow: raise ValueError("rows must be in [%d,%d]" %...
Extract an array of rows from an input scalar or sequence
def fourier_series(x, *a):
    """Arbitrary-dimensionality fourier series.

    a[0] is a_0, a[1] is the interval/scale parameter w; the remaining
    parameters alternate sin and cos coefficients, n = (len(a)-2)/2.
    """
    result = a[0] / 2
    w = a[1]
    for n in range(2, len(a), 2):
        harmonic = n / 2
        result += a[n] * np.sin(harmonic * x * w)
        result += a[n + 1] * np.cos(harmonic * x * w)
    return result
Arbitrary dimensionality fourier series. The first parameter is a_0, and the second parameter is the interval/scale parameter. The parameters are altering sin and cos paramters. n = (len(a)-2)/2
def _rollaxis_right(A, num_rolls):
    """Roll the tensor `A` forward `num_rolls` times.

    Builds a permutation placing the last `num_rolls` axes first.
    """
    assert num_rolls > 0
    rank = tf.rank(A)
    rolled_axes = rank - num_rolls + tf.range(num_rolls)
    kept_axes = tf.range(rank - num_rolls)
    perm = tf.concat([rolled_axes, kept_axes], 0)
    return tf.transpose(A, perm)
Roll the tensor `A` forward `num_rolls` times
def purge_content(self, account_id, urls): if isinstance(urls, six.string_types): urls = [urls] content_list = [] for i in range(0, len(urls), MAX_URLS_PER_PURGE): content = self.account.purgeCache(urls[i:i + MAX_URLS_PER_PURGE], id=account_id) content_list.ex...
Purges one or more URLs from the CDN edge nodes. :param int account_id: the CDN account ID from which content should be purged. :param urls: a string or a list of strings representing the CDN URLs that should be purged. :returns: a list of Sof...
def _create_default_config_file(self): logger.info('Initialize Maya launcher, creating config file...\n') self.add_section(self.DEFAULTS) self.add_section(self.PATTERNS) self.add_section(self.ENVIRONMENTS) self.add_section(self.EXECUTABLES) self.set(self.DEFAULTS, '...
If config file does not exists create and set default values.
def reset_failed_attempts(ip_address=None, username=None):
    """Reset the failed login attempts for the given IP and username.

    Both unblock operations are queued on a single Redis pipeline and
    sent together in one round trip.

    :param ip_address: IP address to unblock, or None
    :param username: username to unblock, or None
    """
    pipe = REDIS_SERVER.pipeline()
    unblock_ip(ip_address, pipe=pipe)
    unblock_username(username, pipe=pipe)
    pipe.execute()
Reset the failed attempts for these IPs and usernames.
def show_env(environment): if not environment: print("You need to supply an environment name") return parser = read_config() try: commands = parser.get(environment, "cmd").split("\n") except KeyError: print("Unknown environment type '%s'" % environment) return ...
Show the commands for a given environment.
def obtain_check_classes(self): check_classes = set() for path in self.paths: for root, _, files in os.walk(path): for fi in files: if not fi.endswith(".py"): continue path = os.path.join(root, fi) ...
find children of AbstractCheck class and return them as a list
def poisson_ll(data, means): if sparse.issparse(data): return sparse_poisson_ll(data, means) genes, cells = data.shape clusters = means.shape[1] ll = np.zeros((cells, clusters)) for i in range(clusters): means_i = np.tile(means[:,i], (cells, 1)) means_i = means_i.transpose() ...
Calculates the Poisson log-likelihood. Args: data (array): 2d numpy array of genes x cells means (array): 2d numpy array of genes x k Returns: cells x k array of log-likelihood for each cell/cluster pair
def get_rollout_from_id(self, rollout_id):
    """Get rollout for the provided ID.

    Args:
        rollout_id: ID of the rollout to be fetched.

    Returns:
        Rollout corresponding to the provided ID, or None (with an
        error logged) when it is not in the datafile.
    """
    rollout = self.rollout_id_map.get(rollout_id)
    if rollout:
        return rollout
    self.logger.error('Rollout with ID "%s" is not in datafile.' % rollout_id)
    return None
Get rollout for the provided ID. Args: rollout_id: ID of the rollout to be fetched. Returns: Rollout corresponding to the provided ID.
def _validate_type(self, properties_spec, value): if 'type' not in properties_spec.keys(): def_name = self.get_definition_name_from_ref(properties_spec['$ref']) return self.validate_definition(def_name, value) elif properties_spec['type'] == 'array': if not isinstance...
Validate the given value with the given property spec. Args: properties_dict: specification of the property to check (From definition not route). value: value to check. Returns: True if the value is valid for the given spec.
def ex_call(func, args): if isinstance(func, str): func = ex_rvalue(func) args = list(args) for i in range(len(args)): if not isinstance(args[i], ast.expr): args[i] = ex_literal(args[i]) if sys.version_info[:2] < (3, 5): return ast.Call(func, args, [], None, None) ...
A function-call expression with only positional parameters. The function may be an expression or the name of a function. Each argument may be an expression or a value to be used as a literal.
def clear(self):
    """Reset all non-zero block values to 0.

    No-op when nothing is currently set (``_bit_count`` == 0).
    """
    if self._bit_count == 0:
        return
    doc_block = self._qpart.document().begin()
    while doc_block.isValid():
        if self.getBlockValue(doc_block):
            self.setBlockValue(doc_block, 0)
        doc_block = doc_block.next()
Convenience method to reset all the block values to 0
def wp_slope(self): last_w = None for i in range(1, self.wploader.count()): w = self.wploader.wp(i) if w.command not in [mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, mavutil.mavlink.MAV_CMD_NAV_LAND]: continue if last_w is not None: if last_w....
show slope of waypoints
def add_sponsor(self, type, name, **kwargs):
    """Associate a sponsor with this bill.

    :param type: the type of sponsorship, e.g. 'primary', 'cosponsor'
    :param name: the name of the sponsor as provided by the source
    """
    sponsor = {'type': type, 'name': name}
    sponsor.update(kwargs)
    self['sponsors'].append(sponsor)
Associate a sponsor with this bill. :param type: the type of sponsorship, e.g. 'primary', 'cosponsor' :param name: the name of the sponsor as provided by the official source
def attr_attributes_transform(node): node.locals["__attrs_attrs__"] = [astroid.Unknown(parent=node)] for cdefbodynode in node.body: if not isinstance(cdefbodynode, astroid.Assign): continue if isinstance(cdefbodynode.value, astroid.Call): if cdefbodynode.value.func.as_str...
Given that the ClassNode has an attr decorator, rewrite class attributes as instance attributes
def _register_entry_point_module(self, entry_point, module): records_map = self._map_entry_point_module(entry_point, module) self.store_records_for_package(entry_point, list(records_map.keys())) for module_name, records in records_map.items(): if module_name in self.records: ...
Private method that registers an entry_point with a provided module.
def list_keyvaults_sub(access_token, subscription_id): endpoint = ''.join([get_rm_endpoint(), '/subscriptions/', subscription_id, '/providers/Microsoft.KeyVault/vaults', '?api-version=', KEYVAULT_API]) return do_get_next(endpoint, access_to...
Lists key vaults belonging to this subscription. Args: access_token (str): A valid Azure authentication token. subscription_id (str): Azure subscription id. Returns: HTTP response. 200 OK.
def to_pillow_image(self, return_mask=False): img = np.rollaxis(np.rollaxis(self.image.data, 2), 2) img = Image.fromarray(img[:, :, 0]) if img.shape[2] == 1 else Image.fromarray(img) if return_mask: mask = np.ma.getmaskarray(self.image) mask = Image.fromarray(np.rollaxis(...
Return a Pillow Image, and optionally also the mask.
def storage(self): if self.backend == 'redis': return RedisBackend(self.prefix, self.secondary_indexes) if self.backend == 'dynamodb': return DynamoDBBackend(self.prefix, self.key, self.sort_key, self.secondary_indexes) return DictBacken...
Instantiates and returns a storage instance
def get_skill_by_name(nme, character):
    """Return the index of the named skill in a character's skill list.

    NOTE(review): falls back to 0 when no skill matches, which is
    indistinguishable from a match at index 0 — confirm callers expect
    this.
    """
    for index, skill in enumerate(character["skills"]):
        if skill["name"] == nme:
            return index
    return 0
returns the skill by name in a character
def add_tenant_user_role(request, project=None, user=None, role=None, group=None, domain=None): manager = keystoneclient(request, admin=True).roles if VERSIONS.active < 3: manager.add_user_role(user, role, project) else: manager.grant(role, user=user, project=project...
Adds a role for a user on a tenant.
def worker_config(queue, s3_key, period, verbose): logging.basicConfig(level=(verbose and logging.DEBUG or logging.INFO)) logging.getLogger('botocore').setLevel(logging.WARNING) logging.getLogger('s3transfer').setLevel(logging.WARNING) queue, region = get_queue(queue) factory = SessionFactory(region...
daemon queue worker for config notifications
def start(self, origin):
    """Start this Tracer.

    Records the start time, seeds the data stack from `origin`, installs
    the trace function, and returns it for use with sys.settrace().
    """
    self.start_time = time.time()
    self.pause_until = None
    origin_struct = self._get_struct(origin, 'origin')
    self.data.update(origin_struct)
    self.data_stack.append(self.data)
    sys.settrace(self._trace)
    return self._trace
Start this Tracer. Return a Python function suitable for use with sys.settrace().
def memoize(f):
    """Cache the decorated single-argument function's return values.

    Subsequent calls with the same argument return the cached value.
    """
    cache = {}

    @wraps(f)
    def wrapper(arg):
        try:
            return cache[arg]
        except KeyError:
            cache[arg] = f(arg)
            return cache[arg]

    return wrapper
Decorator which caches function's return value each it is called. If called later with same arguments, the cached value is returned.
def set_license(self, key):
    """Set the license on a redfish system.

    :param key: license key
    """
    data = {'LicenseKey': key}
    # Resolve the HPE OEM LicenseService endpoint and POST the key to it.
    license_service_uri = (utils.get_subresource_path_by(
        self, ['Oem', 'Hpe', 'Links', 'LicenseService']))
    self._conn.post(license_service_uri, data=data)
Set the license on a redfish system :param key: license key
def _delete_vdev_info(self, vdev): vdev = vdev.lower() rules_file_name = '/etc/udev/rules.d/51-qeth-0.0.%s.rules' % vdev cmd = 'rm -f %s\n' % rules_file_name address = '0.0.%s' % str(vdev).zfill(4) udev_file_name = '/etc/udev/rules.d/70-persistent-net.rules' cmd += "sed -...
handle udev rules file.
def partial_transform(self, traj): fingerprints = np.zeros((traj.n_frames, self.n_features)) atom_pairs = np.zeros((len(self.solvent_indices), 2)) sigma = self.sigma for i, solute_i in enumerate(self.solute_indices): atom_pairs[:, 0] = solute_i atom_pairs[:, 1] = ...
Featurize an MD trajectory into a vector space via calculation of solvent fingerprints Parameters ---------- traj : mdtraj.Trajectory A molecular dynamics trajectory to featurize. Returns ------- features : np.ndarray, dtype=float, shape=(n_samples, ...
def env_dn_dict(self, env_prefix, cert_value): if not cert_value: return {} env = {} for rdn in cert_value: for attr_name, val in rdn: attr_code = self.CERT_KEY_TO_LDAP_CODE.get(attr_name) if attr_code: env['%s_%s' % (en...
Return a dict of WSGI environment variables for a client cert DN. E.g. SSL_CLIENT_S_DN_CN, SSL_CLIENT_S_DN_C, etc. See SSL_CLIENT_S_DN_x509 at https://httpd.apache.org/docs/2.4/mod/mod_ssl.html#envvars.
def check_quirks(block_id, block_op, db_state): if op_get_opcode_name(block_op['op']) in OPCODE_NAME_NAMEOPS and op_get_opcode_name(block_op['op']) not in OPCODE_NAME_STATE_PREORDER: assert 'last_creation_op' in block_op, 'QUIRK BUG: missing last_creation_op in {}'.format(op_get_opcode_name(block_op['op']))...
Check that all serialization compatibility quirks have been preserved. Used primarily for testing.
def Record(self, obj):
    """Record the object as visited.

    Args:
      obj: visited object.

    Returns:
      True if the object hasn't been previously visited, False when it
      was already recorded or the quota has been exhausted.
    """
    if len(self._visit_recorder_objects) >= _MAX_VISIT_OBJECTS:
        return False
    key = id(obj)
    if key in self._visit_recorder_objects:
        return False
    self._visit_recorder_objects[key] = obj
    return True
Records the object as visited. Args: obj: visited object. Returns: True if the object hasn't been previously visited or False if it has already been recorded or the quota has been exhausted.
def get_cache_token(self, token):
    """Get token data from Redis and JSON-decode it.

    :raises CacheException: when Redis is not connected.
    :returns: decoded token data, or None when absent/empty.
    """
    if self.conn is None:
        raise CacheException('Redis is not connected')
    raw = self.conn.get(token)
    if not raw:
        return None
    return json.loads(raw)
Get token and data from Redis
def add_validation_fun(phase, keywords, f): for keyword in keywords: if (phase, keyword) in _validation_map: oldf = _validation_map[(phase, keyword)] def newf(ctx, s): oldf(ctx, s) f(ctx, s) _validation_map[(phase, keyword)] = newf ...
Add a validation function to some phase in the framework. Function `f` is called for each valid occurance of each keyword in `keywords`. Can be used by plugins to do special validation of extensions.
def export_private_key(self, password=None): if self.__private_key is None: raise ValueError('Unable to call this method. Private key must be set') if password is not None: if isinstance(password, str) is True: password = password.encode() return self.__private_key.private_bytes( encoding=serializa...
Export a private key in PEM-format :param password: If it is not None, then result will be encrypt with given password :return: bytes
def matrix(matrix, xlabel=None, ylabel=None, xticks=None, yticks=None, title=None, colorbar_shrink=0.5, color_map=None, show=None, save=None, ax=None): if ax is None: ax = pl.gca() img = ax.imshow(matrix, cmap=color_map) if xlabel is not None: ax.set_xlabel(xlabel) if ylabel is not...
Plot a matrix.
def grok_if_node(element, default_vars):
    """Parse an If node, collecting variables guarded by a `default` filter.

    Appends to ``default_vars`` in place, then returns it concatenated
    with the variables found by ``grok_vars`` on the element.
    """
    test_node = element.test
    is_default_filter = (isinstance(test_node, jinja2.nodes.Filter)
                         and test_node.name == 'default')
    if is_default_filter:
        default_vars.append(test_node.node.name)
    return default_vars + grok_vars(element)
Properly parses a If element
def save_or_update(self, cluster):
    """Save or update the cluster to persistent state.

    :param cluster: cluster to save or update
    :type cluster: :py:class:`elasticluster.cluster.Cluster`
    """
    # exist_ok avoids the TOCTOU race between an exists() check and
    # makedirs() when two processes save concurrently (Python 3.3+).
    os.makedirs(self.storage_path, exist_ok=True)
    path = self._get_cluster_storage_path(cluster.name)
    # Remember where this cluster was persisted so it can be reloaded.
    cluster.storage_file = path
    with open(path, 'wb') as storage:
        self.dump(cluster, storage)
Save or update the cluster to persistent state. :param cluster: cluster to save or update :type cluster: :py:class:`elasticluster.cluster.Cluster`