code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def is_left(point0, point1, point2):
    """Test whether ``point2`` is Left|On|Right of the infinite line P0->P1.

    2-D only.  Returns a positive number when point2 is left of the line
    through point0 and point1, zero when collinear, negative when right.
    """
    dx_line = point1[0] - point0[0]
    dy_line = point1[1] - point0[1]
    dx_pt = point2[0] - point0[0]
    dy_pt = point2[1] - point0[1]
    # 2-D cross product of (P1 - P0) and (P2 - P0).
    return dx_line * dy_pt - dx_pt * dy_line
Tests if a point is Left|On|Right of an infinite line. Ported from the C++ version: on http://geomalgorithms.com/a03-_inclusion.html .. note:: This implementation only works in 2-dimensional space. :param point0: Point P0 :param point1: Point P1 :param point2: Point P2 :return: >0 for...
def sign(self, consumer_secret, access_token_secret, method, url, oauth_params, req_kwargs):
    """Sign a request using the PLAINTEXT method.

    The signature is the escaped consumer secret, an ampersand, then the
    escaped access-token secret (omitted when falsy).  ``method``, ``url``,
    ``oauth_params`` and ``req_kwargs`` are unused by PLAINTEXT.
    """
    parts = [self._escape(consumer_secret), b'&']
    if access_token_secret:
        parts.append(self._escape(access_token_secret))
    return b''.join(parts).decode()
Sign request using PLAINTEXT method. :param consumer_secret: Consumer secret. :type consumer_secret: str :param access_token_secret: Access token secret (optional). :type access_token_secret: str :param method: Unused :type method: str :param url: Unused ...
def build_log_presenters(service_names, monochrome):
    """Yield an endless sequence of LogPresenter objects.

    Each presenter formats the log output of one container; color functions
    are cycled, or disabled entirely when ``monochrome`` is true.
    """
    prefix_width = max_name_width(service_names)

    def no_color(text):
        return text

    palette = [no_color] if monochrome else colors.rainbow()
    for color_func in cycle(palette):
        yield LogPresenter(prefix_width, color_func)
Return an iterable of functions. Each function can be used to format the logs output of a container.
def get_xname(self, var, coords=None): if coords is not None: coord = self.get_variable_by_axis(var, 'x', coords) if coord is not None and coord.name in var.dims: return coord.name dimlist = list(self.x.intersection(var.dims)) if dimlist: if le...
Get the name of the x-dimension This method gives the name of the x-dimension (which is not necessarily the name of the coordinate if the variable has a coordinate attribute) Parameters ---------- var: xarray.Variables The variable to get the dimension for c...
def process_file(filename, interval=None, lazy=False):
    """Process a CSXML file and return the populated MedscanProcessor.

    Consider running fix_csxml_character_encoding.py on the input first to
    repair any encoding issues.
    """
    processor = MedscanProcessor()
    processor.process_csxml_file(filename, interval, lazy)
    return processor
Process a CSXML file for its relevant information. Consider running the fix_csxml_character_encoding.py script in indra/sources/medscan to fix any encoding issues in the input file before processing. Attributes ---------- filename : str The csxml file, containing Medscan XML, to proces...
def stream(func): @wraps(func) def wrapped(manager, *args, **kwargs): offset, limit = kwargs.pop('_offset', None), kwargs.pop('_limit', None) qs = func(manager, *args, **kwargs) if isinstance(qs, dict): qs = manager.public(**qs) elif isinstance(qs, (list, tuple)): ...
Stream decorator to be applied to methods of an ``ActionManager`` subclass Syntax:: from actstream.decorators import stream from actstream.managers import ActionManager class MyManager(ActionManager): @stream def foobar(self, ...): ...
def collmat(self, tau, deriv_order=0): dummy = self.__call__(0.) nbasis = dummy.shape[0] tau = np.atleast_1d(tau) if tau.ndim > 1: raise ValueError("tau must be a list or a rank-1 array") A = np.empty( (tau.shape[0], nbasis), dtype=dummy.dtype ) f = self.diff(...
Compute collocation matrix. Parameters: tau: Python list or rank-1 array, collocation sites deriv_order: int, >=0, order of derivative for which to compute the collocation matrix. The default is 0, which means the function value itself. Returns: A: if len(tau) > 1, rank-2 a...
def require_minimum_pandas_version(): minimum_pandas_version = "0.19.2" from distutils.version import LooseVersion try: import pandas have_pandas = True except ImportError: have_pandas = False if not have_pandas: raise ImportError("Pandas >= %s must be installed; howe...
Raise ImportError if minimum version of Pandas is not installed
def get_timestamp(self, **kwargs):
    """Return the timestamp from ``kwargs`` or generate the current UTC one.

    When no truthy ``timestamp`` keyword is supplied, a UTC timestamp with
    millisecond precision and a trailing 'Z' is produced, e.g.
    ``2020-01-02T03:04:05.678Z``.

    :param kwargs: may contain ``timestamp`` which is passed through as-is.
    :return: the timestamp string.
    """
    timestamp = kwargs.get('timestamp')
    if not timestamp:
        now = datetime.datetime.utcnow()
        # Integer division: the original `now.microsecond / 1000` produced a
        # float and relied on '%03d' truncating it implicitly.
        millis = now.microsecond // 1000
        timestamp = now.strftime("%Y-%m-%dT%H:%M:%S") + ".%03d" % millis + "Z"
    return timestamp
Retrieves the timestamp for a given set of data
def EscapeWildcards(string):
    """Escape SQL LIKE wildcard characters in ``string``.

    Databases do not escape '%' and '_' automatically, so any non-literal
    string passed to `LIKE` that must match literally is escaped here.

    Args:
      string: a string to escape.
    Returns:
      The escaped string.
    """
    precondition.AssertType(string, Text)
    escaped = string.replace("%", r"\%")
    return escaped.replace("_", r"\_")
Escapes wildcard characters for strings intended to be used with `LIKE`. Databases don't automatically escape wildcard characters ('%', '_'), so any non-literal string that is passed to `LIKE` and is expected to match literally has to be manually escaped. Args: string: A string to escape. Returns: ...
def _replace_global_vars(xs, global_vars): if isinstance(xs, (list, tuple)): return [_replace_global_vars(x) for x in xs] elif isinstance(xs, dict): final = {} for k, v in xs.items(): if isinstance(v, six.string_types) and v in global_vars: v = global_vars[v] ...
Replace globally shared names from input header with value. The value of the `algorithm` item may be a pointer to a real file specified in the `global` section. If found, replace with the full value.
def _gist_is_preset(repo): _, gistid = repo.split("/") gist_template = "https://api.github.com/gists/{}" gist_path = gist_template.format(gistid) response = get(gist_path) if response.status_code == 404: return False try: data = response.json() except: return False ...
Evaluate whether gist is a be package Arguments: gist (str): username/id pair e.g. mottosso/2bb4651a05af85711cde
def set_ifo_tag(self, ifo_tag, pass_to_command_line=True):
    """Set the IFO tag that is passed to the analysis code.

    @param ifo_tag: a string identifying one or more IFOs.
    @param pass_to_command_line: when True, also add 'ifo-tag' as a
        variable option on the command line.
    """
    self.__ifo_tag = ifo_tag
    if not pass_to_command_line:
        return
    self.add_var_opt('ifo-tag', ifo_tag)
Set the ifo tag that is passed to the analysis code. @param ifo_tag: a string to identify one or more IFOs @param pass_to_command_line: add ifo-tag as a variable option.
def get_maximum_score_metadata(self):
    """Return the Metadata for the maximum score.

    return: (osid.Metadata) - metadata for the maximum score, including
        the existing cardinal values from the underlying map.
    """
    md = dict(self._mdata['maximum_score'])
    md['existing_cardinal_values'] = self._my_map['maximumScore']
    return Metadata(**md)
Gets the metadata for the maximum score. return: (osid.Metadata) - metadata for the maximum score *compliance: mandatory -- This method must be implemented.*
def _valid_request_body( self, cert_chain, signature, serialized_request_env): decoded_signature = base64.b64decode(signature) public_key = cert_chain.public_key() request_env_bytes = serialized_request_env.encode(CHARACTER_ENCODING) try: public_key.verify( ...
Validate the request body hash with signature. This method checks if the hash value of the request body matches with the hash value of the signature, decrypted using certificate chain. A :py:class:`VerificationException` is raised if there is a mismatch. :param cert_cha...
def alias_delete(indices, aliases, hosts=None, body=None, profile=None, source=None): es = _get_instance(hosts, profile) if source and body: message = 'Either body or source should be specified but not both.' raise SaltInvocationError(message) if source: body = __salt__['cp.get_file_...
Delete an alias of an index indices Single or multiple indices separated by comma, use _all to perform the operation on all indices. aliases Alias names separated by comma CLI example:: salt myminion elasticsearch.alias_delete testindex_v1 testindex
def config_get(args):
    """Retrieve a method config from a workspace and return it as pretty JSON."""
    response = fapi.get_workspace_config(args.project, args.workspace,
                                         args.namespace, args.config)
    fapi._check_response_code(response, 200)
    return json.dumps(response.json(), indent=4, separators=(',', ': '),
                      sort_keys=True, ensure_ascii=False)
Retrieve a method config from a workspace, send stdout
def iter_batches(iterable, batch_size):
    """Yield batches of ``batch_size`` items from ``iterable`` until exhausted.

    Each batch is itself a lazy iterator; consume one batch fully before
    advancing to the next, since all batches share the underlying iterator.

    :param iterable: the sequence or iterable to split into batches.
    :param int batch_size: number of elements per batch.
    """
    sourceiter = iter(iterable)
    while True:
        batchiter = islice(sourceiter, batch_size)
        try:
            # Peek one element so the generator stops cleanly when the source
            # is empty.  The original `batchiter.next()` was Python 2 only,
            # and let StopIteration escape the generator, which is a
            # RuntimeError on Python 3.7+ (PEP 479).
            first = next(batchiter)
        except StopIteration:
            return
        yield chain([first], batchiter)
Given a sequence or iterable, yield batches from that iterable until it runs out. Note that this function returns a generator, and also each batch will be a generator. :param iterable: The sequence or iterable to split into batches :param int batch_size: The number of elements of `iterable` to iterate ...
def get_scan(self, source_id, scan_id):
    """Fetch a single Scan object by source and scan id.

    :rtype: Scan
    """
    url = self.client.get_url(
        'SCAN', 'GET', 'single',
        {'source_id': source_id, 'scan_id': scan_id})
    manager = self.client.get_manager(Scan)
    return manager._get(url)
Get a Scan object :rtype: Scan
def _load_start_paths(self): " Start the Read-Eval-Print Loop. " if self._startup_paths: for path in self._startup_paths: if os.path.exists(path): with open(path, 'rb') as f: code = compile(f.read(), path, 'exec') ...
Start the Read-Eval-Print Loop.
def __redirect_stdio_emit(self, value):
    """Emit ``redirect_stdio`` on the nearest ancestor that defines it.

    Walks up the parent tree until a parent exposes a ``redirect_stdio``
    signal; this indirection allows testing SearchInComboBox's
    select_directory method outside of the FindInFiles plugin.
    """
    parent = self.parent()
    while parent is not None:
        try:
            parent.redirect_stdio.emit(value)
        except AttributeError:
            # This ancestor has no redirect_stdio signal; keep climbing.
            parent = parent.parent()
        else:
            # Emitted successfully; stop searching.
            break
Searches through the parent tree to see if it is possible to emit the redirect_stdio signal. This logic allows to test the SearchInComboBox select_directory method outside of the FindInFiles plugin.
def update_configurable(self, configurable_class, name, config): configurable_class_name = configurable_class.__name__.lower() logger.info( "updating %s: '%s'", configurable_class_name, name ) registry = self.registry_for(configurable_class) if name not in registry: ...
Callback fired when a configurable instance is updated. Looks up the existing configurable in the proper "registry" and `apply_config()` is called on it. If a method named "on_<configurable classname>_update" is defined it is called in the work pool and passed the configurable's name, ...
def mkdir(path, create_parent=True, check_if_exists=False):
    """Generate a unix command line for creating a directory.

    :param path: Directory path.
    :param create_parent: Create parent directories if necessary (default True).
    :param check_if_exists: Prepend a check so the command only runs when the
        directory does not already exist.
    :return: Shell command string.
    """
    cmd = _format_cmd('mkdir', path, _p=create_parent)
    if not check_if_exists:
        return cmd
    return 'if [[ ! -d {0} ]]; then {1}; fi'.format(path, cmd)
Generates a unix command line for creating a directory. :param path: Directory path. :type path: unicode | str :param create_parent: Create parent directories, if necessary. Default is ``True``. :type create_parent: bool :param check_if_exists: Prepend a check if the directory exists; in that case,...
def get_config(self, retrieve="all"): get_startup = retrieve == "all" or retrieve == "startup" get_running = retrieve == "all" or retrieve == "running" get_candidate = retrieve == "all" or retrieve == "candidate" if retrieve == "all" or get_running: result = self._execute_com...
get_config implementation for FortiOS.
def get_router_id(self, tenant_id, tenant_name): router_id = None if tenant_id in self.tenant_dict: router_id = self.tenant_dict.get(tenant_id).get('router_id') if not router_id: router_list = self.os_helper.get_rtr_by_name( 'FW_RTR_' + tenant_name) ...
Retrieve the router ID.
def read(self, path, environ): try: inp = open(path, 'rb') except FileNotFoundError as error: if error.errno != 2: raise return None parsing = parse_vexrc(inp, environ) for heading, key, value in parsing: heading = self.defa...
Read data from file into this vexrc instance.
def as_view(cls, **initkwargs): if isinstance(getattr(cls, 'queryset', None), models.query.QuerySet): def force_evaluation(): raise RuntimeError( 'Do not evaluate the `.queryset` attribute directly, ' 'as the result will be cached and reused be...
Store the original class on the view function. This allows us to discover information about the view when we do URL reverse lookups. Used for breadcrumb generation.
def by_name(self, name, archived=False, limit=None, page=None):
    """Get adapter data by name, optionally from the archived listing."""
    extra = ('archived',) if archived else ()
    path = _path(self.adapter, *extra)
    return self._get(path, name=name, limit=limit, page=page)
get adapter data by name.
def detach(gandi, resource, background, force): resource = sorted(tuple(set(resource))) if not force: proceed = click.confirm('Are you sure you want to detach %s?' % ', '.join(resource)) if not proceed: return result = gandi.disk.detach(resource, b...
Detach disks from the currently attached vm. Resource can be a disk name, or ID
def get_backend_tfvars_file(path, environment, region): backend_filenames = gen_backend_tfvars_files(environment, region) for name in backend_filenames: if os.path.isfile(os.path.join(path, name)): return name return backend_filenames[-1]
Determine Terraform backend file.
def end(self):
    """Finish the HTML output: close open list items, add nav links, close file."""
    out = self.html
    if self.lastUrl is not None:
        out.write(u'</li>\n')
    if self.lastComic is not None:
        out.write(u'</ul>\n')
    out.write(u'</ul>\n')
    self.addNavLinks()
    out.close()
End HTML output.
def _lock(self, url: str, name: str, hash_: str):
    """Record a downloaded file so it can be saved to the lock file.

    The entry is appended to ``_new_lock``; ``name`` is also removed from
    ``_stale_files``, so whatever remains there at the end is stale and can
    be deleted.
    """
    entry = {'url': url, 'name': name, 'hash': hash_}
    self._new_lock.append(entry)
    self._stale_files.pop(name, None)
Add details of the files downloaded to _new_lock so they can be saved to the lock file. Also remove path from _stale_files, whatever remains at the end therefore is stale and can be deleted.
def slack_user(request, api_data): if request.user.is_anonymous: return request, api_data data = deepcopy(api_data) slacker, _ = SlackUser.objects.get_or_create(slacker=request.user) slacker.access_token = data.pop('access_token') slacker.extras = data slacker.save() messages.add_mes...
Pipeline for backward compatibility prior to the 1.0.0 version. Use it in case you're willing to maintain the `slack_user` table.
def _build_amps_list(self, amp_value, processlist): ret = [] try: for p in processlist: add_it = False if (re.search(amp_value.regex(), p['name']) is not None): add_it = True else: for c in p['cmdline']: ...
Return the AMPS process list according to the amp_value Search application monitored processes by a regular expression
def map_seqprop_resnums_to_seqprop_resnums(self, resnums, seqprop1, seqprop2): resnums = ssbio.utils.force_list(resnums) alignment = self._get_seqprop_to_seqprop_alignment(seqprop1=seqprop1, seqprop2=seqprop2) mapped = ssbio.protein.sequence.utils.alignment.map_resnum_a_to_resnum_b(resnums=resnu...
Map a residue number in any SeqProp to another SeqProp using the pairwise alignment information. Args: resnums (int, list): Residue numbers in seqprop1 seqprop1 (SeqProp): SeqProp object the resnums match to seqprop2 (SeqProp): SeqProp object you want to map the resnums to ...
def up(self) -> "InstanceNode": ts = max(self.timestamp, self.parinst.timestamp) return self.parinst._copy(self._zip(), ts)
Return an instance node corresponding to the receiver's parent. Raises: NonexistentInstance: If there is no parent.
def Name(self, number):
    """Return the name of the enum value ``number``.

    Raises ValueError when the enum defines no value for ``number``.
    """
    values = self._enum_type.values_by_number
    if number in values:
        return values[number].name
    raise ValueError('Enum %s has no name defined for value %d' % (
        self._enum_type.name, number))
Returns a string containing the name of an enum value.
def _dispatch(self, textgroup, directory):
    """Dispatch a textgroup through the configured dispatcher.

    The SPARQL dispatcher does not need to dispatch works, because the link
    is stored in the database through the Textgroup.

    :param textgroup: A Textgroup object
    :param directory: The path in which the textgroup was found
    :return: None
    """
    self.dispatcher.dispatch(textgroup, path=directory)
Sparql dispatcher do not need to dispatch works, as the link is DB stored through Textgroup :param textgroup: A Textgroup object :param directory: The path in which we found the textgroup :return:
def display_string_dump(self, section_spec): section = _section_from_spec(self.elf_file, section_spec) if section is None: print("Section '%s' does not exist in the file!" % section_spec) return None data = section.data() dataptr = 0 strs = [] ...
Display a strings dump of a section. section_spec is either a section number or a name.
def _get_alm_disp_fc3(disp_dataset): natom = disp_dataset['natom'] ndisp = len(disp_dataset['first_atoms']) for disp1 in disp_dataset['first_atoms']: ndisp += len(disp1['second_atoms']) disp = np.zeros((ndisp, natom, 3), dtype='double', order='C') indices = [] count = 0 for disp1 in ...
Create displacements of atoms for ALM input Note ---- Displacements of all atoms in supercells for all displacement configurations in phono3py are returned, i.e., most of displacements are zero. Only the configurations with 'included' == True are included in the list of indices that is returned...
def setRegisterNumbersForTemporaries(ast, start): seen = 0 signature = '' aliases = [] for node in ast.postorderWalk(): if node.astType == 'alias': aliases.append(node) node = node.value if node.reg.immediate: node.reg.n = node.value contin...
Assign register numbers for temporary registers, keeping track of aliases and handling immediate operands.
def get_text_path(self): for res in self.dsDoc['dataResources']: resPath = res['resPath'] resType = res['resType'] isCollection = res['isCollection'] if resType == 'text' and isCollection: return os.path.join(self.dsHome, resPath) raise Run...
Returns the path of the directory containing text if they exist in this dataset.
def ssh_compute_remove(public_key, application_name, user=None): if not (os.path.isfile(authorized_keys(application_name, user)) or os.path.isfile(known_hosts(application_name, user))): return keys = ssh_authorized_keys_lines(application_name, user=None) keys = [k.strip() for k in keys] ...
Remove given public key from authorized_keys file. :param public_key: Public key. :type public_key: str :param application_name: Name of application eg nova-compute-something :type application_name: str :param user: The user that the ssh asserts are for. :type user: str
def update_in_hdx(self):
    """Update the user in HDX if it exists.

    The 'capacity' entry must not be part of the update payload, so it is
    popped before the call and restored afterwards.

    Returns:
        None
    """
    capacity = self.data.pop('capacity', None)
    self._update_in_hdx('user', 'id')
    if capacity is not None:
        self.data['capacity'] = capacity
Check if user exists in HDX and if so, update user Returns: None
def bounding_box_as_binary_map(alpha, threshold=0.1):
    """Return the bounding box of ``alpha`` as a binary map of the same shape.

    Same parameters as `bounding_box`.

    Returns
    -------
    binary_map : ndarray, ndim=2, dtype=np.bool_
        True inside the bounding box, False outside.
    """
    # NOTE(review): `threshold` is accepted but never forwarded to
    # bounding_box -- confirm whether that is intentional.
    r0, c0, r1, c1 = bounding_box(alpha)
    mask = np.zeros(alpha.shape, dtype=np.bool_)
    mask[r0:r1, c0:c1] = 1
    return mask
Similar to `bounding_box`, except returns the bounding box as a binary map the same size as the input. Same parameters as `bounding_box`. Returns ------- binary_map : ndarray, ndim=2, dtype=np.bool_ Binary map with True if object and False if background.
def get_cytoband_names():
    """Return the names of the available cytoband data files.

    >>> get_cytoband_names()  # doctest: +SKIP
    ['ucsc-hg38', 'ucsc-hg19']
    """
    suffix = ".json.gz"
    entries = pkg_resources.resource_listdir(__name__, _data_dir)
    return [entry.replace(suffix, "") for entry in entries
            if entry.endswith(suffix)]
Returns the names of available cytoband data files >>> get_cytoband_names() ['ucsc-hg38', 'ucsc-hg19']
def extract(cls, extractor, typ): schema = { "title": typ.__name__, "type": "object", "properties": {}, "required": [] } for attribute in attr.fields(typ): details = cls._extract_attribute(extractor, attribute) if details.is...
take an attrs based class, and convert it to jsonschema.
def clean_buckets(self, hash_name):
    """Remove all buckets and their contents for the specified hash."""
    keys = self._iter_bucket_keys(hash_name)
    self.redis_object.delete(*keys)
Removes all buckets and their content for specified hash.
def construct_xray_header(headers):
    """Build a ``TraceHeader`` from the incoming request's header dict.

    Always returns a ``TraceHeader`` object, whether or not a tracing
    header is present in the request.
    """
    header_str = (headers.get(http.XRAY_HEADER)
                  or headers.get(http.ALT_XRAY_HEADER))
    if not header_str:
        return TraceHeader()
    return TraceHeader.from_header_str(header_str)
Construct a ``TraceHeader`` object from dictionary headers of the incoming request. This method should always return a ``TraceHeader`` object regardless of tracing header's presence in the incoming request.
def filter(self, record):
    """Downgrade matching log records to DEBUG severity.

    If the record's message contains every keyword in ``self.KEYWORDS``
    (case-insensitive), its level is rewritten to DEBUG.  Always returns 1,
    so no record is ever dropped by this filter.
    """
    # NOTE(review): `basestring` is Python 2 only; on Python 3 this raises
    # NameError -- confirm the intended runtime.
    if isinstance(record.msg, basestring):
        message = record.msg.lower()
        if all(kw in message for kw in self.KEYWORDS):
            record.levelname = 'DEBUG'
            record.levelno = logging.DEBUG
    return 1
Change the severity of selected log records.
def declares_namespace_package(filename): import ast with open(filename) as fp: init_py = ast.parse(fp.read(), filename) calls = [node for node in ast.walk(init_py) if isinstance(node, ast.Call)] for call in calls: if len(call.args) != 1: continue if isinstance(call.func, ast.Attribute) and ca...
Given a filename, walk its ast and determine if it declares a namespace package.
def _validate_namespace(self, namespace):
    """Validate a namespace, raising a _ResponseFailed error if invalid.

    Args:
        namespace (str): the namespace to validate.

    Raises:
        _ResponseFailed: with INVALID_ADDRESS status when the namespace
            does not fully match the expected pattern.
    """
    if self._namespace_regex.fullmatch(namespace) is not None:
        return
    LOGGER.debug('Invalid namespace: %s', namespace)
    raise _ResponseFailed(self._status.INVALID_ADDRESS)
Validates a namespace, raising a ResponseFailed error if invalid. Args: namespace (str): The namespace to validate Raises: ResponseFailed: The namespace was invalid, and a status of INVALID_ADDRESS will be sent with the response.
def do_IAmRequest(self, apdu): if _debug: WhoIsIAmServices._debug("do_IAmRequest %r", apdu) if apdu.iAmDeviceIdentifier is None: raise MissingRequiredParameter("iAmDeviceIdentifier required") if apdu.maxAPDULengthAccepted is None: raise MissingRequiredParameter("maxAPDULe...
Respond to an I-Am request.
def DbGetDeviceFamilyList(self, argin):
    """Return the device-name families matching the given wildcard.

    :param argin: The wildcard
    :type: tango.DevString
    :return: Family list
    :rtype: tango.DevVarStringArray
    """
    self._log.debug("In DbGetDeviceFamilyList()")
    pattern = replace_wildcard(argin)
    return self.db.get_device_family_list(pattern)
Get a list of device name families for device name matching the specified wildcard :param argin: The wildcard :type: tango.DevString :return: Family list :rtype: tango.DevVarStringArray
def fetch(elastic, backend, limit=None, search_after_value=None, scroll=True): logging.debug("Creating a elastic items generator.") elastic_scroll_id = None search_after = search_after_value while True: if scroll: rjson = get_elastic_items(elastic, elastic_scroll_id, limit) e...
Fetch the items from raw or enriched index
def create_auth_group(sender, instance, created, **kwargs):
    """Signal handler: create the AuthGroup model when a group is created."""
    if not created:
        return
    AuthGroup.objects.create(group=instance)
Creates the AuthGroup model when a group is created
def cache_key(self):
    """Cache key scoped to the current site."""
    base = super(EntryPublishedVectorBuilder, self).cache_key
    site_pk = Site.objects.get_current().pk
    return '%s:%s' % (base, site_pk)
Key for the cache handling current site.
def setDatastreamState(self, pid, dsID, dsState):
    """Update datastream state.

    :param pid: object pid
    :param dsID: datastream id
    :param dsState: new datastream state
    :returns: True if the server responded with HTTP 200 OK
    """
    url = 'objects/%(pid)s/datastreams/%(dsid)s' % {'pid': pid, 'dsid': dsID}
    response = self.put(url, params={'dsState': dsState})
    return response.status_code == requests.codes.ok
Update datastream state. :param pid: object pid :param dsID: datastream id :param dsState: datastream state :returns: boolean success
def reverse_iterator(self, symbol, chunk_range=None): sym = self._get_symbol_info(symbol) if not sym: raise NoDataFoundException("Symbol does not exist.") c = CHUNKER_MAP[sym[CHUNKER]] for chunk in list(self.get_chunk_ranges(symbol, chunk_range=chunk_range, reverse=True)): ...
Returns a generator that accesses each chunk in descending order Parameters ---------- symbol: str the symbol for the given item in the DB chunk_range: None, or a range object allows you to subset the chunks by range Returns ------- gener...
def _get_assignment_target_end(self, ast_module): if len(ast_module.body) > 1: raise ValueError("More than one expression or assignment.") elif len(ast_module.body) > 0 and \ type(ast_module.body[0]) is ast.Assign: if len(ast_module.body[0].targets) != 1: ...
Returns position of 1st char after assignment target. If there is no assignment, -1 is returned If there are more than one of any ( expressions or assignments) then a ValueError is raised.
def my_main(context):
    """Entry point for the app: optionally dump the context, then return 0."""
    print('starting MyApp...')
    if context['debug']:
        print('Context:')
        for key, value in context.items():
            print('Key: {}\nValue: {}'.format(key, value))
    print('Done!')
    return 0
The starting point for your app.
def get(self, id):
    """Get a package.

    :param id: Package ID as an int.
    :return: :class:`packages.Package <packages.Package>` object
    :rtype: packages.Package
    """
    response = self.service.get_id(self.base, id)
    return self.service.decode(PackageSchema(), response)
Get a package. :param id: Package ID as an int. :return: :class:`packages.Package <packages.Package>` object :rtype: packages.Package
def deactivate_mfa_device(self, user_name, serial_number):
    """Deactivate the given MFA device and remove its user association.

    :type user_name: string
    :param user_name: The username of the user
    :type serial_number: string
    :param serial_number: The serial number uniquely identifying the device
    """
    return self.get_response('DeactivateMFADevice',
                             {'UserName': user_name,
                              'SerialNumber': serial_number})
Deactivates the specified MFA device and removes it from association with the user. :type user_name: string :param user_name: The username of the user :type serial_number: string :param serial_number: The serial number which uniquely identifies ...
def modify_identity(self, identity, **kwargs): if isinstance(identity, zobjects.Identity): self.request('ModifyIdentity', {'identity': identity._full_data}) return self.get_identities(identity=identity.name)[0] else: attrs = [] for attr, value in kwargs.it...
Modify some attributes of an identity or its name. :param: identity a zobjects.Identity with `id` set (mandatory). Also set items you want to modify/set and/or the `name` attribute to rename the identity. Can also take the name in string and then attributes to modif...
def find_link(self, device):
    """Find a connection index by number, address or label; None if absent."""
    for index, conn in enumerate(self.mpstate.mav_master):
        if (str(index) == device
                or conn.address == device
                or getattr(conn, 'label', None) == device):
            return index
    return None
find a device based on number, name or label
def move_transition_point(self, fragment_index, value): self.log(u"Called move_transition_point with") self.log([u" fragment_index %d", fragment_index]) self.log([u" value %.3f", value]) if (fragment_index < 0) or (fragment_index > (len(self) - 3)): self.log(u"Bad ...
Change the transition point between fragment ``fragment_index`` and the next fragment to the time value ``value``. This method fails silently (without changing the fragment list) if at least one of the following conditions holds: * ``fragment_index`` is negative * ``fra...
def assert_allowed(request, level, pid): if not d1_gmn.app.models.ScienceObject.objects.filter(pid__did=pid).exists(): raise d1_common.types.exceptions.NotFound( 0, 'Attempted to perform operation on non-existing object. pid="{}"'.format( pid ), ) ...
Assert that one or more subjects are allowed to perform action on object. Raise NotAuthorized if object exists and subject is not allowed. Raise NotFound if object does not exist.
def rename(self, name):
    """Rename the element.

    @param name: A new name for the element.
    @type name: basestring
    @raise ValueError: if ``name`` is None.
    """
    if name is None:
        # ValueError is more precise than the bare Exception previously
        # raised; callers catching Exception still work, since ValueError
        # is a subclass.
        raise ValueError("name (%s) not-valid" % (name,))
    self.prefix, self.name = splitPrefix(name)
Rename the element. @param name: A new name for the element. @type name: basestring
def applicationinsights_mgmt_plane_client(cli_ctx, _, subscription=None): from .vendored_sdks.mgmt_applicationinsights import ApplicationInsightsManagementClient from azure.cli.core._profile import Profile profile = Profile(cli_ctx=cli_ctx) if subscription: cred, _, _ = profile.get_login_credent...
Initialize Log Analytics mgmt client for use with CLI.
def get_comments_of_incoming_per_page(self, incoming_id, per_page=1000, page=1):
    """Get one page of comments for an incoming.

    :param incoming_id: the incoming id
    :param per_page: How many objects per page. Default: 1000
    :param page: Which page. Default: 1
    :return: list
    """
    query = {'incoming_id': incoming_id}
    return self._get_resource_per_page(resource=INCOMING_COMMENTS,
                                       per_page=per_page,
                                       page=page,
                                       params=query)
Get comments of incoming per page :param incoming_id: the incoming id :param per_page: How many objects per page. Default: 1000 :param page: Which page. Default: 1 :return: list
def which(exe): def wrapper(function): def wrapped(*args, **kwargs): if salt.utils.path.which(exe) is None: raise CommandNotFoundError( 'The \'{0}\' binary was not found in $PATH.'.format(exe) ) return function(*args, **kwargs) ...
Decorator wrapper for salt.utils.path.which
def register_lists(self, category_lists, lists_init_kwargs=None, editor_init_kwargs=None): lists_init_kwargs = lists_init_kwargs or {} editor_init_kwargs = editor_init_kwargs or {} for lst in category_lists: if isinstance(lst, string_types): lst = self.list_cls(lst, *...
Registers CategoryList objects to handle their requests. :param list category_lists: CategoryList objects :param dict lists_init_kwargs: Attributes to apply to each of CategoryList objects
def get_command_class(self, cmd):
    """Return the command class registered for ``cmd``.

    :param cmd: command to run (key at the registry)
    :raises CommandError: if ``cmd`` is not registered
    """
    try:
        cmdpath = self.registry[cmd]
    except KeyError:
        raise CommandError("No such command %r" % cmd)
    if not isinstance(cmdpath, basestring):
        return cmdpath
    return import_class(cmdpath)
Returns command class from the registry for a given ``cmd``. :param cmd: command to run (key at the registry)
def resume(config_path: str, restore_from: Optional[str], cl_arguments: Iterable[str], output_root: str) -> None: config = None try: config_path = find_config(config_path) restore_from = restore_from or path.dirname(config_path) config = load_config(config_file=config_path, additional_ar...
Load config from the directory specified and start the training. :param config_path: path to the config file or the directory in which it is stored :param restore_from: backend-specific path to the already trained model to be restored from. If ``None`` is passed, it is inferred from th...
def view_on_site(self, request, content_type_id, object_id): try: content_type = ContentType.objects.get(pk=content_type_id) if not content_type.model_class(): raise Http404(_("Content type %(ct_id)s object has no associated model") % { 'ct_id': conten...
Redirect to an object's page based on a content-type ID and an object ID.
def authorizer(self, schemes, resource, action, request_args): if not schemes: return u'', u'' for scheme in schemes: if scheme in self.schemes and self.has_auth_params(scheme): cred = Context.format_auth_params(self.schemes[scheme][u'params']) if ...
Construct the Authorization header for a request. Args: schemes (list of str): Authentication schemes supported for the requested action. resource (str): Object upon which an action is being performed. action (str): Action being performed. request_args (list ...
def check_valid(self, get_params):
    """Check whether this block's `if` condition holds for the parameters.

    Returns None implicitly when the block has no `if` condition.
    """
    condition = self.commands._if
    if condition:
        return condition.check_valid(get_params)
see if the if condition for a block is valid
def close_async(self):
    """Send the COM_QUIT message and close the socket (generator coroutine).

    If the stream is missing or already closed, just clears the reference
    without sending anything.
    """
    if self._stream is None or self._stream.closed():
        self._stream = None
        return
    # Packet: 4-byte little-endian payload length (1) followed by the
    # single COM_QUIT command byte.
    send_data = struct.pack('<i', 1) + int2byte(COMMAND.COM_QUIT)
    # Yield the write future so the coroutine runner awaits completion
    # before the socket is torn down.
    yield self._stream.write(send_data)
    self.close()
Send the quit message and close the socket
def create(self, parties):
    """Create the barrier for the given number of parties.

    Parameters:
        parties(int): The number of parties to wait for; must be positive.

    Returns:
        bool: Whether or not the new barrier was successfully created.

    Raises:
        ValueError: if ``parties`` is not a positive integer.
    """
    if parties <= 0:
        # Raise instead of assert: asserts are stripped under `python -O`,
        # which would silently allow invalid barriers.
        raise ValueError("parties must be a positive integer.")
    return self.backend.add(self.key, parties, self.ttl)
Create the barrier for the given number of parties. Parameters: parties(int): The number of parties to wait for. Returns: bool: Whether or not the new barrier was successfully created.
def run(self): _, test_data = self.data.load(train=False, test=True) try: self.model.fit_generator( self.samples_to_batches(self.generate_samples(), self.args.batch_size), steps_per_epoch=self.args.steps_per_epoch, epochs=self.epoch + self.args.epochs, validat...
Train the model on randomly generated batches
def get_indicator(self, resource):
    """Return a change indicator for a `Resource`.

    Normally (modification time, size); on non-POSIX systems directories
    additionally include their entry count, since directory metadata alone
    is less reliable there.
    """
    path = resource.real_path
    if os.name != 'posix' and os.path.isdir(path):
        return (os.path.getmtime(path),
                len(os.listdir(path)),
                os.path.getsize(path))
    return (os.path.getmtime(path), os.path.getsize(path))
Return the modification time and size of a `Resource`.
def assembly_plus_protons(input_file, path=True, pdb_name=None, save_output=False, force_save=False): from ampal.pdb_parser import convert_pdb_to_ampal if path: input_path = Path(input_file) if not pdb_name: pdb_name = input_path.stem[:4] reduced_pat...
Returns an Assembly with protons added by Reduce. Notes ----- Looks for a pre-existing Reduce output in the standard location before running Reduce. If the protein contains oligosaccharides or glycans, use reduce_correct_carbohydrates. Parameters ---------- input_file : str or pathlib....
def list(self, cur_p=''): current_page_number = int(cur_p) if cur_p else 1 current_page_number = 1 if current_page_number < 1 else current_page_number kwd = { 'current_page': current_page_number } recs = MEntity.get_all_pager(current_page_num=current_page_number) ...
Lists of the entities.
def _neighbors_graph(self, **params) -> Dict:
    """Get the neighbors of a node.

    Keyword parameters (e.g. depth, relationshipType) are passed straight
    through to the SciGraph ``graph/neighbors`` endpoint.
    """
    response = self._get_response("graph/neighbors", format="json", **params)
    return response.json()
Get neighbors of a node parameters are directly passed through to SciGraph: e.g. depth, relationshipType
def reload_accelerators(self, *args):
    """Reassign a fresh accel group to the guake main window and reload
    the accelerators."""
    window = self.guake.window
    if self.accel_group:
        window.remove_accel_group(self.accel_group)
    self.accel_group = Gtk.AccelGroup()
    window.add_accel_group(self.accel_group)
    self.load_accelerators()
Reassign an accel_group to guake main window and guake context menu and calls the load_accelerators method.
def walk_dir(path, args, state):
    """Check all files under `path` for requests to send out on the bus.

    Returns False as soon as processing fails or the state asks to quit,
    True otherwise.
    """
    if args.debug:
        sys.stderr.write("Walking %s\n" % path)
    for root, _dirs, files in os.walk(path):
        if not safe_process_files(root, files, args, state):
            return False
        if state.should_quit():
            return False
    return True
Check all files in `path' to see if there is any requests that we should send out on the bus.
def concentric_hexagons(radius, start=(0, 0)):
    """Generate coordinates of concentric rings of hexagons.

    Parameters
    ----------
    radius : int
        Number of layers to produce (0 is just the central hexagon).
    start : (x, y)
        The coordinate of the central hexagon.
    """
    x, y = start
    yield (x, y)
    directions = [(1, 1), (0, 1), (-1, 0), (-1, -1), (0, -1), (1, 0)]
    for ring in range(1, radius + 1):
        # Step down to this ring's starting hexagon.
        y -= 1
        for dx, dy in directions:
            for _ in range(ring):
                yield (x, y)
                x += dx
                y += dy
A generator which produces coordinates of concentric rings of hexagons. Parameters ---------- radius : int Number of layers to produce (0 is just one hexagon) start : (x, y) The coordinate of the central hexagon.
def get(request): res = Result() obj, created = UserPref.objects.get_or_create(user=request.user, defaults={'data': json.dumps(DefaultPrefs.copy())}) data = obj.json() data['subscriptions'] = [_.json() for _ in GallerySubscription.objects.filter(user=request.user)] res.append(data) return JsonRe...
Gets the currently logged in users preferences :returns: json
def init_all_objects(self, data, target=None, single_result=True):
    """Build model instances from *data*.

    Returns a single instance when single_result is True, otherwise a
    list of all expanded instances.
    """
    if not single_result:
        return list(self.expand_models(target, data))
    return self.init_target_object(target, data)
Initializes model instances from given data. Returns single instance if single_result=True.
def _escape(self, value):
    """Shell-escape *value*, leaving values already marked safe untouched."""
    return value if isinstance(value, SafeString) else shellescape.quote(value)
Escape given value unless it is safe.
def clone(name, new_name, linked=False, template=False, runas=None):
    """Clone a VM via ``prlctl clone``.

    .. versionadded:: 2016.11.0

    :param str name: Name/ID of the VM to clone
    :param str new_name: Name for the new VM
    :param bool linked: Create a linked virtual machine
    :param bool template: Create a VM template instead of a real VM
    :param str runas: User to run prlctl as
    """
    args = [salt.utils.data.decode(name),
            '--name', salt.utils.data.decode(new_name)]
    for flag, enabled in (('--linked', linked), ('--template', template)):
        if enabled:
            args.append(flag)
    return prlctl('clone', args, runas=runas)
Clone a VM .. versionadded:: 2016.11.0 :param str name: Name/ID of VM to clone :param str new_name: Name of the new VM :param bool linked: Create a linked virtual machine. :param bool template: Create a virtual machine template instead of a real virtual machine. ...
def _ExtractYahooSearchQuery(self, url): if 'p=' not in url: return None _, _, line = url.partition('p=') before_and, _, _ = line.partition('&') if not before_and: return None yahoo_search_url = before_and.split()[0] return yahoo_search_url.replace('+', ' ')
Extracts a search query from a Yahoo search URL. Examples: https://search.yahoo.com/search?p=query https://search.yahoo.com/search;?p=query Args: url (str): URL. Returns: str: search query or None if no query was found.
def append(self, data):
    """Append another Data instance's entries onto our own, key by key."""
    for key, entry in self._entries.items():
        entry.append(data._entries[key])
Append a Data instance to self
def blocking_start(self, waiting_func=None): self.logger.debug('threadless start') try: for job_params in self._get_iterator(): self.config.logger.debug('received %r', job_params) self.quit_check() if job_params is None: if ...
this function starts the task manager running to do tasks. The waiting_func is normally used to do something while other threads are running, but here we don't have other threads. So the waiting func will never get called. I can see wanting this function to be called at least once aft...
def encrypt(self, mesg):
    """Wrap a message with the next sequence number and encrypt it.

    Args:
        mesg: The mesg to encrypt.

    Returns:
        bytes: The encrypted message.
    """
    seqn = next(self._tx_sn)
    envelope = s_msgpack.en((seqn, mesg))
    return self._tx_tinh.enc(envelope)
Wrap a message with a sequence number and encrypt it. Args: mesg: The mesg to encrypt. Returns: bytes: The encrypted message.
def get_func_task_path(func):
    """Return the dotted ``module.function`` task path for *func* via inspection."""
    module = inspect.getmodule(func)
    return '.'.join((module.__name__, func.__name__))
Format the modular task path for a function via inspection.
def should_see_in_seconds(self, text, timeout):
    """Assert the provided text becomes visible within *timeout* seconds.

    The text may appear anywhere on the page and may span several HTML
    nodes; whitespace differences can affect matching.
    """
    def ensure_text_present():
        assert contains_content(world.browser, text), \
            "Expected element with the given text."
    wait_for(ensure_text_present)(timeout=int(timeout))
Assert provided text is visible within n seconds. Be aware this text could be anywhere on the screen. Also be aware that it might cross several HTML nodes. No determination is made between block and inline nodes. Whitespace can be affected.
def backtrack(self, decision_level): self._backtracking = True packages = set() while self._assignments[-1].decision_level > decision_level: removed = self._assignments.pop(-1) packages.add(removed.dependency.name) if removed.is_decision(): del...
Resets the current decision level to decision_level, and removes all assignments made after that level.
def make_driver(loop=None):
    """Return a stop driver.

    Parameters
    -----------
    loop: BaseEventLoop
        Optional event loop to stop instead of the default one.
    """
    event_loop = loop or asyncio.get_event_loop()

    # Any notification on the control sink — value, error, or
    # completion — halts the loop.
    def halt(_=None):
        event_loop.stop()

    def driver(sink):
        sink.control.subscribe(
            on_next=halt,
            on_error=halt,
            on_completed=halt)
        return None

    return Component(call=driver, input=Sink)
Returns a stop driver. The optional loop argument can be provided to use the driver in another loop than the default one. Parameters ----------- loop: BaseEventLoop The event loop to use instead of the default one.
def processCommit(self, commit: Commit, sender: str) -> None: self.logger.debug("{} received COMMIT{} from {}".format( self, (commit.viewNo, commit.ppSeqNo), sender)) if self.validateCommit(commit, sender): self.stats.inc(TPCStat.CommitRcvd) self.addToCommits(commit, ...
Validate and process the COMMIT specified. If validation is successful, return the message to the node. :param commit: an incoming COMMIT message :param sender: name of the node that sent the COMMIT