code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def time_at_elevation(self, elevation, direction=SUN_RISING, date=None, local=True):
    """Calculate the time when the sun is at the specified elevation.

    Elevations greater than 90 degrees are folded onto the setting side
    (e.g. 110 becomes a setting sun at 70 degrees).

    :param elevation: Elevation in degrees above the horizon.
    :param direction: ``astral.SUN_RISING`` or ``astral.SUN_SETTING``.
    :param date: Date to calculate for; defaults to today.
    :param local: True = location's time zone; False = UTC.
    :returns: The datetime at which the sun reaches the elevation.
    """
    if local and self.timezone is None:
        raise ValueError("Local time requested but Location has no timezone set.")
    if self.astral is None:
        self.astral = Astral()
    if date is None:
        date = datetime.date.today()
    # Fold "past zenith" elevations onto the setting side of the sky.
    if elevation > 90.0:
        elevation = 180.0 - elevation
        direction = SUN_SETTING
    time_ = self.astral.time_at_elevation_utc(
        elevation, direction, date, self.latitude, self.longitude
    )
    if local:
        return time_.astimezone(self.tz)
    else:
        return time_
Calculate the time when the sun is at the specified elevation. Note: This method uses positive elevations for those above the horizon. Elevations greater than 90 degrees are converted to a setting sun i.e. an elevation of 110 will calculate a setting sun at 70 degrees. :param elevation: Elevation in degrees above the horizon to calculate for. :type elevation: float :param direction: Determines whether the time is for the sun rising or setting. Use ``astral.SUN_RISING`` or ``astral.SUN_SETTING``. Default is rising. :type direction: int :param date: The date for which to calculate the elevation time. If no date is specified then the current date will be used. :type date: :class:`~datetime.date` :param local: True = Time to be returned in location's time zone; False = Time to be returned in UTC. If not specified then the time will be returned in local time :type local: bool :returns: The date and time at which the sun is at the specified elevation. :rtype: :class:`~datetime.datetime`
def read_string_from_file(path, encoding="utf8"):
    """Read the entire contents of *path* and return it as a string."""
    with codecs.open(path, "rb", encoding=encoding) as handle:
        return handle.read()
Read entire contents of file into a string.
def storeFASTA(fastaFNH):
    """Parse FASTA records by first reading the entire file into memory.

    :param fastaFNH: path to a FASTA file or an open file handle.
    :return: list of FASTARecord(id, description, sequence) entries.
    """
    fasta = file_handle(fastaFNH).read()
    # Split on '>' headers; element 0 of each record is the header line,
    # the remainder are sequence lines joined into one string.
    return [FASTARecord(rec[0].split()[0], rec[0].split(None, 1)[1], "".join(rec[1:]))
            for rec in (x.strip().split("\n") for x in fasta.split(">")[1:])]
Parse the records in a FASTA-format file by first reading the entire file into memory. :type source: path to FASTA file or open file handle :param source: The data source from which to parse the FASTA records. Expects the input to resolve to a collection that can be iterated through, such as an open file handle. :rtype: tuple :return: FASTA records containing entries for id, description and data.
def get_description(self, lang=None):
    """Return the CTS description metadata, optionally for *lang*.

    :param lang: Language to retrieve.
    :return: Description as a Literal.
    """
    return self.metadata.get_single(key=RDF_NAMESPACES.CTS.description, lang=lang)
Get the DC description of the object :param lang: Lang to retrieve :return: Description string representation :rtype: Literal
def indexOfClosest(arr, val):
    """Return the index in *arr* of the value closest to *val*.

    Ties go to the earliest index; returns None for an empty sequence.
    (Replaces a manual loop that compared with `== None`.)
    """
    if not arr:
        return None
    return min(range(len(arr)), key=lambda i: math.fabs(arr[i] - val))
Return the index in arr of the closest float value to val.
def _infer_binop(self, context):
    """Binary operation inference logic: yields inferred result nodes."""
    left = self.left
    right = self.right

    # Use separate copies of the context so inferring the lhs does not
    # pollute the inference path used for the rhs.
    context = context or contextmod.InferenceContext()
    lhs_context = contextmod.copy_context(context)
    rhs_context = contextmod.copy_context(context)
    lhs_iter = left.infer(context=lhs_context)
    rhs_iter = right.infer(context=rhs_context)
    for lhs, rhs in itertools.product(lhs_iter, rhs_iter):
        if any(value is util.Uninferable for value in (rhs, lhs)):
            # An operand could not be inferred; stop after reporting.
            yield util.Uninferable
            return
        try:
            yield from _infer_binary_operation(lhs, rhs, self, context, _get_binop_flow)
        except exceptions._NonDeducibleTypeHierarchy:
            yield util.Uninferable
Binary operation inference logic.
def _set_upload_status(self, file_data_object, upload_status): uuid = file_data_object['uuid'] return self.connection.update_data_object( uuid, {'uuid': uuid, 'value': { 'upload_status': upload_status}} )
Set file_data_object.file_resource.upload_status
def __execute_rot(self, surface):
    """Rotate *surface* by the current rotation and refresh surface extents."""
    self.image = pygame.transform.rotate(surface, self.__rotation)
    self.__resize_surface_extents()
Executes the rotating operation
def safe_better_repr(self, obj, context=None, html=True, level=0, full=False):
    """Repr with inspect links on objects.

    Falls back to a plain safe repr (with a recursion marker) when
    better_repr fails or recursion is detected via *context*.
    """
    context = context and dict(context) or {}
    recursion = id(obj) in context
    if not recursion:
        context[id(obj)] = obj
        try:
            rv = self.better_repr(obj, context, html, level + 1, full)
        except Exception:
            rv = None
        if rv:
            return rv
    # Cache the object so the generated inspect link can resolve it later.
    self.obj_cache[id(obj)] = obj
    if html:
        return '<a href="%d" class="inspect">%s%s</a>' % (
            id(obj),
            'Recursion of ' if recursion else '',
            escape(self.safe_repr(obj))
        )
    return '%s%s' % (
        'Recursion of ' if recursion else '',
        self.safe_repr(obj)
    )
Repr with inspect links on objects
def wrap_all(self, rows: Iterable[Union[Mapping[str, Any], Sequence[Any]]]):
    """Lazily yield the wrapped row tuple for each row in *rows*."""
    for row in rows:
        yield self.wrap(row)
Return row tuple for each row in rows.
def print_languages_and_exit(lst, status=1, header=True):
    """Print the given list of languages, then exit with *status*."""
    if header:
        print("Available languages:")
    for language in lst:
        print("- %s" % language)
    sys.exit(status)
print a list of languages and exit
def _terminate_process_iou(self):
    """Terminate the IOU process if running and mark the VM stopped."""
    if self._iou_process:
        log.info('Stopping IOU process for IOU VM "{}" PID={}'.format(self.name, self._iou_process.pid))
        try:
            self._iou_process.terminate()
        # The process may already be gone when we try to terminate it.
        except ProcessLookupError:
            pass
    self._started = False
    self.status = "stopped"
Terminate the IOU process if running
def peek(self, iroute: "InstanceRoute") -> Optional[Value]:
    """Return a value within the receiver's subtree.

    Args:
        iroute: Instance route (relative to the receiver).
    """
    val = self.value
    sn = self.schema_node
    # Walk the route one selector at a time, tracking value and schema node.
    for sel in iroute:
        val, sn = sel.peek_step(val, sn)
        if val is None:
            return None
    return val
Return a value within the receiver's subtree. Args: iroute: Instance route (relative to the receiver).
def delete_sandbox(self, si, logger, vcenter_data_model, delete_sandbox_actions, cancellation_context):
    """Delete a saved sandbox's artifacts.

    :param si: py_vmomi service instance
    :param logger: qs logger
    :param vcenter_data_model: VMwarevCenterResourceModel
    :param delete_sandbox_actions: delete actions from the request
    :param cancellation_context: command cancellation context
    :return: list of per-action results
    """
    results = []
    logger.info('Deleting saved sandbox command starting on ' + vcenter_data_model.default_datacenter)
    if not delete_sandbox_actions:
        raise Exception('Failed to delete saved sandbox, missing data in request.')
    # Group actions by save deployment model so each artifact handler
    # only receives the actions it knows how to delete.
    actions_grouped_by_save_types = groupby(delete_sandbox_actions, lambda x: x.actionParams.saveDeploymentModel)
    artifactHandlersToActions = {ArtifactHandler.factory(k,
                                                         self.pyvmomi_service,
                                                         vcenter_data_model,
                                                         si,
                                                         logger,
                                                         self.deployer,
                                                         None,
                                                         self.resource_model_parser,
                                                         self.snapshot_saver,
                                                         self.task_waiter,
                                                         self.folder_manager,
                                                         self.pg,
                                                         self.cs): list(g)
                                 for k, g in actions_grouped_by_save_types}
    self._validate_save_deployment_models(artifactHandlersToActions, delete_sandbox_actions, results)
    error_results = [r for r in results if not r.success]
    # Only proceed with deletion when validation produced no errors.
    if not error_results:
        results = self._execute_delete_saved_sandbox(artifactHandlersToActions, cancellation_context, logger, results)
    return results
Deletes a saved sandbox's artifacts :param vcenter_data_model: VMwarevCenterResourceModel :param vim.ServiceInstance si: py_vmomi service instance :type si: vim.ServiceInstance :param logger: Logger :type logger: cloudshell.core.logger.qs_logger.get_qs_logger :param list[SaveApp] delete_sandbox_actions: :param cancellation_context:
def get_execution_host_info():
    """Return a (HOSTNAME, CLUSTER_NAME) tuple describing the execution host.

    Reads the HOSTNAME and SGE_O_HOST environment variables, falling back
    to socket.gethostname() for the host and 'unknown' otherwise.
    Doesn't work for all queueing systems.
    """
    host = os.environ.get('HOSTNAME', None)
    cluster = os.environ.get('SGE_O_HOST', None)
    if host is None:
        try:
            import socket
            host = socket.gethostname()
        except Exception:
            # Best-effort only (was a bare `except:`); fall through to
            # the 'unknown' default below.
            pass
    return host or 'unknown', cluster or 'unknown'
Tries to return a tuple describing the execution host. Doesn't work for all queueing systems Returns: (HOSTNAME, CLUSTER_NAME)
def setValue(self, value):
    """Set the attribute's value, coercing non-Text input to Text.

    @param value: The new value (may be None).
    @return: self
    """
    self.value = value if isinstance(value, Text) else Text(value)
    return self
Set the attributes value @param value: The new value (may be None) @type value: basestring @return: self @rtype: L{Attribute}
def get_column(self, chrom, position, missing_seqs=MissingSequenceHandler.TREAT_AS_ALL_GAPS, species=None):
    """Get the alignment column at the specified chromosome and position.

    :raises NoSuchAlignmentColumnError: if the genome alignment is not
        defined at the locus.
    :raises NoUniqueColumnError: if the locus is ambiguously aligned.
    """
    blocks = self.get_blocks(chrom, position, position + 1)
    if len(blocks) == 0:
        raise NoSuchAlignmentColumnError("Request for column on chrom " +
                                         chrom + " at position " +
                                         str(position) + " not possible; " +
                                         "genome alignment not defined at " +
                                         "that locus.")
    if len(blocks) > 1:
        # BUG FIX: added the previously-missing space before "not",
        # which produced messages like "position 5not possible".
        raise NoUniqueColumnError("Request for column on chrom " + chrom +
                                  " at position " + str(position) + " not " +
                                  "possible; ambiguous alignment of that locus.")
    return blocks[0].get_column_absolute(position, missing_seqs, species)
Get the alignment column at the specified chromosome and position.
def _disable_access_key(self, force_disable_self=False):
    """Disable the access key unless running in validate (dry-run) mode."""
    client = self.client
    if self.validate is True:
        # Dry-run mode: do not touch the key.
        return
    else:
        try:
            client.update_access_key(
                UserName=self._search_user_for_key(),
                AccessKeyId=self.access_key_id,
                Status='Inactive'
            )
            logger.info(
                "Access key {id} has "
                "been disabled.".format(id=self.access_key_id)
            )
        except Exception as e:
            # Best-effort: log the failure instead of propagating it.
            logger.info(
                "Access key {id} could not "
                "be disabled due to: {e}.".format(
                    e=e, id=self.access_key_id
                )
            )
This function first checks to see if the key is already disabled; if not, it proceeds to disable it.
def update():
    """Install/upgrade the virtualenv's packages from requirements.txt,
    aborting the fabric run if pip fails."""
    with settings(warn_only=True):
        print(cyan('\nInstalling/Updating required packages...'))
        pip = local('venv/bin/pip install -U --allow-all-external --src libs -r requirements.txt', capture=True)
        if pip.failed:
            print(red(pip))
            abort("pip exited with return code %i" % pip.return_code)
        print(green('Packages requirements updated.'))
Update virtual env with requirements packages.
def subscribe(self, feedUrl):
    """Add a feed to the top-level subscription list.

    Subscribing appears idempotent: repeated calls do not error.
    Returns True on success, False otherwise (the HTTP layer may
    raise HTTPError).
    """
    response = self.httpPost(
        ReaderUrl.SUBSCRIPTION_EDIT_URL,
        {'ac': 'subscribe', 's': feedUrl})
    return bool(response and 'OK' in response)
Adds a feed to the top-level subscription list. Subscribing seems idempotent; you can subscribe multiple times without error. Returns True or throws HTTPError.
def expand_to_chunk_size(self, chunk_size, offset=Vec(0,0,0, dtype=int)):
    """Grow this bbox outward to the nearest chunk grid lines.

    Required:
      chunk_size: arraylike (x,y,z), the chunk size of the dataset,
        e.g. (64,64,64)
    Optional:
      offset: arraylike (x,y,z), the starting coordinate of the dataset
    """
    chunk_size = np.array(chunk_size, dtype=np.float32)
    result = self.clone()
    # Work in grid space relative to the dataset offset.
    result = result - offset
    result.minpt = np.floor(result.minpt / chunk_size) * chunk_size
    result.maxpt = np.ceil(result.maxpt / chunk_size) * chunk_size
    return (result + offset).astype(self.dtype)
Align a potentially non-axis aligned bbox to the grid by growing it to the nearest grid lines. Required: chunk_size: arraylike (x,y,z), the size of chunks in the dataset e.g. (64,64,64) Optional: offset: arraylike (x,y,z), the starting coordinate of the dataset
def on_successful_login(self, subject, authc_token, account_id):
    """React to a successful login.

    Always forgets any previously stored identity first; then, if the
    token requests RememberMe, remembers the identity for later sessions.

    :param subject: subject whose identifying attributes are remembered
    :param authc_token: token from the successful authentication attempt
    :param account_id: id of the authenticated account
    """
    # Always clear any previously remembered identity first.
    self.forget_identity(subject)

    if not authc_token.is_remember_me:
        msg = ("AuthenticationToken did not indicate that RememberMe is "
               "requested.  RememberMe functionality will not be executed "
               "for corresponding account.")
        logger.debug(msg)
        return

    self.remember_identity(subject, authc_token, account_id)
Reacts to the successful login attempt by first always forgetting any previously stored identity. Then if the authc_token is a ``RememberMe`` type of token, the associated identity will be remembered for later retrieval during a new user session. :param subject: the subject whose identifying attributes are being remembered :param authc_token: the token that resulted in a successful authentication attempt :param account_id: id of authenticated account
def comments_are_open(content_object):
    """Return whether comments are still open for the given target object.

    Defaults to True when no moderator is registered for the model.
    """
    moderator = get_model_moderator(content_object.__class__)
    if moderator is None:
        return True
    return CommentModerator.allow(moderator, None, content_object, None)
Return whether comments are still open for a given target object.
def extra_metadata(self):
    """Get extra metadata for the released file in this repository."""
    return get_extra_metadata(
        self.gh.api,
        self.repository['owner']['login'],
        self.repository['name'],
        self.release['tag_name'],
    )
Get extra metadata for file in repository.
def _add_device_to_device_group(self, device):
    """Add device to the device service cluster group.

    :param device: bigip object -- device to add to group
    """
    device_name = get_device_info(device).name
    dg = pollster(self._get_device_group)(device)
    dg.devices_s.devices.create(name=device_name, partition=self.partition)
    # Poll until the device is actually visible in the group.
    pollster(self._check_device_exists_in_device_group)(device_name)
Add device to device service cluster group. :param device: bigip object -- device to add to group
def catalogFactory(name, **kwargs):
    """Factory for various catalogs, looked up by class name in this module.

    :param name: class name of the catalog defined in this module.
    :raises Exception: for an unrecognized catalog name.
    """
    fn = lambda member: inspect.isclass(member) and member.__module__ == __name__
    catalogs = odict(inspect.getmembers(sys.modules[__name__], fn))
    if name not in catalogs:
        # BUG FIX: the message previously referenced the undefined name
        # `kernels`, so the failure path raised NameError instead of the
        # intended Exception.
        msg = "%s not found in catalogs:\n %s" % (name, list(catalogs.keys()))
        logger.error(msg)
        msg = "Unrecognized catalog: %s" % name
        raise Exception(msg)
    return catalogs[name](**kwargs)
Factory for various catalogs.
def read_remote_spec(filename, encoding='binary', cache=True, show_progress=True, **kwargs):
    """Read a FITS or ASCII spectrum from a remote location.

    :param filename: spectrum filename/URL.
    :param encoding, cache, show_progress: passed through to
        astropy.utils.data.get_readable_fileobj.
    :param kwargs: forwarded to the underlying read_spec call.
    :returns: (header, wavelengths, fluxes)
    """
    with get_readable_fileobj(filename, encoding=encoding, cache=cache,
                              show_progress=show_progress) as fd:
        header, wavelengths, fluxes = read_spec(fd, fname=filename, **kwargs)
    return header, wavelengths, fluxes
Read FITS or ASCII spectrum from a remote location. Parameters ---------- filename : str Spectrum filename. encoding, cache, show_progress See :func:`~astropy.utils.data.get_readable_fileobj`. kwargs : dict Keywords acceptable by :func:`read_fits_spec` (if FITS) or :func:`read_ascii_spec` (if ASCII). Returns ------- header : dict Metadata. wavelengths, fluxes : `~astropy.units.quantity.Quantity` Wavelength and flux of the spectrum.
def generate_sample_set(self, tags=None):
    """Generate a sample_set matching *tags* (or all samples if None).

    Args:
        tags: single tag or list of tags to match.
    Returns:
        The stored sample_set of samples matching the tags.
    """
    if isinstance(tags, str):
        tags = [tags]
    matching_md5s = self.data_store.tag_match(tags)
    return self.store_sample_set(matching_md5s)
Generate a sample_set that maches the tags or all if tags are not specified. Args: tags: Match samples against this tag list (or all if not specified) Returns: The sample_set of those samples matching the tags
def run(self):
    """Run the search command selected on the parsed arguments."""
    handlers = {
        'compound': self._search_compound,
        'reaction': self._search_reaction,
    }
    handler = handlers.get(self._args.which)
    if handler is not None:
        handler()
Run search command.
def _on_grant(self, grant):
    """Handle an incoming grant: schedule expiry and notify the contractor."""
    # Cancel the contract if the grant expires before completion.
    self.set_timeout(grant.expiration_time, ContractState.expired,
                     self._run_and_terminate, self.contractor.cancelled, grant)
    self.grant = grant
    self.set_remote_id(grant.sender_id)
    self.update_manager_address(grant.reply_to)
    self.call_agent_side(self.contractor.granted, grant, ensure_state=ContractState.granted)
Called upon receiving the grant. Than calls granted and sets up reporter if necessary.
def get(self):
    """Fetch the alarm's current state from the API and update this object."""
    new_alarm = self.entity.get_alarm(self)
    if new_alarm:
        self._add_details(new_alarm._info)
Fetches the current state of the alarm from the API and updates the object.
def to_sql(self, connection, grammar):
    """Get the raw SQL statements for the blueprint.

    :param connection: the connection to use
    :param grammar: the schema grammar used to compile each command
    :rtype: list
    """
    self._add_implied_commands()
    statements = []
    # Dispatch each command to the grammar's compile_<name> method,
    # if the grammar supports that command.
    for command in self._commands:
        method = "compile_%s" % command.name
        if hasattr(grammar, method):
            sql = getattr(grammar, method)(self, command, connection)
            if sql is not None:
                if isinstance(sql, list):
                    statements += sql
                else:
                    statements.append(sql)
    return statements
Get the raw SQL statements for the blueprint. :param connection: The connection to use :type connection: orator.connections.Connection :param grammar: The grammar to user :type grammar: orator.schema.grammars.SchemaGrammar :rtype: list
def detachChildren(self):
    """Detach and return this element's children.

    @return: The element's children (detached), each with parent None.
    """
    orphans, self.children = self.children, []
    for orphan in orphans:
        orphan.parent = None
    return orphans
Detach and return this element's children. @return: The element's children (detached). @rtype: [L{Element},...]
def signed_gt(a, b):
    """Return a single-bit result of the signed comparison a > b."""
    a, b = match_bitwidth(as_wires(a), as_wires(b), signed=True)
    r = b - a
    # Combine the sign bit of the difference with both operand sign bits
    # so the result is correct even when the subtraction overflows.
    return r[-1] ^ (~a[-1]) ^ (~b[-1])
Return a single bit result of signed greater than comparison.
def getVerifiersIDs(self):
    """Return the ids of users that have verified at least one analysis
    from this Analysis Request (deduplicated)."""
    unique_ids = set()
    for brain in self.getAnalyses():
        unique_ids.update(brain.getVerificators)
    return list(unique_ids)
Returns the ids from users that have verified at least one analysis from this Analysis Request
def get_random_word(dictionary, min_word_length=3, max_word_length=8):
    """Return a random word from *dictionary* with length within
    [min_word_length, max_word_length].

    :raises ValueError: if no word fits the bounds (the previous
        retry-loop implementation spun forever in that case).
    """
    candidates = [word for word in dictionary
                  if min_word_length <= len(word) <= max_word_length]
    if not candidates:
        raise ValueError("no word in dictionary within length bounds")
    return choice(candidates)
Returns a random word from the dictionary
def exec_(scope, data):
    """Send each line of *data* to the remote host, expecting a prompt
    after each, and store the combined output in __response__.

    :param data: iterable of strings to send.
    :returns: True
    """
    conn = scope.get('__connection__')
    response = []
    for line in data:
        conn.send(line)
        conn.expect_prompt()
        # Drop the echoed command line; keep only the command output.
        response += conn.response.split('\n')[1:]
    scope.define(__response__=response)
    return True
Sends the given data to the remote host and waits until the host has responded with a prompt. If the given data is a list of strings, each item is sent, and after each item a prompt is expected. This function also causes the response of the command to be stored in the built-in __response__ variable. :type data: string :param data: The data that is sent.
def _put (self, url_data):
    """Put URL in queue, increasing the number of unfinished tasks."""
    if self.shutdown or self.max_allowed_urls == 0:
        # Reject new URLs when shutting down or out of quota.
        return
    log.debug(LOG_CACHE, "queueing %s", url_data.url)
    key = url_data.cache_url
    cache = url_data.aggregate.result_cache
    if url_data.has_result or cache.has_result(key):
        # Already-resolved entries go to the front of the queue.
        self.queue.appendleft(url_data)
    else:
        assert key is not None, "no result for None key: %s" % url_data
        if self.max_allowed_urls is not None:
            self.max_allowed_urls -= 1
        self.num_puts += 1
        # Periodically purge finished entries.
        if self.num_puts >= NUM_PUTS_CLEANUP:
            self.cleanup()
        self.queue.append(url_data)
    self.unfinished_tasks += 1
Put URL in queue, increase number of unfinished tasks.
def get_availabilities(date):
    """Return demo 30-minute availability slots (ISO-8601 times) for *date*.

    Mondays get randomized availability between 10:00 and 16:00;
    Wednesdays/Fridays get fixed slots; other days return no slots.
    A full implementation would query a backend scheduling API.
    """
    day_of_week = dateutil.parser.parse(date).weekday()
    availabilities = []
    available_probability = 0.3
    if day_of_week == 0:
        # Monday: randomized availability per hour.
        start_hour = 10
        while start_hour <= 16:
            if random.random() < available_probability:
                # Randomly offer the :00 slot, the :30 slot, or both.
                appointment_type = get_random_int(1, 4)
                if appointment_type == 1:
                    availabilities.append('{}:00'.format(start_hour))
                elif appointment_type == 2:
                    availabilities.append('{}:30'.format(start_hour))
                else:
                    availabilities.append('{}:00'.format(start_hour))
                    availabilities.append('{}:30'.format(start_hour))
            start_hour += 1
    if day_of_week == 2 or day_of_week == 4:
        # Wednesday / Friday: fixed demo slots.
        availabilities.append('10:00')
        availabilities.append('16:00')
        availabilities.append('16:30')
    return availabilities
Helper function which in a full implementation would feed into a backend API to provide query schedule availability. The output of this function is an array of 30 minute periods of availability, expressed in ISO-8601 time format. In order to enable quick demonstration of all possible conversation paths supported in this example, the function returns a mixture of fixed and randomized results. On Mondays, availability is randomized; otherwise there is no availability on Tuesday / Thursday and availability at 10:00 - 10:30 and 4:00 - 5:00 on Wednesday / Friday.
def delete_object(cache, template, indexes):
    """Delete an object's fields in Redis using a single pipeline.

    Arguments:
        cache: context manager yielding a redis connection.
        template: dict mapping field names to redis key templates using
            named string interpolation, e.g. {'email': 'user:%(id)s:email'}.
        indexes: dict of values used to fill the key templates,
            e.g. {'id': 342}.
    """
    with cache as redis_connection:
        pipeline = redis_connection.pipeline()
        for field in set(template):
            pipeline.delete(template[field] % indexes)
        pipeline.execute()
Delete an object in Redis using a pipeline. Deletes all fields defined by the template. Arguments: template: a dictionary containg the keys for the object and template strings for the corresponding redis keys. The template string uses named string interpolation format. Example: { 'username': 'user:%(id)s:username', 'email': 'user:%(id)s:email', 'phone': 'user:%(id)s:phone' } indexes: a dictionary containing the values to use to construct the redis keys: Example: { 'id': 342 }
def drop_privileges():
    """Switch the current process UID to settings.DROPLET_USER.

    After calling this, root operations will be impossible to execute.
    """
    uid = int(pwd.getpwnam(settings.DROPLET_USER).pw_uid)
    os.setuid(uid)
Set settings.DROPLET_USER UID for the current process After calling this, root operation will be impossible to execute See root context manager
def get_generic_type(type_tag):
    """Map a device type tag (case-insensitive) to its generic type.

    Returns None for unknown tags.
    """
    return {
        DEVICE_ALARM: TYPE_ALARM,
        DEVICE_GLASS_BREAK: TYPE_CONNECTIVITY,
        DEVICE_KEYPAD: TYPE_CONNECTIVITY,
        DEVICE_REMOTE_CONTROLLER: TYPE_CONNECTIVITY,
        DEVICE_SIREN: TYPE_CONNECTIVITY,
        DEVICE_STATUS_DISPLAY: TYPE_CONNECTIVITY,
        DEVICE_DOOR_CONTACT: TYPE_OPENING,
        DEVICE_MOTION_CAMERA: TYPE_CAMERA,
        DEVICE_MOTION_VIDEO_CAMERA: TYPE_CAMERA,
        DEVICE_IP_CAM: TYPE_CAMERA,
        DEVICE_OUTDOOR_MOTION_CAMERA: TYPE_CAMERA,
        DEVICE_SECURE_BARRIER: TYPE_COVER,
        DEVICE_DIMMER: TYPE_LIGHT,
        DEVICE_DIMMER_METER: TYPE_LIGHT,
        DEVICE_HUE: TYPE_LIGHT,
        DEVICE_DOOR_LOCK: TYPE_LOCK,
        DEVICE_WATER_SENSOR: TYPE_CONNECTIVITY,
        DEVICE_SWITCH: TYPE_SWITCH,
        DEVICE_NIGHT_SWITCH: TYPE_SWITCH,
        DEVICE_POWER_SWITCH_SENSOR: TYPE_SWITCH,
        DEVICE_POWER_SWITCH_METER: TYPE_SWITCH,
        DEVICE_VALVE: TYPE_VALVE,
        DEVICE_ROOM_SENSOR: TYPE_UNKNOWN_SENSOR,
        DEVICE_TEMPERATURE_SENSOR: TYPE_UNKNOWN_SENSOR,
        DEVICE_MULTI_SENSOR: TYPE_UNKNOWN_SENSOR,
        DEVICE_PIR: TYPE_UNKNOWN_SENSOR,
        DEVICE_POVS: TYPE_UNKNOWN_SENSOR,
    }.get(type_tag.lower(), None)
Map type tag to generic type.
def add_frame_widget(self, ref, left=1, top=1, right=20, bottom=1, width=20, height=4, direction="h", speed=1):
    """Add a FrameWidget under *ref*, creating it only if not already present.

    Returns the (new or existing) widget registered under *ref*.
    """
    if ref not in self.widgets:
        widget = widgets.FrameWidget(
            screen=self,
            ref=ref,
            left=left,
            top=top,
            right=right,
            bottom=bottom,
            width=width,
            height=height,
            direction=direction,
            speed=speed,
        )
        self.widgets[ref] = widget
    return self.widgets[ref]
Add Frame Widget
def get_reservation_resources(session, reservation_id, *models):
    """Get all resources of the given models in a reservation.

    :param session: CloudShell API session
    :param reservation_id: active reservation ID
    :param models: requested resource model names
    :return: list of matching reservation resources
    """
    description = session.GetReservationDetails(reservation_id).ReservationDescription
    return [resource for resource in description.Resources
            if resource.ResourceModelName in models]
Get all resources of given models in reservation. :param session: CloudShell session :type session: cloudshell.api.cloudshell_api.CloudShellAPISession :param reservation_id: active reservation ID :param models: list of requested models :return: list of all resources of models in reservation
def drop(self):
    """Remove the database: delete the backing JSON file if one exists,
    otherwise clear the in-memory data."""
    import os
    if not self.path:
        self._data = {}
    elif os.path.exists(self.path):
        os.remove(self.path)
Remove the database by deleting the JSON file.
def line_count(fn):
    """Return the number of lines in file *fn*.

    Args:
        fn (str): Path to file.
    Return:
        Number of lines in file (int); 0 for an empty file
        (the previous implementation raised UnboundLocalError there).
    """
    count = 0
    with open(fn) as f:
        for count, _ in enumerate(f, start=1):
            pass
    return count
Get line count of file Args: fn (str): Path to file Return: Number of lines in file (int)
def getFullParList(configObj):
    """Return a flat list of all parameter names in configObj,
    recursing into nested sections."""
    plist = []
    for par in configObj.keys():
        if isinstance(configObj[par], configobj.Section):
            plist.extend(getFullParList(configObj[par]))
        else:
            plist.append(par)
    return plist
Return a single list of all parameter names included in the configObj regardless of which section the parameter was stored
def replace(self, scaling_group, name, cooldown, min_entities, max_entities, metadata=None):
    """Replace an existing ScalingGroup configuration.

    All of the attributes must be specified.  To delete an optional
    attribute, pass it in as None.
    """
    body = self._create_group_config_body(name, cooldown, min_entities,
                                          max_entities, metadata=metadata)
    group_id = utils.get_id(scaling_group)
    uri = "/%s/%s/config" % (self.uri_base, group_id)
    # NOTE(review): the response body is discarded and the method returns
    # None — confirm callers do not expect the updated config back.
    resp, resp_body = self.api.method_put(uri, body=body)
Replace an existing ScalingGroup configuration. All of the attributes must be specified If you wish to delete any of the optional attributes, pass them in as None.
def get_additional_handlers():
    """Resolve and return the functions named by the dotted paths in
    config.ADDITIONAL_HANDLERS, caching the result module-globally."""
    global _additional_handlers
    # Resolve the dotted paths only once; reuse the cached list afterwards.
    if not isinstance(_additional_handlers, list):
        handlers = []
        for name in config.ADDITIONAL_HANDLERS:
            module_name, function_name = name.rsplit('.', 1)
            function = getattr(import_module(module_name), function_name)
            handlers.append(function)
        _additional_handlers = handlers
    return _additional_handlers
Returns the actual functions from the dotted paths specified in ADDITIONAL_HANDLERS.
def clean_username(self, username):
    """Normalize the username's case per settings.CAS_FORCE_CHANGE_USERNAME_CASE
    before it is used to get or create the user object.

    :raises ImproperlyConfigured: for an unrecognized setting value.
    :returns: the cleaned username.
    """
    username_case = settings.CAS_FORCE_CHANGE_USERNAME_CASE
    if username_case == 'lower':
        username = username.lower()
    elif username_case == 'upper':
        username = username.upper()
    elif username_case is not None:
        raise ImproperlyConfigured(
            "Invalid value for the CAS_FORCE_CHANGE_USERNAME_CASE setting. "
            "Valid values are `'lower'`, `'upper'`, and `None`.")
    return username
Performs any cleaning on the "username" prior to using it to get or create the user object. Returns the cleaned username. By default, changes the username case according to `settings.CAS_FORCE_CHANGE_USERNAME_CASE`.
async def stop(wallet_name: str) -> None:
    """Gracefully stop an external revocation registry builder.

    Writes a .stop sentinel file and waits until no sub-directory still
    carries an .in-progress marker, so tasks already running can finish.

    :param wallet_name: name of external revocation registry builder's wallet
    """
    LOGGER.debug('RevRegBuilder.stop >>>')
    dir_sentinel = join(RevRegBuilder.dir_tails_sentinel(wallet_name))
    if isdir(dir_sentinel):
        open(join(dir_sentinel, '.stop'), 'w').close()
        # Poll once per second until all in-progress markers are gone.
        while any(isfile(join(dir_sentinel, d, '.in-progress')) for d in listdir(dir_sentinel)):
            await asyncio.sleep(1)
    LOGGER.debug('RevRegBuilder.stop <<<')
Gracefully stop an external revocation registry builder, waiting for its current tasks to complete. The indy-sdk toolkit uses a temporary directory for tails file generation, and shutting down the toolkit removes the directory, crashing the external tails file write. This method allows a graceful stop that waits for completion of such tasks already in progress. :param wallet_name: name of external revocation registry builder to check :return: None
def update(self, custom_field, params=None, **options):
    """Update an existing custom field via PUT.

    Only the fields provided in the `data` block will be updated; a
    custom field's `type` and an enum field's `enum_options` cannot be
    changed here.

    :param custom_field: Globally unique identifier for the custom field.
    :param params: Data for the request (defaults to an empty dict).
    :returns: The complete updated custom field record.
    """
    # BUG FIX: the default was a mutable `{}` shared across calls.
    if params is None:
        params = {}
    path = "/custom_fields/%s" % (custom_field)
    return self.client.put(path, params, **options)
A specific, existing custom field can be updated by making a PUT request on the URL for that custom field. Only the fields provided in the `data` block will be updated; any unspecified fields will remain unchanged When using this method, it is best to specify only those fields you wish to change, or else you may overwrite changes made by another user since you last retrieved the custom field. A custom field's `type` cannot be updated. An enum custom field's `enum_options` cannot be updated with this endpoint. Instead see "Work With Enum Options" for information on how to update `enum_options`. Returns the complete updated custom field record. Parameters ---------- custom_field : {Id} Globally unique identifier for the custom field. [data] : {Object} Data for the request
def pseudoify(self):
    """Derive a pseudo-feature parent from this multi-feature.

    The pseudo-feature shares the multi-feature's seqid and strand and
    spans the full range of all siblings; otherwise it is empty.  It is
    used only for convenience in sorting.
    """
    assert self.is_toplevel
    assert self.is_multi
    assert len(self.multi_rep.siblings) > 0
    rep = self.multi_rep
    # Span covers the representative plus every sibling.
    start = min([s.start for s in rep.siblings + [rep]])
    end = max([s.end for s in rep.siblings + [rep]])
    parent = Feature(None)
    parent._pseudo = True
    parent._seqid = self._seqid
    parent.set_coord(start, end)
    parent._strand = self._strand
    for sibling in rep.siblings + [rep]:
        parent.add_child(sibling, rangecheck=True)
    parent.children = sorted(parent.children)
    rep.siblings = sorted(rep.siblings)
    return parent
Derive a pseudo-feature parent from the given multi-feature. The provided multi-feature does not need to be the representative. The newly created pseudo-feature has the same seqid as the provided multi- feature, and spans its entire range. Otherwise, the pseudo-feature is empty. It is used only for convenience in sorting.
def resid_dev(self, endog, mu, scale=1.):
    r"""Gamma deviance residuals.

    Parameters
    ----------
    endog : array-like
        Endogenous response variable.
    mu : array-like
        Fitted mean response variable.
    scale : float, optional
        Present for interface compatibility; not applied in this formula.

    Returns
    -------
    resid_dev : array
        Signed square-root deviance residuals.
    """
    # BUG FIX: removed a stray bare `r` expression (a leftover of the
    # r-string docstring prefix) that raised NameError at call time.
    endog_mu = self._clean(endog / mu)
    return np.sign(endog - mu) * np.sqrt(-2 * (-(endog - mu) / mu +
                                               np.log(endog_mu)))
r""" Gamma deviance residuals Parameters ----------- endog : array-like Endogenous response variable mu : array-like Fitted mean response variable scale : float, optional An optional argument to divide the residuals by scale. The default is 1. Returns ------- resid_dev : array Deviance residuals as defined below
def register_handler(self, handler):
    """Register *handler* under its namespace and notify it of registration."""
    namespace = handler.namespace
    self._handlers[namespace] = handler
    handler.registered(self)
Register a new namespace handler.
def from_elements(cls, elts=None):
    """Create a node of this type from the given list of elements.

    :param elts: elements the node should contain; constant literals are
        wrapped with const_factory.
    :returns: a new node containing the given elements.
    """
    node = cls()
    if elts is None:
        node.elts = []
    else:
        node.elts = [const_factory(e) if _is_const(e) else e for e in elts]
    return node
Create a node of this type from the given list of elements. :param elts: The list of elements that the node should contain. :type elts: list(NodeNG) :returns: A new node containing the given elements. :rtype: NodeNG
def process_post_tags(self, bulk_mode, api_post, post_tags):
    """Create or update Tags related to a post.

    :param bulk_mode: if True, minimize db operations by bulk creating
        post objects
    :param api_post: the API data for the post
    :param post_tags: mapping of post ID -> list of Tags (mutated in place)
    :return: None
    """
    post_tags[api_post["ID"]] = []
    for api_tag in six.itervalues(api_post["tags"]):
        tag = self.process_post_tag(bulk_mode, api_tag)
        if tag:
            post_tags[api_post["ID"]].append(tag)
Create or update Tags related to a post. :param bulk_mode: If True, minimize db operations by bulk creating post objects :param api_post: the API data for the post :param post_tags: a mapping of Tags keyed by post ID :return: None
def get_pyquery(self, tree=None, page_numbers=None):
    """Wrap the given tree in PyQuery and return it.

    If no tree is supplied, one is generated from *page_numbers*
    (or all pages when none are given).
    """
    if not page_numbers:
        page_numbers = []
    if tree is None:
        # Reuse the cached full tree when no specific pages are requested.
        if not page_numbers and self.tree is not None:
            tree = self.tree
        else:
            tree = self.get_tree(page_numbers)
    if hasattr(tree, 'getroot'):
        tree = tree.getroot()
    return PyQuery(tree, css_translator=PDFQueryTranslator())
Wrap given tree in pyquery and return. If no tree supplied, will generate one from given page_numbers, or all page numbers.
def close_position(self, repay_only):
    """Close position.

    Args:
        repay_only (bool): Undocumented by cbpro.
    Returns:
        Undocumented
    """
    payload = json.dumps({'repay_only': repay_only})
    return self._send_message('post', '/position/close', data=payload)
Close position. Args: repay_only (bool): Undocumented by cbpro. Returns: Undocumented
def get_slope(self):
    """Return the slope m of the segment through p1 and p2.

    NOTE(review): raises ZeroDivisionError for vertical segments —
    confirm callers guard against p1.x == p2.x.
    """
    rise = self.p1.y - self.p2.y
    run = self.p1.x - self.p2.x
    return rise / run
Return the slope m of this line segment.
def tolerant_metaphone_processor(words):
    """Yield double-metaphone codes for each word, falling back to the
    original word when the algorithm produces no usable codes."""
    for word in words:
        r = 0
        for w in double_metaphone(word):
            if w:
                w = w.strip()
                if w:
                    r += 1
                    yield w
        if not r:
            # No codes produced for this word: emit the word itself.
            yield word
Double metaphone word processor slightly modified so that when no words are returned by the algorithm, the original word is returned.
def RollbackAll(close=None):
    """Roll back the open session of every registered engine.

    :param close: deprecated and ignored.
    """
    if close:
        warnings.simplefilter('default')
        warnings.warn("close parameter will not need at all.", DeprecationWarning)
    for k, v in engine_manager.items():
        # Only roll back sessions that already exist; do not create new ones.
        session = v.session(create=False)
        if session:
            session.rollback()
Rollback all transactions, according Local.conn
def initialise(self):
    """Initialise this data repository, creating any necessary
    directories, file paths, and database tables."""
    self._checkWriteMode()
    self._createSystemTable()
    self._createNetworkTables()
    self._createOntologyTable()
    self._createReferenceSetTable()
    self._createReferenceTable()
    self._createDatasetTable()
    self._createReadGroupSetTable()
    self._createReadGroupTable()
    self._createCallSetTable()
    self._createVariantSetTable()
    self._createVariantAnnotationSetTable()
    self._createFeatureSetTable()
    self._createContinuousSetTable()
    self._createBiosampleTable()
    self._createIndividualTable()
    self._createPhenotypeAssociationSetTable()
    self._createRnaQuantificationSetTable()
Initialise this data repository, creating any necessary directories and file paths.
def parse_attributes(self, attrstring):
    """Parse a GFF-style attribute string.

    Given semicolon-separated key=value pairs, return a dict mapping each
    key to its value; the ID value stays a plain string, while other
    values become {value: True} dicts keyed by comma-separated entries.
    """
    if attrstring in [None, '', '.']:
        return dict()
    result = dict()
    for pair in attrstring.split(';'):
        if not pair:
            continue
        key, value = pair.split('=')
        if key == 'ID':
            # IDs must be single values.
            assert ',' not in value
            result[key] = value
        else:
            result[key] = {entry: True for entry in value.split(',')}
    return result
Parse an attribute string. Given a string with semicolon-separated key-value pairs, populate a dictionary with the given attributes.
def startDataStoreMachine(self, dataStoreItemName, machineName):
    """Start the database instance running on a Data Store machine.

    :param dataStoreItemName: name of the data store item
    :param machineName: name of the machine to start the instance on
    """
    url = self._url + "/items/enterpriseDatabases/%s/machines/%s/start" % (dataStoreItemName, machineName)
    params = {
        "f": "json"
    }
    return self._post(url=url,
                      param_dict=params,
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
Starts the database instance running on the Data Store machine. Inputs: dataStoreItemName - name of the item to start machineName - name of the machine to start on
def _GetKeyFromRegistry(self):
    """Determine the key from the Windows Registry and copy its subkeys."""
    if not self._registry:
        return
    try:
        self._registry_key = self._registry.GetKeyByPath(self._key_path)
    except RuntimeError:
        pass
    if not self._registry_key:
        return
    for sub_registry_key in self._registry_key.GetSubkeys():
        self.AddSubkey(sub_registry_key)
    # CurrentControlSet is virtual; synthesize it under HKLM\System.
    if self._key_path == 'HKEY_LOCAL_MACHINE\\System':
        sub_registry_key = VirtualWinRegistryKey(
            'CurrentControlSet', registry=self._registry)
        self.AddSubkey(sub_registry_key)
    # Drop the registry reference once the key has been materialized.
    self._registry = None
Determines the key from the Windows Registry.
def debug_ratelimit(g):
    """Log github rate-limit information from the last API call.

    :param g: github.MainClass.Github instance
    """
    assert isinstance(g, github.MainClass.Github), type(g)
    debug("github ratelimit: {rl}".format(rl=g.rate_limiting))
Log debug of github ratelimit information from last API call Parameters ---------- org: github.MainClass.Github github object
def validate_args(self, qubits: Sequence[Qid]) -> None:
    """Checks if this gate can be applied to the given qubits.

    By default checks that the input is of type Qid and the qubit count
    matches.  Child classes can override.

    Args:
        qubits: The collection of qubits to potentially apply the gate to.

    Throws:
        ValueError: The gate can't be applied to the qubits.
    """
    if len(qubits) == 0:
        raise ValueError(
            "Applied a gate to an empty set of qubits. Gate: {}".format(
                repr(self)))
    if len(qubits) != self.num_qubits():
        raise ValueError(
            'Wrong number of qubits for <{!r}>. '
            'Expected {} qubits but got <{!r}>.'.format(
                self, self.num_qubits(), qubits))
    # Use a generator with any() instead of materializing a list.
    if any(not isinstance(qubit, Qid) for qubit in qubits):
        raise ValueError(
            'Gate was called with type different than Qid.')
Checks if this gate can be applied to the given qubits. By default checks if input is of type Qid and qubit count. Child classes can override. Args: qubits: The collection of qubits to potentially apply the gate to. Throws: ValueError: The gate can't be applied to the qubits.
def _get_transport(self):
    """Return the SSH transport to the remote gateway.

    Uses the configured SSH proxy when one is set; otherwise opens a
    plain TCP socket to the gateway host/port.
    """
    proxy = self.ssh_proxy
    if proxy:
        if isinstance(proxy, paramiko.proxy.ProxyCommand):
            shown = repr(proxy.cmd[1])
        else:
            shown = repr(proxy)
        self.logger.debug('Connecting via proxy: {0}'.format(shown))
        sock = proxy
    else:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

    # Only a raw socket (as opposed to a ProxyCommand) needs an explicit
    # timeout and connection to the gateway.
    if isinstance(sock, socket.socket):
        sock.settimeout(SSH_TIMEOUT)
        sock.connect((self.ssh_host, self.ssh_port))

    transport = paramiko.Transport(sock)
    transport.set_keepalive(self.set_keepalive)
    transport.use_compression(compress=self.compression)
    transport.daemon = self.daemon_transport
    return transport
Return the SSH transport to the remote gateway
def is_multilingual_project(site_id=None):
    """Whether the current Django project is configured for multilingual support."""
    from parler import appsettings
    if site_id is None:
        site_id = getattr(settings, 'SITE_ID', None)
    show_excluded = appsettings.PARLER_SHOW_EXCLUDED_LANGUAGE_TABS
    return show_excluded or site_id in appsettings.PARLER_LANGUAGES
Whether the current Django project is configured for multilingual support.
def check_int_param(self, param, low, high, name):
    """Validate that a parameter is int-like and within the given range.

    Designed for testing parameters like `mu` and `eps`: the value must
    be convertible to ``int`` and, for each bound that is given, fall
    within it.

    **Args:**

    * `param` : parameter to check (int or similar)
    * `low` : lowest allowed value (int), or None for no lower bound
    * `high` : highest allowed value (int), or None for no upper bound
    * `name` : name of the parameter (string), used in error messages

    **Returns:**

    * `param` : checked parameter converted to int

    **Raises:**

    * ValueError : if the value is not int-like or is out of range
    """
    try:
        param = int(param)
    # Narrowed from a bare except: only conversion failures are treated
    # as invalid input; anything else propagates.
    except (TypeError, ValueError):
        raise ValueError(
            'Parameter {} is not int or similar'.format(name)
        )
    # Check each bound independently: the original chained comparison
    # raised TypeError when only one of low/high was None.
    if (low is not None and param < low) or \
            (high is not None and param > high):
        raise ValueError('Parameter {} is not in range <{}, {}>'
                         .format(name, low, high))
    return param
Check if the value of the given parameter is in the given range and an int. Designed for testing parameters like `mu` and `eps`. To pass this function the variable `param` must be able to be converted into an int with a value between `low` and `high`. **Args:** * `param` : parameter to check (int or similar) * `low` : lowest allowed value (int), or None * `high` : highest allowed value (int), or None * `name` : name of the parameter (string), it is used for an error message **Returns:** * `param` : checked parameter converted to int
def make_pilothole_cutter(self):
    """Make a solid to subtract from an interfacing solid to bore a pilot-hole."""
    radius = self.pilothole_radius
    if radius is None:
        # No explicit radius configured: interpolate between the inner
        # and outer radii using the configured ratio.
        (inner, outer) = self.get_radii()
        radius = inner + self.pilothole_ratio * (outer - inner)
    workplane = cadquery.Workplane('XY')
    return workplane.circle(radius).extrude(self.length)
Make a solid to subtract from an interfacing solid to bore a pilot-hole.
def _loadcache(cachefile): cache = {} if os.path.exists(cachefile): with open(cachefile) as f: for line in f: line = line.split() if len(line) == 2: try: cache[int(line[0])] = float(line[1]) except: pass return cache
Returns a dictionary resulting from reading a likelihood cachefile
def after_submit(analysis):
    """Event handler triggered after a 'submit' transition for the analysis.

    Promotes the submit transition to the analysis' dependencies, to the
    Worksheet the analysis belongs to (the worksheet has its own guard
    that only lets the transition happen once all its analyses have been
    transitioned) and to the Analysis Request.

    Called automatically by bika.lims.workflow.AfterTransitionEventHandler.
    """
    # Flag the analysis as submitted and promote the transition to the
    # analyses this one depends on.
    alsoProvides(analysis, ISubmitted)
    promote_to_dependencies(analysis, "submit")

    # NOTE(review): reflex-rule processing appears to apply only to
    # request-bound analyses -- confirm against IRequestAnalysis usage.
    if IRequestAnalysis.providedBy(analysis):
        analysis._reflex_rule_process('submit')

    # Promote the transition to the worksheet (guarded elsewhere) and
    # queue its reindex.
    ws = analysis.getWorksheet()
    if ws:
        doActionFor(ws, 'submit')
        push_reindex_to_actions_pool(ws)

    # Promote the transition to the Analysis Request and reindex it.
    if IRequestAnalysis.providedBy(analysis):
        doActionFor(analysis.getRequest(), 'submit')
        reindex_request(analysis)
Method triggered after a 'submit' transition for the analysis passed in is performed. Promotes the submit transition to the Worksheet to which the analysis belongs to. Note that for the worksheet there is already a guard that assures the transition to the worksheet will only be performed if all analyses within the worksheet have already been transitioned. This function is called automatically by bika.lims.workflow.AfterTransitionEventHandler
def _run_with_kvm(self, qemu_path, options): if sys.platform.startswith("linux") and self.manager.config.get_section_config("Qemu").getboolean("enable_kvm", True) \ and "-no-kvm" not in options: if os.path.basename(qemu_path) not in ["qemu-system-x86_64", "qemu-system-i386", "qemu-kvm"]: return False if not os.path.exists("/dev/kvm"): if self.manager.config.get_section_config("Qemu").getboolean("require_kvm", True): raise QemuError("KVM acceleration cannot be used (/dev/kvm doesn't exist). You can turn off KVM support in the gns3_server.conf by adding enable_kvm = false to the [Qemu] section.") else: return False return True return False
Check if we could run qemu with KVM :param qemu_path: Path to qemu :param options: String of qemu user options :returns: Boolean True if we need to enable KVM
def format_error(err_type, err_value, err_trace=None):
    """Properly formats the specified error.

    Without a traceback, returns a single ``Name: message`` line with any
    module prefix stripped from the exception name; with a traceback,
    returns the fully formatted traceback.
    """
    if err_trace is not None:
        return "".join(
            traceback.format_exception(err_type, err_value, err_trace)
        ).strip()

    summary = "".join(
        traceback.format_exception_only(err_type, err_value)
    ).strip()
    pieces = summary.split(": ", 1)
    if len(pieces) == 1:
        name, message = pieces[0], ""
    else:
        name, message = pieces
    # Drop any module qualification, e.g. "pkg.Error" -> "Error".
    return name.split(".")[-1] + ": " + message
Properly formats the specified error.
def CRPS(label, pred):
    """Custom evaluation metric on CRPS.

    Enforces that each row of ``pred`` is a non-decreasing CDF (in
    place, as the original Python loop did), then returns the mean
    squared difference to ``label``.

    Args:
        label: ndarray of target CDF values, same shape as ``pred``.
        pred: ndarray of predicted CDF values; monotonized in place.

    Returns:
        float: mean of squared errors.
    """
    # Vectorized running maximum replaces the O(rows*cols) Python double
    # loop; ``out=pred`` preserves the original in-place side effect.
    np.maximum.accumulate(pred, axis=1, out=pred)
    return np.sum(np.square(label - pred)) / label.size
Custom evaluation metric on CRPS.
def to_ip(self):
    """Return a copy of the data inside a TDIP container.

    Raises:
        Exception: if the data has no "chargeability" column.
    """
    # Guard first: a TDIP container is meaningless without chargeability.
    if 'chargeability' not in self.data.columns:
        raise Exception('Missing column "chargeability"')
    return reda.TDIP(data=self.data)
Return of copy of the data inside a TDIP container
def set_inputhook(self, callback):
    """Set PyOS_InputHook to callback and return the previous one.

    :param callback: callable invoked by the interpreter's input hook.
    :returns: the previously installed hook as a callable.
    """
    # NOTE(review): presumably ctrl-C handling is delegated to the GUI
    # event loop while the hook is installed -- confirm.
    ignore_CTRL_C()
    self._callback = callback
    # Keep a reference to the ctypes function wrapper on self so it is
    # not garbage-collected while installed as the hook.
    self._callback_pyfunctype = self.PYFUNC(callback)
    pyos_inputhook_ptr = self.get_pyos_inputhook()
    # Capture the previous hook (as a callable) before overwriting it.
    original = self.get_pyos_inputhook_as_func()
    # Write the address of our wrapper into PyOS_InputHook.
    pyos_inputhook_ptr.value = \
        ctypes.cast(self._callback_pyfunctype, ctypes.c_void_p).value
    self._installed = True
    return original
Set PyOS_InputHook to callback and return the previous one.
def _get_min_addr(self): if not self._regions: if self.project.arch.name != "Soot": l.error("self._regions is empty or not properly set.") return None return next(self._regions.irange())
Get the minimum address out of all regions. We assume self._regions is sorted. :return: The minimum address. :rtype: int
def xor_key(first, second, trafaret):
    """xor_key - takes `first` and `second` key names and `trafaret`.

    Checks that data contains only `first` or only `second`, not both,
    and at least one. Then checks that key's value against trafaret.
    """
    trafaret = t.Trafaret._trafaret(trafaret)

    def check_(value):
        if (first in value) ^ (second in value):
            key = first if first in value else second
            yield first, t.catch_error(trafaret, value[key]), (key,)
        elif first in value and second in value:
            yield first, t.DataError(error='correct only if {} is not defined'.format(second)), (first,)
            yield second, t.DataError(error='correct only if {} is not defined'.format(first)), (second,)
        else:
            # Bug fix: interpolate the actual key names instead of the
            # literal strings 'second' / 'first'.
            yield first, t.DataError(error='is required if {} is not defined'.format(second)), (first,)
            yield second, t.DataError(error='is required if {} is not defined'.format(first)), (second,)

    return check_
xor_key - takes `first` and `second` key names and `trafaret`. Checks if we have only `first` or only `second` in data, not both, and at least one. Then checks key value against trafaret.
def user_id(self):
    """Return the ID of the current request's user, or None.

    None is returned when flask-login is unavailable, no login manager
    is configured, or the current user is not authenticated.
    """
    if not has_flask_login or not hasattr(current_app, 'login_manager'):
        return
    try:
        authenticated = current_user.is_authenticated
    except AttributeError:
        return
    # Older flask-login exposes is_authenticated as a method.
    if callable(authenticated):
        authenticated = authenticated()
    if not authenticated:
        return
    return current_user.get_id()
Return the ID of the current request's user
def predict_maxprob(self, x, **kwargs):
    """Most likely value. Generally equivalent to predict."""
    raw_values = x.values
    return self.base_estimator_.predict(raw_values, **kwargs)
Most likely value. Generally equivalent to predict.
def get_logger(name, level=None):
    """Return a setup logger under the \"jb.\" namespace.

    :param name: name for the logger; using ``__name__`` is advised.
    :type name: str
    :param level: optional logging level, e.g. ``logging.DEBUG``.
    :type level: int
    :returns: the logger instance.
    :rtype: logging.Logger
    """
    logger = logging.getLogger("jb.{0}".format(name))
    if level is not None:
        logger.setLevel(level)
    return logger
Return a setup logger for the given name :param name: The name for the logger. It is advised to use __name__. The logger name will be prepended by \"jb.\". :type name: str :param level: the logging level, e.g. logging.DEBUG, logging.INFO etc :type level: int :returns: Logger :rtype: logging.Logger :raises: None The logger default level is defined in the constants :data:`jukeboxcore.constants.DEFAULT_LOGGING_LEVEL` but can be overwritten by the environment variable \"JUKEBOX_LOG_LEVEL\"
def update_from_json(self, json_device):
    """Set all attributes based on API response."""
    header = json_device['EquipmentHeader']
    location = json_device['Location']

    self.identifier = json_device['Id']
    self.license_plate = header['SerialNumber']
    self.make = header['Make']
    self.model = header['Model']
    self.equipment_id = header['EquipmentID']
    self.active = json_device['EngineRunning']
    self.odo = json_device['Odometer']
    self.latitude = location['Latitude']
    self.longitude = location['Longitude']
    self.altitude = location['Altitude']
    self.speed = json_device['Speed']
    self.last_seen = location['DateTime']
Set all attributes based on API response.
def get_file_size(self, path):
    """Returns size of the file at given ``path``."""
    blob_id = self._get_id_for_path(path)
    blob = self.repository._repo[blob_id]
    return blob.raw_length()
Returns size of the file at given ``path``.
def clean_tmpdir(path):
    """Invoked atexit, this removes our tmpdir if it still exists."""
    # isdir() is False for a nonexistent path, so a single check suffices.
    if os.path.isdir(path):
        rmtree(path)
Invoked atexit, this removes our tmpdir
def process_dimensions(kdims, vdims):
    """Converts kdims and vdims to Dimension objects.

    Args:
        kdims: List or single key dimension(s) specified as strings,
            tuples, dicts or Dimension objects.
        vdims: List or single value dimension(s) specified as strings,
            tuples, dicts or Dimension objects.

    Returns:
        Dictionary with 'kdims' and 'vdims' mapped to lists of Dimension
        objects (a group is absent when its argument was None).

    Raises:
        ValueError: when an argument or element has an unsupported type.
    """
    valid_types = (tuple, basestring, Dimension, dict)
    dimensions = {}
    for group, dims in (('kdims', kdims), ('vdims', vdims)):
        if dims is None:
            continue
        if isinstance(dims, valid_types):
            # A single dimension spec is promoted to a one-element list.
            dims = [dims]
        elif not isinstance(dims, list):
            raise ValueError("%s argument expects a Dimension or list of dimensions, "
                             "specified as tuples, strings, dictionaries or Dimension "
                             "instances, not a %s type. Ensure you passed the data as the "
                             "first argument." % (group, type(dims).__name__))
        for dim in dims:
            if not isinstance(dim, valid_types):
                raise ValueError('Dimensions must be defined as a tuple, '
                                 'string, dictionary or Dimension instance, '
                                 'found a %s type.' % type(dim).__name__)
        dimensions[group] = [asdim(d) for d in dims]
    return dimensions
Converts kdims and vdims to Dimension objects. Args: kdims: List or single key dimension(s) specified as strings, tuples dicts or Dimension objects. vdims: List or single value dimension(s) specified as strings, tuples dicts or Dimension objects. Returns: Dictionary containing kdims and vdims converted to Dimension objects: {'kdims': [Dimension('x')], 'vdims': [Dimension('y')]}
def example_panel(self, ax, feature):
    """An example panel that just prints the text of the feature."""
    label = '%s:%s-%s' % (feature.chrom, feature.start, feature.stop)
    ax.text(0.5, 0.5, label, transform=ax.transAxes)
    return feature
A example panel that just prints the text of the feature.
def propagate(self, *arg, **kw):
    """Propagates activation through the network.

    Delegates to ``Network.propagate`` and coerces the resulting
    activations to plain Python floats; dict outputs are converted
    per layer (mutated in place, as before).
    """
    output = Network.propagate(self, *arg, **kw)
    if self.interactive:
        self.updateGraphics()
    # isinstance instead of an exact type comparison so dict subclasses
    # (e.g. OrderedDict) take the per-layer conversion path too.
    if isinstance(output, dict):
        for layer_name in output:
            output[layer_name] = [float(x) for x in output[layer_name]]
        return output
    return [float(x) for x in output]
Propagates activation through the network.
def keys_list(gandi, fqdn):
    """List domain keys."""
    keys = gandi.dns.keys(fqdn)
    fields = ['uuid', 'algorithm', 'algorithm_name', 'ds',
              'flags', 'status']
    for index, key in enumerate(keys):
        # Separate entries visually, starting from the second one.
        if index > 0:
            gandi.separator_line()
        output_generic(gandi, key, fields, justify=15)
    return keys
List domain keys.
def remove_and_record_multiple_spaces_in_line(line):
    """Collapse runs of multiple spaces in ``line`` into single spaces.

    @param line: (string) the text line to be processed.
    @return: (tuple) containing a dictionary and a string. The dictionary
        maps the start index of each multi-space run to the number of
        spaces removed there (e.g. { 22 : 3 } when 3 spaces were removed
        at index 22). The string is the line after each multi-space run
        has been replaced with a single space.
    """
    removed_spaces = {
        match.start(): match.end() - match.start() - 1
        for match in re_group_captured_multiple_space.finditer(line)
    }
    collapsed_line = re_group_captured_multiple_space.sub(u' ', line)
    return (removed_spaces, collapsed_line)
For a given string, locate all occurrences of multiple spaces together in the line, record the number of spaces found at each position, and replace them with a single space. @param line: (string) the text line to be processed for multiple spaces. @return: (tuple) containing a dictionary and a string. The dictionary contains information about the number of spaces removed at given positions in the line. For example, if 3 spaces were removed from the line at index '22', the dictionary would be set as follows: { 22 : 3 } The string that is also returned in this tuple is the line after multiple-space occurrences have been replaced with single spaces.
def create_lzma(archive, compression, cmd, verbosity, interactive, filenames):
    """Create an LZMA archive (legacy 'alone' container format) with the
    lzma Python module."""
    return _create(archive, compression, cmd, 'alone', verbosity, filenames)
Create an LZMA archive with the lzma Python module.
def spin(self):
    """Perform a single spin: render one full cycle of the spinner
    characters to the output."""
    for char in self.spinchars:
        self.string = "{0}...\t{1}\r".format(self.msg, char)
        self.out.write(self.string.encode('utf-8'))
        self.out.flush()
        time.sleep(self.waittime)
Perform a single spin
def make_for_loop(loop_body_instrs, else_body_instrs, context):
    """Make an ast.For node.

    :param loop_body_instrs: deque of instructions for the loop, starting
        with the instructions that build the iterable.
    :param else_body_instrs: instructions for the loop's ``else`` clause.
    :param context: parsing context forwarded to the body builder.
    :returns: an ``ast.For`` node.
    """
    # Instructions up to (but not including) GET_ITER build the iterable
    # expression.
    iterator_expr = make_expr(
        popwhile(not_a(instrs.GET_ITER), loop_body_instrs, side='left')
    )

    # Discard the GET_ITER instruction; the next instruction marks the
    # top of the loop (the FOR_ITER).
    loop_body_instrs.popleft()
    top_of_loop = loop_body_instrs.popleft()

    # The instruction(s) following FOR_ITER store the yielded value into
    # the loop target.
    target = make_assign_target(
        loop_body_instrs.popleft(), loop_body_instrs, stack=[],
    )

    # Split the remaining instructions into the loop body and the
    # ``else`` suite.
    body, orelse_body = make_loop_body_and_orelse(
        top_of_loop, loop_body_instrs, else_body_instrs, context
    )

    return ast.For(
        target=target,
        iter=iterator_expr,
        body=body,
        orelse=orelse_body,
    )
Make an ast.For node.
def call(self, obj, name, method, args, kwargs):
    """Trigger a method along with its beforebacks and afterbacks.

    Parameters
    ----------
    obj: object
        The object whose method is being triggered.
    name: str
        The name of the method that will be called.
    method: callable
        The base method to invoke.
    args: tuple
        The arguments that will be passed to the base method.
    kwargs: dict
        The keyword args that will be passed to the base method.
    """
    if name not in self._callback_registry:
        return method(*args, **kwargs)

    beforebacks, afterbacks = zip(*self._callback_registry.get(name, []))

    # Run each beforeback, remembering its result for the afterbacks.
    hold = []
    for before in beforebacks:
        if before is None:
            hold.append(None)
        else:
            data = Data(name=name, kwargs=kwargs.copy(), args=args)
            hold.append(before(obj, data))

    out = method(*args, **kwargs)

    for after, before_value in zip(afterbacks, hold):
        if after is not None:
            after(obj, Data(before=before_value, name=name, value=out))
        elif callable(before_value):
            # A beforeback may return a closure acting as its own afterback.
            before_value(out)
    return out
Trigger a method along with its beforebacks and afterbacks. Parameters ---------- name: str The name of the method that will be called args: tuple The arguments that will be passed to the base method kwargs: dict The keyword args that will be passed to the base method
def authorization_header(oauth_params):
    """Return Authorization header value for the given OAuth parameters."""
    quoted_pairs = ['{0}="{1}"'.format(key, urllib.quote(str(val)))
                    for key, val in oauth_params.items()]
    return 'OAuth realm="",' + ','.join(quoted_pairs)
Return Authorization header
def from_two_dim_array(cls, cols, rows, twoDimArray):
    """Create a new Matrix instance from a two dimensional array.

    :param integer cols: The number of columns for the Matrix.
    :param integer rows: The number of rows for the Matrix.
    :param list twoDimArray: A two dimensional column based array with
        the values of the matrix.
    :raise: Raises a :py:exc:`ValueError` if cols < 1, rows < 1, or the
        size of the parameter does not match the size of the Matrix.
    """
    return Matrix(cols, rows, twoDimArray,
                  rowBased=False, isOneDimArray=False)
Create a new Matrix instance from a two dimensional array. :param integer cols: The number of columns for the Matrix. :param integer rows: The number of rows for the Matrix. :param list twoDimArray: A two dimensional column based array with the values of the matrix. :raise: Raises an :py:exc:`ValueError` if: - cols < 1 or - rows < 1 - the size of the parameter does not match with the size of the Matrix.
def pexpireat(self, key, timestamp):
    """Set expire timestamp on key, timestamp in milliseconds.

    :raises TypeError: if timestamp is not int
    """
    if not isinstance(timestamp, int):
        raise TypeError("timestamp argument must be int, not {!r}"
                        .format(timestamp))
    pending = self.execute(b'PEXPIREAT', key, timestamp)
    return wait_convert(pending, bool)
Set expire timestamp on key, timestamp in milliseconds. :raises TypeError: if timestamp is not int
def debug(self, value):
    """Turn on debug logging if necessary.

    :param value: Value of debug flag
    """
    self._debug = value
    # Only escalate the root logger when debugging is enabled; never
    # lower it back, matching the original behaviour.
    if value:
        logging.getLogger().setLevel(logging.DEBUG)
Turn on debug logging if necessary. :param value: Value of debug flag