code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
def time_at_elevation(self, elevation, direction=SUN_RISING, date=None, local=True):
if local and self.timezone is None:
raise ValueError("Local time requested but Location has no timezone set.")
if self.astral is None:
self.astral = Astral()
if date is None:
... | Calculate the time when the sun is at the specified elevation.
Note:
This method uses positive elevations for those above the horizon.
Elevations greater than 90 degrees are converted to a setting sun
i.e. an elevation of 110 will calculate a setting sun at 70 degrees.
... |
def read_string_from_file(path, encoding="utf8"):
    """Read entire contents of file into a string.

    :param path: Path of the file to read.
    :param encoding: Text encoding used to decode the bytes (default "utf8").
    :return: The decoded file contents as a single string.
    """
    # The original used the legacy codecs.open(path, "rb", encoding=...),
    # which decodes but performs NO newline translation. newline='' keeps
    # that exact behavior with the modern built-in open().
    with open(path, "r", encoding=encoding, newline="") as f:
        return f.read()
def storeFASTA(fastaFNH):
    """Parse the records in a FASTA-format file by first reading the entire
    file into memory.

    :param fastaFNH: path to a FASTA file or an open file handle.
    :return: list of FASTARecord objects (id, description, joined sequence).
    """
    raw = file_handle(fastaFNH).read()
    records = []
    # Records are delimited by '>'; text before the first '>' is discarded.
    for chunk in raw.split(">")[1:]:
        lines = chunk.strip().split("\n")
        record_id = lines[0].split()[0]
        description = lines[0].split(None, 1)[1]
        sequence = "".join(lines[1:])
        records.append(FASTARecord(record_id, description, sequence))
    return records
def get_description(self, lang=None):
    """Get the DC description of the object

    :param lang: Lang to retrieve
    :return: Description string representation
    :rtype: Literal
    """
    description_key = RDF_NAMESPACES.CTS.description
    return self.metadata.get_single(key=description_key, lang=lang)
def indexOfClosest( arr, val ):
    """Return the index in arr of the closest float value to val.

    :param arr: iterable of numbers to search.
    :param val: target value.
    :return: index of the element minimizing abs(element - val), or None
        when arr is empty. Ties resolve to the earliest index (strict '<'
        comparison, matching the original behavior).
    """
    best_index = None
    best_distance = None
    for index, item in enumerate(arr):
        distance = math.fabs(item - val)
        # 'is None' replaces the original '== None' (identity test for the
        # sentinel); strict '<' keeps the first of equally-close elements.
        if best_distance is None or distance < best_distance:
            best_index = index
            best_distance = distance
    return best_index
def _infer_binop(self, context):
left = self.left
right = self.right
context = context or contextmod.InferenceContext()
lhs_context = contextmod.copy_context(context)
rhs_context = contextmod.copy_context(context)
lhs_iter = left.infer(context=lhs_context)
rhs_iter = right.infer(context=rhs_... | Binary operation inference logic. |
def _set_upload_status(self, file_data_object, upload_status):
    """Set file_data_object.file_resource.upload_status."""
    data_object_uuid = file_data_object['uuid']
    patch = {
        'uuid': data_object_uuid,
        'value': {'upload_status': upload_status},
    }
    return self.connection.update_data_object(data_object_uuid, patch)
def __execute_rot(self, surface):
    """Executes the rotating operation: rotate the given surface by the
    stored rotation angle and refresh the cached surface extents."""
    rotated = pygame.transform.rotate(surface, self.__rotation)
    self.image = rotated
    self.__resize_surface_extents()
def safe_better_repr(
self, obj, context=None, html=True, level=0, full=False
):
context = context and dict(context) or {}
recursion = id(obj) in context
if not recursion:
context[id(obj)] = obj
try:
rv = self.better_repr(obj, context, html... | Repr with inspect links on objects |
def wrap_all(self, rows: Iterable[Union[Mapping[str, Any], Sequence[Any]]]):
    """Return row tuple for each row in rows."""
    # Lazily wrap every incoming row; work happens only on iteration.
    return (self.wrap(row) for row in rows)
def print_languages_and_exit(lst, status=1, header=True):
    """print a list of languages and exit

    :param lst: iterable of language names, printed one per line.
    :param status: process exit status passed to sys.exit (default 1).
    :param header: when True, print an "Available languages:" banner first.
    """
    lines = []
    if header:
        lines.append("Available languages:")
    lines.extend("- %s" % language for language in lst)
    for text in lines:
        print(text)
    sys.exit(status)
def _terminate_process_iou(self):
if self._iou_process:
log.info('Stopping IOU process for IOU VM "{}" PID={}'.format(self.name, self._iou_process.pid))
try:
self._iou_process.terminate()
except ProcessLookupError:
pass
self._started = ... | Terminate the IOU process if running |
def peek(self, iroute: "InstanceRoute") -> Optional[Value]:
    """Return a value within the receiver's subtree.

    Args:
        iroute: Instance route (relative to the receiver).
    """
    current_value = self.value
    current_schema = self.schema_node
    for selector in iroute:
        current_value, current_schema = selector.peek_step(
            current_value, current_schema)
        # Bail out as soon as the route leaves the populated subtree.
        if current_value is None:
            return None
    return current_value
def delete_sandbox(self, si, logger, vcenter_data_model, delete_sandbox_actions, cancellation_context):
results = []
logger.info('Deleting saved sandbox command starting on ' + vcenter_data_model.default_datacenter)
if not delete_sandbox_actions:
raise Exception('Failed to delete sav... | Deletes a saved sandbox's artifacts
:param vcenter_data_model: VMwarevCenterResourceModel
:param vim.ServiceInstance si: py_vmomi service instance
:type si: vim.ServiceInstance
:param logger: Logger
:type logger: cloudshell.core.logger.qs_logger.get_qs_logger
:param list... |
def get_execution_host_info():
    """Tries to return a tuple describing the execution host.

    Doesn't work for all queueing systems.

    Returns:
        (HOSTNAME, CLUSTER_NAME) -- each component falls back to 'unknown'
        when it cannot be determined.
    """
    host = os.environ.get('HOSTNAME', None)
    cluster = os.environ.get('SGE_O_HOST', None)
    if host is None:
        try:
            import socket
            host = socket.gethostname()
        # Narrowed from a bare 'except:' which also swallowed
        # KeyboardInterrupt/SystemExit; this stays best-effort.
        except Exception:
            pass
    return host or 'unknown', cluster or 'unknown'
def setValue(self, value):
    """Set the attributes value

    @param value: The new value (may be None)
    @type value: basestring
    @return: self
    @rtype: L{Attribute}
    """
    # Coerce anything that is not already a Text instance.
    self.value = value if isinstance(value, Text) else Text(value)
    return self
def get_column(self, chrom, position,
missing_seqs=MissingSequenceHandler.TREAT_AS_ALL_GAPS,
species=None):
blocks = self.get_blocks(chrom, position, position + 1)
if len(blocks) == 0:
raise NoSuchAlignmentColumnError("Request for column on chrom " +
... | Get the alignment column at the specified chromosome and position. |
def _disable_access_key(self, force_disable_self=False):
client = self.client
if self.validate is True:
return
else:
try:
client.update_access_key(
UserName=self._search_user_for_key(),
AccessKeyId=self.access_key_id... | This function first checks to see if the key is already disabled\
if not then it goes to disabling |
def update():
with settings(warn_only=True):
print(cyan('\nInstalling/Updating required packages...'))
pip = local('venv/bin/pip install -U --allow-all-external --src libs -r requirements.txt', capture=True)
if pip.failed:
print(red(pip))
abort("pip exited with return... | Update virtual env with requirements packages. |
def subscribe(self, feedUrl):
    """Adds a feed to the top-level subscription list.

    Subscribing seems idempotent; you can subscribe multiple times
    without error.

    returns True or throws HTTPError
    """
    payload = {'ac': 'subscribe', 's': feedUrl}
    response = self.httpPost(ReaderUrl.SUBSCRIPTION_EDIT_URL, payload)
    # Truthy response containing 'OK' means success.
    return bool(response and 'OK' in response)
def expand_to_chunk_size(self, chunk_size, offset=Vec(0,0,0, dtype=int)):
chunk_size = np.array(chunk_size, dtype=np.float32)
result = self.clone()
result = result - offset
result.minpt = np.floor(result.minpt / chunk_size) * chunk_size
result.maxpt = np.ceil(result.maxpt / chunk_size) * chunk_size ... | Align a potentially non-axis aligned bbox to the grid by growing it
to the nearest grid lines.
Required:
chunk_size: arraylike (x,y,z), the size of chunks in the
dataset e.g. (64,64,64)
Optional:
offset: arraylike (x,y,z), the starting coordinate of the dataset |
def on_successful_login(self, subject, authc_token, account_id):
self.forget_identity(subject)
if authc_token.is_remember_me:
self.remember_identity(subject, authc_token, account_id)
else:
msg = ("AuthenticationToken did not indicate that RememberMe is "
... | Reacts to the successful login attempt by first always
forgetting any previously stored identity. Then if the authc_token
is a ``RememberMe`` type of token, the associated identity
will be remembered for later retrieval during a new user session.
:param subject: the subject whose ident... |
def comments_are_open(content_object):
    """Return whether comments are still open for a given target object."""
    model_moderator = get_model_moderator(content_object.__class__)
    # No registered moderator means nothing ever closes comments.
    if model_moderator is None:
        return True
    return CommentModerator.allow(model_moderator, None, content_object, None)
def extra_metadata(self):
    """Get extra metadata for file in repository."""
    owner_login = self.repository['owner']['login']
    repo_name = self.repository['name']
    tag_name = self.release['tag_name']
    return get_extra_metadata(self.gh.api, owner_login, repo_name, tag_name)
def _add_device_to_device_group(self, device):
    """Add device to device service cluster group.

    :param device: bigip object -- device to add to group
    """
    device_name = get_device_info(device).name
    device_group = pollster(self._get_device_group)(device)
    device_group.devices_s.devices.create(
        name=device_name, partition=self.partition)
    # Poll until the new member is actually visible in the group.
    pollster(self._check_device_exists_in_device_group)(device_name)
def catalogFactory(name, **kwargs):
fn = lambda member: inspect.isclass(member) and member.__module__==__name__
catalogs = odict(inspect.getmembers(sys.modules[__name__], fn))
if name not in list(catalogs.keys()):
msg = "%s not found in catalogs:\n %s"%(name,list(kernels.keys()))
logger.erro... | Factory for various catalogs. |
def read_remote_spec(filename, encoding='binary', cache=True,
show_progress=True, **kwargs):
with get_readable_fileobj(filename, encoding=encoding, cache=cache,
show_progress=show_progress) as fd:
header, wavelengths, fluxes = read_spec(fd, fname=filename, ... | Read FITS or ASCII spectrum from a remote location.
Parameters
----------
filename : str
Spectrum filename.
encoding, cache, show_progress
See :func:`~astropy.utils.data.get_readable_fileobj`.
kwargs : dict
Keywords acceptable by :func:`read_fits_spec` (if FITS) or
... |
def generate_sample_set(self, tags=None):
    """Generate a sample_set that matches the tags or all if tags are not specified.

    Args:
        tags: Match samples against this tag list (or all if not specified)

    Returns:
        The sample_set of those samples matching the tags
    """
    # Normalize a single tag string into a one-element list.
    tag_list = [tags] if isinstance(tags, str) else tags
    matching_md5s = self.data_store.tag_match(tag_list)
    return self.store_sample_set(matching_md5s)
def run(self):
    """Run search command."""
    # Dispatch table instead of an if/elif chain; unknown commands no-op.
    handlers = {
        'compound': self._search_compound,
        'reaction': self._search_reaction,
    }
    handler = handlers.get(self._args.which)
    if handler is not None:
        handler()
def _on_grant(self, grant):
self.set_timeout(grant.expiration_time, ContractState.expired,
self._run_and_terminate, self.contractor.cancelled,
grant)
self.grant = grant
self.set_remote_id(grant.sender_id)
self.update_manager_address(grant... | Called upon receiving the grant. Than calls granted and sets
up reporter if necessary. |
def get(self):
    """Fetches the current state of the alarm from the API and updates the
    object.
    """
    refreshed = self.entity.get_alarm(self)
    # Only merge details when the API actually returned an alarm.
    if refreshed:
        self._add_details(refreshed._info)
def to_sql(self, connection, grammar):
self._add_implied_commands()
statements = []
for command in self._commands:
method = "compile_%s" % command.name
if hasattr(grammar, method):
sql = getattr(grammar, method)(self, command, connection)
i... | Get the raw SQL statements for the blueprint.
:param connection: The connection to use
:type connection: orator.connections.Connection
:param grammar: The grammar to user
:type grammar: orator.schema.grammars.SchemaGrammar
:rtype: list |
def detachChildren(self):
    """Detach and return this element's children.

    @return: The element's children (detached).
    @rtype: [L{Element},...]
    """
    detached_children = self.children
    self.children = []
    # Sever each child's back-reference to this element.
    for detached_child in detached_children:
        detached_child.parent = None
    return detached_children
def signed_gt(a, b):
    """Return a single bit result of signed greater than comparison."""
    a, b = match_bitwidth(as_wires(a), as_wires(b), signed=True)
    # Sign bit of (b - a), corrected by the operands' sign bits, yields
    # the signed comparison result.
    difference = b - a
    return difference[-1] ^ (~a[-1]) ^ (~b[-1])
def getVerifiersIDs(self):
    """Returns the ids from users that have verified at least one analysis
    from this Analysis Request
    """
    unique_ids = set()
    for brain in self.getAnalyses():
        unique_ids.update(brain.getVerificators)
    return list(unique_ids)
def get_random_word(dictionary, min_word_length=3, max_word_length=8):
    """Returns a random word from the dictionary"""
    # Keep drawing until a word within the length bounds comes up.
    candidate = choice(dictionary)
    while not (min_word_length <= len(candidate) <= max_word_length):
        candidate = choice(dictionary)
    return candidate
def exec_(scope, data):
    """Sends the given data to the remote host and waits until the host
    has responded with a prompt.

    If the given data is a list of strings, each item is sent, and
    after each item a prompt is expected.

    This function also causes the response of the command to be stored
    in the built-in __response__ variable.
    """
    conn = scope.get('__connection__')
    collected = []
    for command in data:
        conn.send(command)
        conn.expect_prompt()
        # Drop the echoed command (first line) from each response.
        collected.extend(conn.response.split('\n')[1:])
    scope.define(__response__=collected)
    return True
def _put (self, url_data):
if self.shutdown or self.max_allowed_urls == 0:
return
log.debug(LOG_CACHE, "queueing %s", url_data.url)
key = url_data.cache_url
cache = url_data.aggregate.result_cache
if url_data.has_result or cache.has_result(key):
self.queue... | Put URL in queue, increase number of unfished tasks. |
def get_availabilities(date):
day_of_week = dateutil.parser.parse(date).weekday()
availabilities = []
available_probability = 0.3
if day_of_week == 0:
start_hour = 10
while start_hour <= 16:
if random.random() < available_probability:
appointment_type = get_ra... | Helper function which in a full implementation would feed into a backend API to provide query schedule availability.
The output of this function is an array of 30 minute periods of availability, expressed in ISO-8601 time format.
In order to enable quick demonstration of all possible conversation paths suppor... |
def delete_object(cache, template, indexes):
    """Delete an object in Redis using a pipeline.

    Deletes all fields defined by the template.

    Arguments:
        cache: context manager yielding a redis connection.
        template: a dictionary containing the keys for the object and
            template strings for the corresponding redis keys. The template
            string uses named string interpolation format.
        indexes: mapping used to fill each template string.
    """
    with cache as redis_connection:
        pipeline = redis_connection.pipeline()
        # Each template entry expands to one concrete redis key.
        for field in set(template.keys()):
            pipeline.delete(template[field] % indexes)
        pipeline.execute()
def drop_privileges():
    """Set settings.DROPLET_USER UID for the current process

    After calling this, root operation will be impossible to execute
    See root context manager
    """
    target_uid = int(pwd.getpwnam(settings.DROPLET_USER).pw_uid)
    os.setuid(target_uid)
def get_generic_type(type_tag):
return {
DEVICE_ALARM: TYPE_ALARM,
DEVICE_GLASS_BREAK: TYPE_CONNECTIVITY,
DEVICE_KEYPAD: TYPE_CONNECTIVITY,
DEVICE_REMOTE_CONTROLLER: TYPE_CONNECTIVITY,
DEVICE_SIREN: TYPE_CONNECTIVITY,
DEVICE_STATUS_DISPLAY: TYPE_CONNECTIVITY,
... | Map type tag to generic type. |
def add_frame_widget(self, ref, left=1, top=1, right=20, bottom=1, width=20, height=4, direction="h", speed=1):
if ref not in self.widgets:
widget = widgets.FrameWidget(
screen=self, ref=ref, left=left, top=top, right=right, bottom=bottom, width=width, height=height,
... | Add Frame Widget |
def get_reservation_resources(session, reservation_id, *models):
models_resources = []
reservation = session.GetReservationDetails(reservation_id).ReservationDescription
for resource in reservation.Resources:
if resource.ResourceModelName in models:
models_resources.append(resource)
... | Get all resources of given models in reservation.
:param session: CloudShell session
:type session: cloudshell.api.cloudshell_api.CloudShellAPISession
:param reservation_id: active reservation ID
:param models: list of requested models
:return: list of all resources of models in reservation |
def drop(self):
    """Remove the database by deleting the JSON file."""
    import os
    if not self.path:
        # No backing file: just clear the in-memory data.
        self._data = {}
    elif os.path.exists(self.path):
        os.remove(self.path)
def line_count(fn):
    """Get line count of file

    Args:
        fn (str): Path to file

    Return:
        Number of lines in file (int). Returns 0 for an empty file; the
        original enumerate-based loop left its counter unbound and raised
        UnboundLocalError in that case.
    """
    with open(fn) as f:
        return sum(1 for _ in f)
def getFullParList(configObj):
    """Return a single list of all parameter names included in the configObj
    regardless of which section the parameter was stored
    """
    parameters = []
    for name in configObj.keys():
        entry = configObj[name]
        if isinstance(entry, configobj.Section):
            # Recurse into nested sections and flatten their parameters.
            parameters.extend(getFullParList(entry))
        else:
            parameters.append(name)
    return parameters
def replace(self, scaling_group, name, cooldown, min_entities,
max_entities, metadata=None):
body = self._create_group_config_body(name, cooldown, min_entities,
max_entities, metadata=metadata)
group_id = utils.get_id(scaling_group)
uri = "/%s/%s/config" % (self.uri_b... | Replace an existing ScalingGroup configuration. All of the attributes
must be specified If you wish to delete any of the optional attributes,
pass them in as None. |
def get_additional_handlers():
global _additional_handlers
if not isinstance(_additional_handlers, list):
handlers = []
for name in config.ADDITIONAL_HANDLERS:
module_name, function_name = name.rsplit('.', 1)
function = getattr(import_module(module_name), function_name)
... | Returns the actual functions from the dotted paths specified in ADDITIONAL_HANDLERS. |
def clean_username(self, username):
username_case = settings.CAS_FORCE_CHANGE_USERNAME_CASE
if username_case == 'lower':
username = username.lower()
elif username_case == 'upper':
username = username.upper()
elif username_case is not None:
raise Improp... | Performs any cleaning on the "username" prior to using it to get or
create the user object. Returns the cleaned username.
By default, changes the username case according to
`settings.CAS_FORCE_CHANGE_USERNAME_CASE`. |
async def stop(wallet_name: str) -> None:
LOGGER.debug('RevRegBuilder.stop >>>')
dir_sentinel = join(RevRegBuilder.dir_tails_sentinel(wallet_name))
if isdir(dir_sentinel):
open(join(dir_sentinel, '.stop'), 'w').close()
while any(isfile(join(dir_sentinel, d, '.in-progress'... | Gracefully stop an external revocation registry builder, waiting for its current.
The indy-sdk toolkit uses a temporary directory for tails file mustration,
and shutting down the toolkit removes the directory, crashing the external
tails file write. This method allows a graceful stop to wait fo... |
def update(self, custom_field, params=None, **options):
    """Update a specific, existing custom field via a PUT request on the URL
    for that custom field. Only the fields provided in the `data` block will
    be updated; any unspecified fields will remain unchanged.

    :param custom_field: identifier of the custom field to update.
    :param params: request parameters; defaults to an empty dict. (The
        previous mutable default ``params={}`` risked cross-call sharing.)
    :param options: extra keyword options forwarded to the client.
    :return: the client's PUT response.
    """
    if params is None:
        params = {}
    path = "/custom_fields/%s" % (custom_field)
    return self.client.put(path, params, **options)
def pseudoify(self):
assert self.is_toplevel
assert self.is_multi
assert len(self.multi_rep.siblings) > 0
rep = self.multi_rep
start = min([s.start for s in rep.siblings + [rep]])
end = max([s.end for s in rep.siblings + [rep]])
parent = Feature(None)
pare... | Derive a pseudo-feature parent from the given multi-feature.
The provided multi-feature does not need to be the representative. The
newly created pseudo-feature has the same seqid as the provided multi-
feature, and spans its entire range. Otherwise, the pseudo-feature is
empty. It is u... |
def resid_dev(self, endog, mu, scale=1.):
    r"""
    Gamma deviance residuals

    Parameters
    ----------
    endog : array-like
        Endogenous response variable
    mu : array-like
        Fitted mean response variable
    scale : float, optional
        An optional argument to divide the residuals by scale (currently
        unused in the computation; kept for interface compatibility).

    Returns
    -------
    sign(endog - mu) * sqrt(-2 * (-(endog - mu)/mu + log(endog/mu)))
    """
    # A stray bare 'r' expression (the orphaned raw-docstring prefix from
    # the extracted source) used to sit here and raised NameError at call
    # time; it has been folded back into the docstring above.
    endog_mu = self._clean(endog / mu)
    return np.sign(endog - mu) * np.sqrt(-2 * (-(endog - mu)/mu +
                                               np.log(endog_mu)))
def register_handler(self, handler):
    """Register a new namespace handler."""
    namespace = handler.namespace
    self._handlers[namespace] = handler
    # Give the handler a chance to hook back into this registry.
    handler.registered(self)
def from_elements(cls, elts=None):
    """Create a node of this type from the given list of elements.

    :param elts: The list of elements that the node should contain.
    :type elts: list(NodeNG)

    :returns: A new node containing the given elements.
    :rtype: NodeNG
    """
    node = cls()
    if elts is None:
        node.elts = []
    else:
        # Wrap raw constants so every element is a proper node.
        node.elts = [
            const_factory(element) if _is_const(element) else element
            for element in elts
        ]
    return node
def process_post_tags(self, bulk_mode, api_post, post_tags):
    """Create or update Tags related to a post.

    :param bulk_mode: If True, minimize db operations by bulk creating post objects
    :param api_post: the API data for the post
    :param post_tags: a mapping of Tags keyed by post ID
    :return: None
    """
    post_id = api_post["ID"]
    post_tags[post_id] = []
    for api_tag in six.itervalues(api_post["tags"]):
        processed = self.process_post_tag(bulk_mode, api_tag)
        # Skip tags the processor rejected (falsy result).
        if processed:
            post_tags[post_id].append(processed)
def get_pyquery(self, tree=None, page_numbers=None):
if not page_numbers:
page_numbers = []
if tree is None:
if not page_numbers and self.tree is not None:
tree = self.tree
else:
tree = self.get_tree(page_numbers)
if has... | Wrap given tree in pyquery and return.
If no tree supplied, will generate one from given page_numbers, or
all page numbers. |
def close_position(self, repay_only):
    """Close position.

    Args:
        repay_only (bool): Undocumented by cbpro.

    Returns:
        Undocumented
    """
    payload = json.dumps({'repay_only': repay_only})
    return self._send_message('post', '/position/close', data=payload)
def get_slope(self):
    """Return the slope m of this line segment."""
    rise = self.p1.y - self.p2.y
    run = self.p1.x - self.p2.x
    # Raises ZeroDivisionError for vertical segments, as before.
    return rise / run
def tolerant_metaphone_processor(words):
    """Double metaphone word processor slightly modified so that when no
    words are returned by the algorithm, the original word is returned.
    """
    for word in words:
        emitted = 0
        for candidate in double_metaphone(word):
            if candidate:
                candidate = candidate.strip()
                if candidate:
                    emitted += 1
                    yield candidate
        # Fall back to the raw word when the algorithm produced nothing.
        if not emitted:
            yield word
def RollbackAll(close=None):
    """Rollback all transactions, according Local.conn"""
    if close:
        # 'close' is obsolete; keep emitting the historical warning.
        warnings.simplefilter('default')
        warnings.warn("close parameter will not need at all.", DeprecationWarning)
    for engine in engine_manager.values():
        session = engine.session(create=False)
        if session:
            session.rollback()
def initialise(self):
self._checkWriteMode()
self._createSystemTable()
self._createNetworkTables()
self._createOntologyTable()
self._createReferenceSetTable()
self._createReferenceTable()
self._createDatasetTable()
self._createReadGroupSetTable()
s... | Initialise this data repository, creating any necessary directories
and file paths. |
def parse_attributes(self, attrstring):
if attrstring in [None, '', '.']:
return dict()
attributes = dict()
keyvaluepairs = attrstring.split(';')
for kvp in keyvaluepairs:
if kvp == '':
continue
key, value = kvp.split('=')
i... | Parse an attribute string.
Given a string with semicolon-separated key-value pairs, populate a
dictionary with the given attributes. |
def startDataStoreMachine(self, dataStoreItemName, machineName):
url = self._url + "/items/enterpriseDatabases/%s/machines/%s/start" % (dataStoreItemName, machineName)
params = {
"f": "json"
}
return self._post(url=url, param_dict=params,
security... | Starts the database instance running on the Data Store machine.
Inputs:
dataStoreItemName - name of the item to start
machineName - name of the machine to start on |
def _GetKeyFromRegistry(self):
if not self._registry:
return
try:
self._registry_key = self._registry.GetKeyByPath(self._key_path)
except RuntimeError:
pass
if not self._registry_key:
return
for sub_registry_key in self._registry_key.GetSubkeys():
self.AddSubkey(sub_reg... | Determines the key from the Windows Registry. |
def debug_ratelimit(g):
    """Log debug of github ratelimit information from last API call

    Parameters
    ----------
    g: github.MainClass.Github
        github object
    """
    assert isinstance(g, github.MainClass.Github), type(g)
    message = "github ratelimit: {rl}".format(rl=g.rate_limiting)
    debug(message)
def validate_args(self, qubits: Sequence[Qid]) -> None:
if len(qubits) == 0:
raise ValueError(
"Applied a gate to an empty set of qubits. Gate: {}".format(
repr(self)))
if len(qubits) != self.num_qubits():
raise ValueError(
'Wro... | Checks if this gate can be applied to the given qubits.
By default checks if input is of type Qid and qubit count.
Child classes can override.
Args:
qubits: The collection of qubits to potentially apply the gate to.
Throws:
ValueError: The gate can't be applied... |
def _get_transport(self):
if self.ssh_proxy:
if isinstance(self.ssh_proxy, paramiko.proxy.ProxyCommand):
proxy_repr = repr(self.ssh_proxy.cmd[1])
else:
proxy_repr = repr(self.ssh_proxy)
self.logger.debug('Connecting via proxy: {0}'.format(proxy... | Return the SSH transport to the remote gateway |
def is_multilingual_project(site_id=None):
    """Whether the current Django project is configured for multilingual support."""
    from parler import appsettings
    if site_id is None:
        site_id = getattr(settings, 'SITE_ID', None)
    # Preserve the original short-circuit: a truthy setting value is
    # returned as-is, otherwise fall through to the membership test.
    show_excluded = appsettings.PARLER_SHOW_EXCLUDED_LANGUAGE_TABS
    return show_excluded or site_id in appsettings.PARLER_LANGUAGES
def check_int_param(self, param, low, high, name):
try:
param = int(param)
except:
raise ValueError(
'Parameter {} is not int or similar'.format(name)
)
if low != None or high != None:
if not low <= param <= high:
... | Check if the value of the given parameter is in the given range
and an int.
Designed for testing parameters like `mu` and `eps`.
To pass this function the variable `param` must be able to be converted
into a float with a value between `low` and `high`.
**Args:**
* `para... |
def make_pilothole_cutter(self):
pilothole_radius = self.pilothole_radius
if pilothole_radius is None:
(inner_radius, outer_radius) = self.get_radii()
pilothole_radius = inner_radius + self.pilothole_ratio * (outer_radius - inner_radius)
return cadquery.Workplane('XY') \
... | Make a solid to subtract from an interfacing solid to bore a pilot-hole. |
def _loadcache(cachefile):
cache = {}
if os.path.exists(cachefile):
with open(cachefile) as f:
for line in f:
line = line.split()
if len(line) == 2:
try:
cache[int(line[0])] = float(line[1])
excep... | Returns a dictionary resulting from reading a likelihood cachefile |
def after_submit(analysis):
alsoProvides(analysis, ISubmitted)
promote_to_dependencies(analysis, "submit")
if IRequestAnalysis.providedBy(analysis):
analysis._reflex_rule_process('submit')
ws = analysis.getWorksheet()
if ws:
doActionFor(ws, 'submit')
push_reindex_to_actions_p... | Method triggered after a 'submit' transition for the analysis passed in
is performed. Promotes the submit transition to the Worksheet to which the
analysis belongs to. Note that for the worksheet there is already a guard
that assures the transition to the worksheet will only be performed if all
analyses... |
def _run_with_kvm(self, qemu_path, options):
if sys.platform.startswith("linux") and self.manager.config.get_section_config("Qemu").getboolean("enable_kvm", True) \
and "-no-kvm" not in options:
if os.path.basename(qemu_path) not in ["qemu-system-x86_64", "qemu-system-i386", "qemu-kv... | Check if we could run qemu with KVM
:param qemu_path: Path to qemu
:param options: String of qemu user options
:returns: Boolean True if we need to enable KVM |
def format_error(err_type, err_value, err_trace=None):
if err_trace is None:
err_parts = "".join(traceback.format_exception_only(err_type, err_value)).strip().split(": ", 1)
if len(err_parts) == 1:
err_name, err_msg = err_parts[0], ""
else:
err_name, err_msg = err_par... | Properly formats the specified error. |
def CRPS(label, pred):
    """Custom evaluation metric on CRPS.

    Enforces that each row of ``pred`` is non-decreasing (a valid CDF) by
    taking the running maximum along axis 1, then returns the mean squared
    difference to ``label``.

    Note: ``pred`` is monotonized IN PLACE, matching the original
    element-wise loop's side effect on the caller's array.

    :param label: array of target CDF values, same shape as ``pred``.
    :param pred: float array of predictions, shape (n_samples, n_steps).
    :return: mean of squared differences (float).
    """
    # Vectorized running maximum replaces the O(rows*cols) Python loop:
    # pred[i, j+1] = max(pred[i, j+1], pred[i, j]) for all i, j.
    np.maximum.accumulate(pred, axis=1, out=pred)
    return np.sum(np.square(label - pred)) / label.size
def to_ip(self):
    """Return of copy of the data inside a TDIP container"""
    # Guard clause instead of if/else around the construction.
    if 'chargeability' not in self.data.columns:
        raise Exception('Missing column "chargeability"')
    return reda.TDIP(data=self.data)
def set_inputhook(self, callback):
ignore_CTRL_C()
self._callback = callback
self._callback_pyfunctype = self.PYFUNC(callback)
pyos_inputhook_ptr = self.get_pyos_inputhook()
original = self.get_pyos_inputhook_as_func()
pyos_inputhook_ptr.value = \
ctypes.cast(... | Set PyOS_InputHook to callback and return the previous one. |
def _get_min_addr(self):
    """Get the minimum address out of all regions. We assume self._regions is sorted.

    :return: The minimum address, or None when no regions are known.
    :rtype: int
    """
    if not self._regions:
        # Soot (Java) projects legitimately have no memory regions;
        # anything else indicates a setup problem worth logging.
        if self.project.arch.name != "Soot":
            l.error("self._regions is empty or not properly set.")
        return None
    return next(self._regions.irange())
def xor_key(first, second, trafaret):
trafaret = t.Trafaret._trafaret(trafaret)
def check_(value):
if (first in value) ^ (second in value):
key = first if first in value else second
yield first, t.catch_error(trafaret, value[key]), (key,)
elif first in value and second in... | xor_key - takes `first` and `second` key names and `trafaret`.
Checks if we have only `first` or only `second` in data, not both,
and at least one.
Then checks key value against trafaret. |
def user_id(self):
if not has_flask_login:
return
if not hasattr(current_app, 'login_manager'):
return
try:
is_authenticated = current_user.is_authenticated
except AttributeError:
return
if callable(is_authenticated):
is... | Return the ID of the current request's user |
def predict_maxprob(self, x, **kwargs):
    """Most likely value. Generally equivalent to predict."""
    features = x.values
    return self.base_estimator_.predict(features, **kwargs)
def get_logger(name, level=None):
    """Return a setup logger for the given name

    :param name: The name for the logger. It is advised to use __name__. The logger name will be prepended by "jb.".
    :type name: str
    :param level: the logging level, e.g. logging.DEBUG, logging.INFO etc
    :type level: int
    :returns: Logger
    :rtype: logging.Logger
    """
    logger = logging.getLogger("jb.%s" % name)
    if level is not None:
        logger.setLevel(level)
    return logger
def update_from_json(self, json_device):
self.identifier = json_device['Id']
self.license_plate = json_device['EquipmentHeader']['SerialNumber']
self.make = json_device['EquipmentHeader']['Make']
self.model = json_device['EquipmentHeader']['Model']
self.equipment_id = json_device... | Set all attributes based on API response. |
def get_file_size(self, path):
    """Returns size of the file at given ``path``."""
    blob_id = self._get_id_for_path(path)
    blob = self.repository._repo[blob_id]
    return blob.raw_length()
def clean_tmpdir(path):
    """Invoked atexit, this removes our tmpdir"""
    # Remove only when the path exists and is a directory.
    if os.path.isdir(path) and os.path.exists(path):
        rmtree(path)
def process_dimensions(kdims, vdims):
dimensions = {}
for group, dims in [('kdims', kdims), ('vdims', vdims)]:
if dims is None:
continue
elif isinstance(dims, (tuple, basestring, Dimension, dict)):
dims = [dims]
elif not isinstance(dims, list):
raise V... | Converts kdims and vdims to Dimension objects.
Args:
kdims: List or single key dimension(s) specified as strings,
tuples dicts or Dimension objects.
vdims: List or single value dimension(s) specified as strings,
tuples dicts or Dimension objects.
Returns:
Dictio... |
def example_panel(self, ax, feature):
    """A example panel that just prints the text of the feature."""
    label = '%s:%s-%s' % (feature.chrom, feature.start, feature.stop)
    # Centered in axes coordinates.
    ax.text(0.5, 0.5, label, transform=ax.transAxes)
    return feature
def propagate(self, *arg, **kw):
output = Network.propagate(self, *arg, **kw)
if self.interactive:
self.updateGraphics()
if type(output) == dict:
for layerName in output:
output[layerName] = [float(x) for x in output[layerName]]
return output
... | Propagates activation through the network. |
def keys_list(gandi, fqdn):
keys = gandi.dns.keys(fqdn)
output_keys = ['uuid', 'algorithm', 'algorithm_name', 'ds', 'flags',
'status']
for num, key in enumerate(keys):
if num:
gandi.separator_line()
output_generic(gandi, key, output_keys, justify=15)
return... | List domain keys. |
def remove_and_record_multiple_spaces_in_line(line):
removed_spaces = {}
multispace_matches = re_group_captured_multiple_space.finditer(line)
for multispace in multispace_matches:
removed_spaces[multispace.start()] = \
(multispace.end() - multispace.start() - 1)
line = re_group_captu... | For a given string, locate all ocurrences of multiple spaces
together in the line, record the number of spaces found at each
position, and replace them with a single space.
@param line: (string) the text line to be processed for multiple
spaces.
@return: (tuple) countaining a diction... |
def create_lzma(archive, compression, cmd, verbosity, interactive, filenames):
    """Create an LZMA archive with the lzma Python module."""
    # 'alone' selects the legacy .lzma container format.
    return _create(archive, compression, cmd, 'alone', verbosity, filenames)
def spin(self):
    """Perform a single spin"""
    for frame in self.spinchars:
        text = "".join((self.msg, "...\t", frame, "\r"))
        # Keep the last rendered frame around for callers.
        self.string = text
        self.out.write(text.encode('utf-8'))
        self.out.flush()
        time.sleep(self.waittime)
def make_for_loop(loop_body_instrs, else_body_instrs, context):
iterator_expr = make_expr(
popwhile(not_a(instrs.GET_ITER), loop_body_instrs, side='left')
)
loop_body_instrs.popleft()
top_of_loop = loop_body_instrs.popleft()
target = make_assign_target(
loop_body_instrs.popleft(),
... | Make an ast.For node. |
def call(self, obj, name, method, args, kwargs):
if name in self._callback_registry:
beforebacks, afterbacks = zip(*self._callback_registry.get(name, []))
hold = []
for b in beforebacks:
if b is not None:
call = Data(name=name, kwargs=kwarg... | Trigger a method along with its beforebacks and afterbacks.
Parameters
----------
name: str
The name of the method that will be called
args: tuple
The arguments that will be passed to the base method
kwargs: dict
The keyword args that will be ... |
def authorization_header(oauth_params):
    """Return Authorization header"""
    # NOTE(review): urllib.quote is the Python 2 API; under Python 3 this
    # would need urllib.parse.quote -- confirm the target runtime.
    quoted_pairs = ['{0}="{1}"'.format(key, urllib.quote(str(val)))
                    for key, val in oauth_params.items()]
    return 'OAuth realm="",' + ','.join(quoted_pairs)
def from_two_dim_array(cls, cols, rows, twoDimArray):
    """Create a new Matrix instance from a two dimensional array.

    :param integer cols: The number of columns for the Matrix.
    :param integer rows: The number of rows for the Matrix.
    :param list twoDimArray: A two dimensional column based array.
    :return: a Matrix built from the column-based data.
    """
    # Input is column-based and already two-dimensional.
    return Matrix(cols, rows, twoDimArray,
                  rowBased=False, isOneDimArray=False)
def pexpireat(self, key, timestamp):
    """Set expire timestamp on key, timestamp in milliseconds.

    :raises TypeError: if timeout is not int
    """
    if not isinstance(timestamp, int):
        raise TypeError("timestamp argument must be int, not {!r}"
                        .format(timestamp))
    pending = self.execute(b'PEXPIREAT', key, timestamp)
    return wait_convert(pending, bool)
def debug(self, value):
    """Turn on debug logging if necessary.

    :param value: Value of debug flag
    """
    self._debug = value
    if not self._debug:
        return
    # Raise the root logger's verbosity when debugging is enabled.
    logging.getLogger().setLevel(logging.DEBUG)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.