code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
def maybe_from_tuple(tup_or_range):
if isinstance(tup_or_range, tuple):
return from_tuple(tup_or_range)
elif isinstance(tup_or_range, range):
return tup_or_range
raise ValueError(
'maybe_from_tuple expects a tuple or range, got %r: %r' % (
type(tup_or_range).__name__,
... | Convert a tuple into a range but pass ranges through silently.
This is useful to ensure that input is a range so that attributes may
be accessed with `.start`, `.stop` or so that containment checks are
constant time.
Parameters
----------
tup_or_range : tuple or range
A tuple to pass t... |
def authorization_code(self, code, redirect_uri):
return self._token_request(grant_type='authorization_code', code=code,
redirect_uri=redirect_uri) | Retrieve access token by `authorization_code` grant.
https://tools.ietf.org/html/rfc6749#section-4.1.3
:param str code: The authorization code received from the authorization
server.
:param str redirect_uri: the identical value of the "redirect_uri"
parameter in the aut... |
def get(self, sid):
return TranscriptionContext(self._version, account_sid=self._solution['account_sid'], sid=sid, ) | Constructs a TranscriptionContext
:param sid: The unique string that identifies the resource
:returns: twilio.rest.api.v2010.account.transcription.TranscriptionContext
:rtype: twilio.rest.api.v2010.account.transcription.TranscriptionContext |
def _check_devices(self):
"Enumerate OpenVR tracked devices and check whether any need to be initialized"
for i in range(1, len(self.poses)):
pose = self.poses[i]
if not pose.bDeviceIsConnected:
continue
if not pose.bPoseIsValid:
contin... | Enumerate OpenVR tracked devices and check whether any need to be initialized |
def _get_container_infos(config, container):
client = _get_client(config)
infos = None
try:
infos = _set_id(client.inspect_container(container))
except Exception:
pass
return infos | Get container infos
container
Image Id / grain name
return: dict |
def fromProfileName(cls, name):
session = bones.SessionAPI.fromProfileName(name)
return cls(session) | Return an `Origin` from a given configuration profile name.
:see: `ProfileStore`. |
def _read(path, encoding="utf-8", comment=";;;"):
if path:
if isinstance(path, basestring) and os.path.exists(path):
if PY2:
f = codecs.open(path, 'r', encoding='utf-8')
else:
f = open(path, 'r', encoding='utf-8')
elif isinstance(path, basestri... | Returns an iterator over the lines in the file at the given path,
strippping comments and decoding each line to Unicode. |
def check_version(current_version: str):
app_version = parse_version(current_version)
while True:
try:
_do_check_version(app_version)
except requests.exceptions.HTTPError as herr:
click.secho('Error while checking for version', fg='red')
print(herr)
ex... | Check periodically for a new release |
def get_swift_codename(version):
codenames = [k for k, v in six.iteritems(SWIFT_CODENAMES) if version in v]
if len(codenames) > 1:
for codename in reversed(codenames):
releases = UBUNTU_OPENSTACK_RELEASE
release = [k for k, v in six.iteritems(releases) if codename in v]
... | Determine OpenStack codename that corresponds to swift version. |
def reverseCommit(self):
self.baseClass.setText(self.oldText)
self.qteWidget.SCISetStylingEx(0, 0, self.style) | Replace the current widget content with the original text.
Note that the original text has styling information available,
whereas the new text does not. |
def schema_import(conn, dbpath):
conn.execute(
"ATTACH DATABASE ? AS source", (str(dbpath),))
conn.execute(
"INSERT OR IGNORE INTO profiles (name, data)"
" SELECT name, data FROM source.profiles"
" WHERE data IS NOT NULL")
conn.commit()
conn.execute(
"DETACH DATAB... | Import profiles from another database.
This does not overwrite existing profiles in the target database. Profiles
in the source database that share names with those in the target database
are ignored.
:param conn: A connection to an SQLite3 database into which to copy
profiles.
:param dbpa... |
def close(self, status=1000, reason=u''):
try:
if self.closed is False:
close_msg = bytearray()
close_msg.extend(struct.pack("!H", status))
if _check_unicode(reason):
close_msg.extend(reason.encode('utf-8'))
else:
... | Send Close frame to the client. The underlying socket is only closed
when the client acknowledges the Close frame.
status is the closing identifier.
reason is the reason for the close. |
def _get_annotation_heading(self, handler, route, heading=None):
if hasattr(handler, '_doctor_heading'):
return handler._doctor_heading
heading = ''
handler_path = str(handler)
try:
handler_file_name = handler_path.split('.')[-2]
except IndexError:
... | Returns the heading text for an annotation.
Attempts to get the name of the heading from the handler attribute
`schematic_title` first.
If `schematic_title` it is not present, it attempts to generate
the title from the class path.
This path: advertiser_api.handlers.foo_bar.FooL... |
def scene_add(frames):
reader = MessageReader(frames)
results = reader.string("command").uint32("animation_id").string("name").uint8_3("color").uint32("velocity").string("config").get()
if results.command != "scene.add":
raise MessageParserError("Command is not 'scene.add'")
... | parse a scene.add message |
def WriteHuntOutputPluginsStates(self, hunt_id, states, cursor=None):
columns = ", ".join(_HUNT_OUTPUT_PLUGINS_STATES_COLUMNS)
placeholders = mysql_utils.Placeholders(
2 + len(_HUNT_OUTPUT_PLUGINS_STATES_COLUMNS))
hunt_id_int = db_utils.HuntIDToInt(hunt_id)
for index, state in enumerate(states):... | Writes hunt output plugin states for a given hunt. |
def get_samples(self, init_points_count):
init_points_count = self._adjust_init_points_count(init_points_count)
samples = np.empty((init_points_count, self.space.dimensionality))
random_design = RandomDesign(self.space)
random_design.fill_noncontinous_variables(samples)
if self.s... | This method may return less points than requested.
The total number of generated points is the smallest closest integer of n^d to the selected amount of points. |
def _load(self, **kwargs):
if 'uri' in self._meta_data:
error = "There was an attempt to assign a new uri to this "\
"resource, the _meta_data['uri'] is %s and it should"\
" not be changed." % (self._meta_data['uri'])
raise URICreationCollision(err... | wrapped with load, override that in a subclass to customize |
def s_find_first(pred, first, lst):
if pred(first):
return first
elif lst:
return s_find_first(pred, unquote(lst[0]), lst[1:])
else:
return None | Evaluate `first`; if predicate `pred` succeeds on the result of `first`,
return the result; otherwise recur on the first element of `lst`.
:param pred: a predicate.
:param first: a promise.
:param lst: a list of quoted promises.
:return: the first element for which predicate is true. |
def _getDefaultCombinedL4Params(self, numInputBits, inputSize,
numExternalInputBits, externalInputSize,
L2CellCount):
sampleSize = numExternalInputBits + numInputBits
activationThreshold = int(max(numExternalInputBits, numInputBits) * .6)
m... | Returns a good default set of parameters to use in a combined L4 region. |
def get_attribute(self, node, column):
if column > 0 and column < len(self.__horizontal_headers):
return node.get(self.__horizontal_headers[self.__horizontal_headers.keys()[column]], None) | Returns the given Node attribute associated to the given column.
:param node: Node.
:type node: AbstractCompositeNode or GraphModelNode
:param column: Column.
:type column: int
:return: Attribute.
:rtype: Attribute |
def stop_playback(self):
self._sink.flush()
self._sink.stop()
self._playing = False | Stop playback from the audio sink. |
def instruction_size(op, opc):
if op < opc.HAVE_ARGUMENT:
return 2 if opc.version >= 3.6 else 1
else:
return 2 if opc.version >= 3.6 else 3 | For a given opcode, `op`, in opcode module `opc`,
return the size, in bytes, of an `op` instruction.
This is the size of the opcode (1 byte) and any operand it has. In
Python before version 3.6 this will be either 1 or 3 bytes. In
Python 3.6 or later, it is 2 bytes or a "word". |
def convert(cls, obj, parent):
replacement_type = cls._type_mapping.get(type(obj))
if replacement_type is not None:
new = replacement_type(obj)
new.parent = parent
return new
return obj | Converts objects to registered tracked types
This checks the type of the given object against the registered tracked
types. When a match is found, the given object will be converted to the
tracked type, its parent set to the provided parent, and returned.
If its type does not occur in ... |
def do_cleanup(cleanup):
log.info('Cleaning up after exception')
for leftover in cleanup:
what = leftover['what']
item = leftover['item']
if what == 'domain':
log.info('Cleaning up %s %s', what, item.name())
try:
item.destroy()
log.... | Clean up clone domain leftovers as much as possible.
Extra robust clean up in order to deal with some small changes in libvirt
behavior over time. Passed in volumes and domains are deleted, any errors
are ignored. Used when cloning/provisioning a domain fails.
:param cleanup: list containing dictonari... |
def update(self):
if self.single_channel:
self.im.set_data(self.data[self.ind, :, :])
else:
self.im.set_data(self.data[self.ind, :, :, :])
self.ax.set_ylabel('time frame %s' % self.ind)
self.im.axes.figure.canvas.draw() | Updates image to be displayed with new time frame. |
def register_views(*args):
config = args[0]
settings = config.get_settings()
pages_config = settings[CONFIG_MODELS]
resources = resources_of_config(pages_config)
for resource in resources:
if hasattr(resource, '__table__')\
and not hasattr(resource, 'model'):
cont... | Registration view for each resource from config. |
def statuses_show(self, id, trim_user=None, include_my_retweet=None,
include_entities=None):
params = {'id': id}
set_bool_param(params, 'trim_user', trim_user)
set_bool_param(params, 'include_my_retweet', include_my_retweet)
set_bool_param(params, 'include_entities'... | Returns a single Tweet, specified by the id parameter.
https://dev.twitter.com/docs/api/1.1/get/statuses/show/%3Aid
:param str id:
(*required*) The numerical ID of the desired tweet.
:param bool trim_user:
When set to ``True``, the tweet's user object includes only the... |
def get_culprit(omit_top_frames=1):
try:
caller_stack = stack()[omit_top_frames:]
while len(caller_stack) > 0:
frame = caller_stack.pop(0)
filename = frame[1]
if '<decorator' in filename or __file__ in filename:
continue
else:
... | get the filename and line number calling this.
Parameters
----------
omit_top_frames: int, default=1
omit n frames from top of stack stack. Purpose is to get the real
culprit and not intermediate functions on the stack.
Returns
-------
(filename: str, fileno: int)
filename a... |
def su(self) -> 'Gate':
rank = 2**self.qubit_nb
U = asarray(self.asoperator())
U /= np.linalg.det(U) ** (1/rank)
return Gate(tensor=U, qubits=self.qubits) | Convert gate tensor to the special unitary group. |
def output_size(self) -> Tuple[Sequence[Shape], Sequence[Shape], Sequence[Shape], int]:
return self._cell.output_size | Returns the simulation output size. |
def value_to_sql_str(v):
if v is None:
return 'null'
if type(v) in (types.IntType, types.FloatType, types.LongType):
return str(v)
if type(v) in (types.StringType, types.UnicodeType):
return "'%s'" %(v.replace(u"'", u"\\'"))
if isinstance(v, datetime):
return "'%s'" %(v.s... | transform a python variable to the appropriate representation in SQL |
def _GetTableNames(self, database):
table_names = []
for esedb_table in database.tables:
table_names.append(esedb_table.name)
return table_names | Retrieves the table names in a database.
Args:
database (pyesedb.file): ESE database.
Returns:
list[str]: table names. |
def calculate_file_access_time(workflow_workspace):
access_times = {}
for subdir, dirs, files in os.walk(workflow_workspace):
for file in files:
file_path = os.path.join(subdir, file)
access_times[file_path] = os.stat(file_path).st_atime
return access_times | Calculate access times of files in workspace. |
def get_file_extension(filepath):
_ext = os.path.splitext(filepath)[-1]
if _ext:
return _ext[1:] if _ext.startswith('.') else _ext
return '' | Copy if anyconfig.utils.get_file_extension is not available.
>>> get_file_extension("/a/b/c")
''
>>> get_file_extension("/a/b.txt")
'txt'
>>> get_file_extension("/a/b/c.tar.xz")
'xz' |
def start_msstitch(exec_drivers, sysargs):
parser = populate_parser(exec_drivers)
args = parser.parse_args(sysargs[1:])
args.func(**vars(args)) | Passed all drivers of executable, checks which command is passed to
the executable and then gets the options for a driver, parses them from
command line and runs the driver |
def fit_df(self, dfs, pstate_col=PSTATE_COL):
obs_cols = list(self.emission_name)
obs = [df[df.columns.difference([pstate_col])][obs_cols].values for df in dfs]
pstates = [df[pstate_col].values for df in dfs]
return self.fit(obs, pstates) | Convenience function to fit a model from a list of dataframes |
def pick(self, req_authn_context=None):
if req_authn_context is None:
return self._pick_by_class_ref(UNSPECIFIED, "minimum")
if req_authn_context.authn_context_class_ref:
if req_authn_context.comparison:
_cmp = req_authn_context.comparison
else:
... | Given the authentication context find zero or more places where
the user could be sent next. Ordered according to security level.
:param req_authn_context: The requested context as an
RequestedAuthnContext instance
:return: An URL |
def get(self, sid):
return AuthCallsIpAccessControlListMappingContext(
self._version,
account_sid=self._solution['account_sid'],
domain_sid=self._solution['domain_sid'],
sid=sid,
) | Constructs a AuthCallsIpAccessControlListMappingContext
:param sid: The unique string that identifies the resource
:returns: twilio.rest.api.v2010.account.sip.domain.auth_types.auth_calls_mapping.auth_calls_ip_access_control_list_mapping.AuthCallsIpAccessControlListMappingContext
:rtype: twili... |
def set_custom_getter_compose(custom_getter):
tf.get_variable_scope().set_custom_getter(
_compose_custom_getters(tf.get_variable_scope().custom_getter,
custom_getter)) | Set a custom getter in the current variable scope.
Do not overwrite the existing custom getter - rather compose with it.
Args:
custom_getter: a custom getter. |
def help(*args):
from . import commands
parser = argparse.ArgumentParser(prog="%s %s" % (__package__, help.__name__), description=help.__doc__)
parser.add_argument('COMMAND', help="command to show help for", nargs="?", choices=__all__)
args = parser.parse_args(args)
if args.COMMAND:
for l in... | Prints help. |
def rotate_image(filename, line, sdir, image_list):
file_loc = get_image_location(filename, sdir, image_list)
degrees = re.findall('(angle=[-\\d]+|rotate=[-\\d]+)', line)
if len(degrees) < 1:
return False
degrees = degrees[0].split('=')[-1].strip()
if file_loc is None or file_loc == 'ERROR' ... | Rotate a image.
Given a filename and a line, figure out what it is that the author
wanted to do wrt changing the rotation of the image and convert the
file so that this rotation is reflected in its presentation.
:param: filename (string): the name of the file as specified in the TeX
:param: line (... |
def hashmodel(model, library=None):
library = library or 'python-stdnet'
meta = model._meta
sha = hashlib.sha1(to_bytes('{0}({1})'.format(library, meta)))
hash = sha.hexdigest()[:8]
meta.hash = hash
if hash in _model_dict:
raise KeyError('Model "{0}" already in hash table.\
Rena... | Calculate the Hash id of metaclass ``meta`` |
def fit_transform(self, X, y, step_size=0.1, init_weights=None, warm_start=False):
self.fit(X=X, y=y, step_size=step_size, init_weights=init_weights, warm_start=warm_start)
return self.transform(X=X) | Fit optimizer to X, then transforms X. See `fit` and `transform` for further explanation. |
def send_event(self, name, *args, **kwargs):
n = len(self._bridge_queue)
self._bridge_queue.append((name, args))
if n == 0:
self._bridge_last_scheduled = time()
self.deferred_call(self._bridge_send)
return
elif kwargs.get('now'):
self._brid... | Send an event to the native handler. This call is queued and
batched.
Parameters
----------
name : str
The event name to be processed by MainActivity.processMessages.
*args: args
The arguments required by the event.
**kwargs: kwargs
O... |
def approx_contains(self, other, atol):
other = np.atleast_1d(other)
return (other.shape == (self.ndim,) and
all(np.any(np.isclose(vector, coord, atol=atol, rtol=0.0))
for vector, coord in zip(self.coord_vectors, other))) | Test if ``other`` belongs to this grid up to a tolerance.
Parameters
----------
other : `array-like` or float
The object to test for membership in this grid
atol : float
Allow deviations up to this number in absolute value
per vector entry.
E... |
def repo(name: str, owner: str) -> snug.Query[dict]:
request = snug.GET(f'https://api.github.com/repos/{owner}/{name}')
response = yield request
return json.loads(response.content) | a repo lookup by owner and name |
def _check_span_id(self, span_id):
if span_id is None:
return None
assert isinstance(span_id, six.string_types)
if span_id is INVALID_SPAN_ID:
logging.warning(
'Span_id {} is invalid (cannot be all zero)'.format(span_id))
self.from_header = Fal... | Check the format of the span_id to ensure it is 16-character hex
value representing a 64-bit number. If span_id is invalid, logs a
warning message and returns None
:type span_id: str
:param span_id: Identifier for the span, unique within a span.
:rtype: str
:returns: Sp... |
def events(self):
if not self.event_reflector:
return []
events = []
for event in self.event_reflector.events:
if event.involved_object.name != self.pod_name:
continue
if self._last_event and event.metadata.uid == self._last_event:
... | Filter event-reflector to just our events
Returns list of all events that match our pod_name
since our ._last_event (if defined).
._last_event is set at the beginning of .start(). |
def main(ctx, connection):
ctx.obj = Manager(connection=connection)
ctx.obj.bind() | Command line interface for PyBEL. |
def run_stop_backup(cls):
def handler(popen):
assert popen.returncode != 0
raise UserException('Could not stop hot backup')
return cls._dict_transform(psql_csv_run(
"SELECT file_name, "
" lpad(file_offset::text, 8, '0') AS file_offset "
... | Stop a hot backup, if it was running, or error
Return the last WAL file name and position that is required to
gain consistency on the captured heap. |
def get_out_of_order(list_of_numbers):
result = []
for i in range(len(list_of_numbers)):
if i == 0:
continue
if list_of_numbers[i] < list_of_numbers[i - 1]:
result.append((list_of_numbers[i - 1], list_of_numbers[i]))
return result | Returns elements that break the monotonically non-decreasing trend.
This is used to find instances of global step values that are "out-of-order",
which may trigger TensorBoard event discarding logic.
Args:
list_of_numbers: A list of numbers.
Returns:
A list of tuples in which each tuple are two eleme... |
def _choose_capture_side(self):
ALWAYS_RUNNING_NODES_TYPE = ("cloud", "nat", "ethernet_switch", "ethernet_hub")
for node in self._nodes:
if node["node"].compute.id == "local" and node["node"].node_type in ALWAYS_RUNNING_NODES_TYPE and node["node"].status == "started":
return ... | Run capture on the best candidate.
The ideal candidate is a node who on controller server and always
running (capture will not be cut off)
:returns: Node where the capture should run |
def verify(path):
valid = False
try:
zf = zipfile.ZipFile(path)
except (zipfile.BadZipfile, IsADirectoryError):
pass
else:
names = sorted(zf.namelist())
names = [nn for nn in names if nn.endswith(".tif")]
names = [nn for nn in n... | Verify that `path` is a zip file with Phasics TIFF files |
def visit_global(self, node, parent):
newnode = nodes.Global(
node.names,
getattr(node, "lineno", None),
getattr(node, "col_offset", None),
parent,
)
if self._global_names:
for name in node.names:
self._global_names[-1].... | visit a Global node to become astroid |
def _decrypt_entity(entity, encrypted_properties_list, content_encryption_key, entityIV, isJavaV1):
_validate_not_none('entity', entity)
decrypted_entity = deepcopy(entity)
try:
for property in entity.keys():
if property in encrypted_properties_list:
value = entity[proper... | Decrypts the specified entity using AES256 in CBC mode with 128 bit padding. Unwraps the CEK
using either the specified KEK or the key returned by the key_resolver. Properties
specified in the encrypted_properties_list, will be decrypted and decoded to utf-8 strings.
:param entity:
The entity bei... |
def parse_feature(obj):
if hasattr(obj, '__geo_interface__'):
gi = obj.__geo_interface__
if gi['type'] in geom_types:
return wrap_geom(gi)
elif gi['type'] == 'Feature':
return gi
try:
shape = wkt.loads(obj)
return wrap_geom(shape.__geo_interface__)... | Given a python object
attemp to a GeoJSON-like Feature from it |
def complete_contexts(self):
if self._complete_contexts:
return self._complete_contexts
self.context()
return self._complete_contexts | Return a list of interfaces that have satisfied contexts. |
def remove_component(self, entity, component_type):
relation = self._get_relation(component_type)
del relation[entity]
self._entities_with(component_type).remove(entity) | Remove the component of component_type from entity.
Long-hand for :func:`essence.Entity.remove`.
:param entity: entity to associate
:type entity: :class:`essence.Entity`
:param component_type: Type of component
:type component_type: The :class:`type` of a :class:`Component` sub... |
def run_check(self, check, argument_names):
arguments = []
for name in argument_names:
arguments.append(getattr(self, name))
return check(*arguments) | Run a check plugin. |
def dem(bounds, src_crs, dst_crs, out_file, resolution):
if not dst_crs:
dst_crs = "EPSG:3005"
bcdata.get_dem(bounds, out_file=out_file, src_crs=src_crs, dst_crs=dst_crs, resolution=resolution) | Dump BC DEM to TIFF |
def attach(self,
image_in,
sampler=None,
show=True):
if len(image_in.shape) < 3:
raise ValueError('Image must be atleast 3D')
if sampler is None:
temp_sampler = self.sampler
else:
temp_sampler = sampler
slic... | Attaches the relevant cross-sections to each axis.
Parameters
----------
attach_image : ndarray
The image to be attached to the collage, once it is created.
Must be atleast 3d.
sampler : str or list or callable
selection strategy: to identify the ty... |
def install(self, pip_args=None):
if path.isdir(self.env):
print_pretty("<FG_RED>This seems to already be installed.<END>")
else:
print_pretty("<FG_BLUE>Creating environment {}...<END>\n".format(self.env))
self.create_env()
self.install_program(pip_args)
... | Install the program and put links in place. |
async def create_virtual_environment(loop=None):
tmp_dir = tempfile.mkdtemp()
venv_dir = os.path.join(tmp_dir, VENV_NAME)
proc1 = await asyncio.create_subprocess_shell(
'virtualenv {}'.format(venv_dir), loop=loop)
await proc1.communicate()
if sys.platform == 'win32':
python = os.path... | Create a virtual environment, and return the path to the virtual env
directory, which should contain a "bin" directory with the `python` and
`pip` binaries that can be used to a test install of a software package.
:return: the path to the virtual environment, its python, and its site pkgs |
def run(self, messages, env=None):
if self.args.score or self.args.unlock or self.args.testing:
return
tests = self.assignment.specified_tests
for test in tests:
if self.args.suite and hasattr(test, 'suites'):
test.run_only = int(self.args.suite)
... | Run gradeable tests and print results and return analytics.
RETURNS:
dict; a mapping of test name -> JSON-serializable object. It is up to
each test to determine what kind of data it wants to return as
significant for analytics. However, all tests must include the number
passed,... |
def get_external_account(resource_root, name, view=None):
return call(resource_root.get,
EXTERNAL_ACCOUNT_FETCH_PATH % ("account", name,),
ApiExternalAccount, False, params=view and dict(view=view) or None) | Lookup an external account by name
@param resource_root: The root Resource object.
@param name: Account name
@param view: View
@return: An ApiExternalAccount object |
def stop(self):
self.working = False
for w in self.workers:
w.join()
self.workers = [] | Stops the worker threads and waits for them to finish |
def focus(self, force_first=False, force_last=False, force_column=None,
force_widget=None):
self._has_focus = True
if force_widget is not None and force_column is not None:
self._live_col = force_column
self._live_widget = force_widget
elif force_first:
... | Call this to give this Layout the input focus.
:param force_first: Optional parameter to force focus to first widget.
:param force_last: Optional parameter to force focus to last widget.
:param force_column: Optional parameter to mandate the new column index.
:param force_widget: Option... |
def get_query_schema(self, job_id):
query_reply = self.get_query_results(job_id, offset=0, limit=0)
if not query_reply['jobComplete']:
logger.warning('BigQuery job %s not complete' % job_id)
raise UnfinishedQueryException()
return query_reply['schema']['fields'] | Retrieve the schema of a query by job id.
Parameters
----------
job_id : str
The job_id that references a BigQuery query
Returns
-------
list
A ``list`` of ``dict`` objects that represent the schema. |
def dbmax_stddev(self, value=None):
if value is not None:
try:
value = float(value)
except ValueError:
raise ValueError('value {} need to be of type float '
'for field `dbmax_stddev`'.format(value))
self._dbmax_stdd... | Corresponds to IDD Field `dbmax_stddev`
Standard deviation of extreme annual maximum dry-bulb temperature
Args:
value (float): value for IDD Field `dbmax_stddev`
Unit: C
if `value` is None it will not be checked against the
specification and i... |
def attachable(name, path=None):
cachekey = 'lxc.attachable{0}{1}'.format(name, path)
try:
return __context__[cachekey]
except KeyError:
_ensure_exists(name, path=path)
log.debug('Checking if LXC container %s is attachable', name)
cmd = 'lxc-attach'
if path:
... | Return True if the named container can be attached to via the lxc-attach
command
path
path to the container parent
default: /var/lib/lxc (system default)
.. versionadded:: 2015.8.0
CLI Example:
.. code-block:: bash
salt 'minion' lxc.attachable ubuntu |
def _try_fetch(self, size=None):
if self._query_job is None:
raise exceptions.InterfaceError(
"No query results: execute() must be called before fetch."
)
is_dml = (
self._query_job.statement_type
and self._query_job.statement_type.upper() ... | Try to start fetching data, if not yet started.
Mutates self to indicate that iteration has started. |
def conj(self, out=None):
if out is None:
return self.space.element(self.tensor.conj())
else:
self.tensor.conj(out=out.tensor)
return out | Complex conjugate of this element.
Parameters
----------
out : `DiscreteLpElement`, optional
Element to which the complex conjugate is written.
Must be an element of this element's space.
Returns
-------
out : `DiscreteLpElement`
The ... |
def unpack_rgb(packed):
orig_shape = None
if isinstance(packed, np.ndarray):
assert packed.dtype == int
orig_shape = packed.shape
packed = packed.reshape((-1, 1))
rgb = ((packed >> 16) & 0xff,
(packed >> 8) & 0xff,
(packed) & 0xff)
if orig_shape is None:
... | Unpacks a single integer or array of integers into one or more
24-bit RGB values. |
def as_string(self):
if self.headers_only:
self.msgobj = self._get_content()
from email.generator import Generator
fp = StringIO()
g = Generator(fp, maxheaderlen=60)
g.flatten(self.msgobj)
text = fp.getvalue()
return text | Get the underlying message object as a string |
def _initLayerCtors(self):
ctors = {
'lmdb': s_lmdblayer.LmdbLayer,
'remote': s_remotelayer.RemoteLayer,
}
self.layrctors.update(**ctors) | Registration for built-in Layer ctors |
def parse_multipart_upload_result(data):
root = S3Element.fromstring('CompleteMultipartUploadResult', data)
return MultipartUploadResult(
root.get_child_text('Bucket'),
root.get_child_text('Key'),
root.get_child_text('Location'),
root.get_etag_elem()
) | Parser for complete multipart upload response.
:param data: Response data for complete multipart upload.
:return: :class:`MultipartUploadResult <MultipartUploadResult>`. |
def integrate(self, function, lower_bound, upper_bound):
ret = 0.0
n = self.nsteps
xStep = (float(upper_bound) - float(lower_bound)) / float(n)
self.log_info("xStep" + str(xStep))
x = lower_bound
val1 = function(x)
self.log_info("val1: " + str(val1))
for i... | Calculates the integral of the given one dimensional function
in the interval from lower_bound to upper_bound, with the simplex integration method. |
def drdlat(r, lon, lat):
r = ctypes.c_double(r)
lon = ctypes.c_double(lon)
lat = ctypes.c_double(lat)
jacobi = stypes.emptyDoubleMatrix()
libspice.drdlat_c(r, lon, lat, jacobi)
return stypes.cMatrixToNumpy(jacobi) | Compute the Jacobian of the transformation from latitudinal to
rectangular coordinates.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/drdlat_c.html
:param r: Distance of a point from the origin.
:type r: float
:param lon: Angle of the point from the XZ plane in radians.
:type lon: fl... |
def aa_counts(aln, weights=None, gap_chars='-.'):
if weights is None:
counts = Counter()
for rec in aln:
seq_counts = Counter(str(rec.seq))
counts.update(seq_counts)
else:
if weights == True:
weights = sequence_weights(aln)
else:
as... | Calculate the amino acid frequencies in a set of SeqRecords.
Weights for each sequence in the alignment can be given as a list/tuple,
usually calculated with the sequence_weights function. For convenience, you
can also pass "weights=True" and the weights will be calculated with
sequence_weights here. |
def join(self, timeout_s=None):
if not self.thread:
return False
self.thread.join(timeout_s)
return self.running | Joins blocking until the interval ends or until timeout is reached.
Args:
timeout_s: The time in seconds to wait, defaults to forever.
Returns:
True if the interval is still running and we reached the timeout. |
def to_text(sentence):
text = ""
for i, tok in enumerate(sentence.token):
if i != 0:
text += tok.before
text += tok.word
return text | Helper routine that converts a Sentence protobuf to a string from
its tokens. |
def login(self, username, password=None, blob=None, zeroconf=None):
username = utils.to_char(username)
if password is not None:
password = utils.to_char(password)
spotifyconnect.Error.maybe_raise(
lib.SpConnectionLoginPassword(
username, passwo... | Authenticate to Spotify's servers.
You can login with one of three combinations:
- ``username`` and ``password``
- ``username`` and ``blob``
- ``username`` and ``zeroconf``
To get the ``blob`` string, you must once log in with ``username`` and
``password``. You'll then... |
def get_labels(self, depth=None):
labels = libCopy.deepcopy(self.labels)
if depth is None or depth > 0:
for element in self.elements:
if isinstance(element, CellReference):
labels.extend(
element.get_labels(None if depth is None els... | Returns a list with a copy of the labels in this cell.
Parameters
----------
depth : integer or ``None``
If not ``None``, defines from how many reference levels to
retrieve labels from.
Returns
-------
out : list of ``Label``
List con... |
def get(self, name, hint):
if name:
return name
if hint not in self._counter:
self._counter[hint] = 0
name = '%s%d' % (hint, self._counter[hint])
self._counter[hint] += 1
return name | Get the canonical name for a symbol.
This is the default implementation.
If the user specifies a name,
the user-specified name will be used.
When user does not specify a name, we automatically generate a
name based on the hint string.
Parameters
----------
... |
def distill(p, K):
q = p.reshape(p.shape[0], -1)
for _ in range(K):
_accupy.distill(q)
return q.reshape(p.shape) | Algorithm 4.3. Error-free vector transformation for summation.
The vector p is transformed without changing the sum, and p_n is replaced
by float(sum(p)). Kahan [21] calls this a 'distillation algorithm.' |
def reset(self):
    """Remove all of the block's components and clear any selections."""
    # Start from a fresh ordered mapping so later additions keep
    # insertion order, then log the reset for traceability.
    self._components = OrderedDict()
    self.clear_selections()
    self._logger.info("<block: %s> reset component list" % self.name)
def handle_call_response(self, result, node):
if not result[0]:
log.warning("no response from %s, removing from router", node)
self.router.remove_contact(node)
return result
log.info("got successful response from %s", node)
self.welcome_if_new(node)
re... | If we get a response, add the node to the routing table. If
we get no response, make sure it's removed from the routing table. |
def pop_event(self):
    """Pop the next queued event from the queue.

    :raise ValueError: If there is no event queued.
    """
    # Hold the lock for both the emptiness check and the pop so the
    # two can't be interleaved with another thread's access.
    with self.lock:
        if self.events:
            return self.events.popleft()
        raise ValueError('no events queued')
:raise ValueError: If there is no event queued. |
def create_env_section(pairs, name):
    """Build an environment section as a list of lines.

    Parameters
    ==========
    pairs: iterable of "KEY=VALUE" strings, one export line each
    name: the name of the section to write (e.g., environment)

    Returns the section header ('%<name>') followed by one
    'export KEY=VALUE' line per pair.
    """
    return ['%' + name] + ['export %s' % pair for pair in pairs]
return section | environment key value pairs need to be joined by an equal, and
exported at the end.
Parameters
==========
section: the list of values to return as a parsed list of lines
name: the name of the section to write (e.g., files) |
def decimal_format(value, TWOPLACES=Decimal(10) ** -2):
    """Format a decimal.Decimal-like value to 2 decimal places.

    Parameters
    ----------
    value : Decimal, str, int or float
        Non-Decimal inputs are converted via ``Decimal(str(value))``.
    TWOPLACES : Decimal, optional
        Quantization exponent; defaults to ``Decimal('0.01')``.

    Returns
    -------
    Decimal
        ``value`` rounded to the exponent of ``TWOPLACES``.
    """
    # BUG FIX: the default was Decimal(100) ** -2 == Decimal('0.0001'),
    # which quantized to FOUR decimal places despite the documented (and
    # parameter-named) two-place contract. Decimal(10) ** -2 == Decimal('0.01').
    if not isinstance(value, Decimal):
        value = Decimal(str(value))
    return value.quantize(TWOPLACES)
def expiring_memoize(obj):
cache = obj.cache = {}
last_access = obj.last_access = defaultdict(int)
@wraps(obj)
def memoizer(*args, **kwargs):
key = str(args) + str(kwargs)
if last_access[key] and last_access[key] + 10 < time():
if key in cache:
del cache[key]
... | Like memoize, but forgets after 10 seconds. |
def run_simulation(c1, c2):
print('running simulation...')
traits = character.CharacterCollection(character.fldr)
c1 = traits.generate_random_character()
c2 = traits.generate_random_character()
print(c1)
print(c2)
rules = battle.BattleRules(battle.rules_file)
b = battle.Battle(c1, c2, tr... | using character and planet, run the simulation |
def extract_sort(self, params):
sorts = params.pop('sort', [])
sorts = [sorts] if isinstance(sorts, basestring) else sorts
sorts = [(s[1:], 'desc')
if s.startswith('-') else (s, 'asc')
for s in sorts]
self.sorts = [
{self.adapter.sorts[s]: d}... | Extract and build sort query from parameters |
def results(self, use_cache=True, dialect=None, billing_tier=None):
    """Materialize the view synchronously.

    If you require more control over the execution, use execute() or
    execute_async().

    Args:
      use_cache: whether to use cached results or not.
      dialect: {'legacy', 'standard'}; forwarded to the materialization.
      billing_tier: forwarded to the materialization.
    """
    # Pure delegation to the backing materialization query.
    return self._materialization.results(
        use_cache=use_cache,
        dialect=dialect,
        billing_tier=billing_tier,
    )
billing_tier=billing_tier) | Materialize the view synchronously.
If you require more control over the execution, use execute() or execute_async().
Args:
use_cache: whether to use cached results or not.
dialect : {'legacy', 'standard'}, default 'legacy'
'legacy' : Use BigQuery's legacy SQL dialect.
'standar... |
def minifyspace(parser, token):
    """Template tag removing whitespace, including tabs and newlines.

    Do not use this around a ``<pre>`` tag. Example usage::

        {% minifyspace %} ... {% endminifyspace %}
    """
    # Collect everything up to the closing tag, then discard that tag.
    contents = parser.parse(('endminifyspace',))
    parser.delete_first_token()
    return MinifiedNode(contents)
Do not use this if you are using a <pre> tag.
Example usage::
{% minifyspace %}
<p>
<a title="foo"
href="foo/">
Foo
</a>
</p>
{% endminifysp... |
def _drop_remaining_rules(self, *rules):
if rules:
for rule in rules:
try:
self._remaining_rules.remove(rule)
except ValueError:
pass
else:
self._remaining_rules = [] | Drops rules from the queue of the rules that still need to be
evaluated for the currently processed field.
If no arguments are given, the whole queue is emptied. |
def outer_product_sum(A, B=None):
    r"""Compute the sum of the outer products of the rows in A and B.

        P = \sum_i A[i] B[i]^T

    This is a standard computation for sigma points used in the UKF,
    ensemble Kalman filter, etc.

    Parameters
    ----------
    A : ndarray, shape (N, j)
    B : ndarray, shape (N, k), optional
        Defaults to ``A``, giving ``\sum_i outer(A[i], A[i])``.

    Returns
    -------
    ndarray, shape (j, k)
    """
    if B is None:
        B = A
    # Contract over the row axis directly: same result as building
    # 'ij,ik->ijk' and summing over axis 0, but without materializing
    # the intermediate (N, j, k) array.
    return np.einsum('ij,ik->jk', A, B)
return np.sum(outer, axis=0) | Computes the sum of the outer products of the rows in A and B
P = \Sum {A[i] B[i].T} for i in 0..N
Notionally:
P = 0
for y in A:
P += np.outer(y, y)
This is a standard computation for sigma points used in the UKF, ensemble
Kalman filter, etc., where A would be the... |
def iterate(self):
    """Drive one iteration of the proxy's event processing.

    Must be called regularly when using an external event loop.

    Raises ``RuntimeError`` if the run loop has not been started, or
    if the driver run loop is in use (where iterate() is invalid).
    """
    # Guard clauses instead of if/elif: each raise exits immediately.
    if not self._inLoop:
        raise RuntimeError('run loop not started')
    if self._driverLoop:
        raise RuntimeError('iterate not valid in driver run loop')
    self.proxy.iterate()
def is_super_admin(self, req):
    """Returns True if the admin specified in the request represents the
    .super_admin.

    :param req: The swob.Request to check.
    :returns: True if .super_admin, False otherwise.
    """
    # bool(...) so the documented boolean contract holds: the bare
    # and-chain could return a falsy super_admin_key (None/'') instead
    # of False. Truthiness is unchanged, so callers are unaffected.
    return bool(
        req.headers.get('x-auth-admin-user') == '.super_admin' and
        self.super_admin_key and
        req.headers.get('x-auth-admin-key') == self.super_admin_key)
.super_admin.
:param req: The swob.Request to check.
:param returns: True if .super_admin. |
def has_parent_vaults(self, vault_id):
    """Tests if the ``Vault`` has any parents.

    arg: vault_id (osid.id.Id): a vault ``Id``
    return: (boolean) - ``true`` if the vault has parents, ``false``
            otherwise
    """
    # Use the catalog session when one is configured; otherwise fall
    # back to the hierarchy session.
    if self._catalog_session is None:
        return self._hierarchy_session.has_parents(id_=vault_id)
    return self._catalog_session.has_parent_catalogs(catalog_id=vault_id)
arg: vault_id (osid.id.Id): a vault ``Id``
return: (boolean) - ``true`` if the vault has parents, ``false``
otherwise
raise: NotFound - ``vault_id`` is not found
raise: NullArgument - ``vault_id`` is ``null``
raise: O... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.